From 56b0d91f4dc7416538ac4c1af04c52ac5bf5929b Mon Sep 17 00:00:00 2001 From: Philipp Gesang Date: Mon, 8 Dec 2014 20:29:21 +0100 Subject: [*] reorganize src/ subtree This moves the fontloader files into a separate subdirectory to simplify maintainance. --- Makefile | 2 + src/fontloader/luaotfload-basics-gen.lua | 368 + src/fontloader/luaotfload-basics-nod.lua | 178 + src/fontloader/luaotfload-fontloader.lua | 14628 +++++++++++++++++++++++++++++ src/fontloader/luaotfload-fonts-cbk.lua | 68 + src/fontloader/luaotfload-fonts-def.lua | 97 + src/fontloader/luaotfload-fonts-enc.lua | 28 + src/fontloader/luaotfload-fonts-ext.lua | 272 + src/fontloader/luaotfload-fonts-inj.lua | 526 ++ src/fontloader/luaotfload-fonts-lua.lua | 33 + src/fontloader/luaotfload-fonts-otn.lua | 2848 ++++++ src/fontloader/luaotfload-fonts-tfm.lua | 38 + src/luaotfload-basics-gen.lua | 368 - src/luaotfload-basics-nod.lua | 178 - src/luaotfload-fontloader.lua | 14628 ----------------------------- src/luaotfload-fonts-cbk.lua | 68 - src/luaotfload-fonts-def.lua | 97 - src/luaotfload-fonts-enc.lua | 28 - src/luaotfload-fonts-ext.lua | 272 - src/luaotfload-fonts-inj.lua | 526 -- src/luaotfload-fonts-lua.lua | 33 - src/luaotfload-fonts-otn.lua | 2848 ------ src/luaotfload-fonts-tfm.lua | 38 - src/luaotfload-main.lua | 110 +- 24 files changed, 19143 insertions(+), 19137 deletions(-) create mode 100644 src/fontloader/luaotfload-basics-gen.lua create mode 100644 src/fontloader/luaotfload-basics-nod.lua create mode 100644 src/fontloader/luaotfload-fontloader.lua create mode 100644 src/fontloader/luaotfload-fonts-cbk.lua create mode 100644 src/fontloader/luaotfload-fonts-def.lua create mode 100644 src/fontloader/luaotfload-fonts-enc.lua create mode 100644 src/fontloader/luaotfload-fonts-ext.lua create mode 100644 src/fontloader/luaotfload-fonts-inj.lua create mode 100644 src/fontloader/luaotfload-fonts-lua.lua create mode 100644 src/fontloader/luaotfload-fonts-otn.lua create mode 100644 src/fontloader/luaotfload-fonts-tfm.lua delete mode 100644 src/luaotfload-basics-gen.lua delete mode 100644 src/luaotfload-basics-nod.lua delete mode 100644 src/luaotfload-fontloader.lua delete mode 100644 src/luaotfload-fonts-cbk.lua delete mode 100644 src/luaotfload-fonts-def.lua delete mode 100644 src/luaotfload-fonts-enc.lua delete mode 100644 src/luaotfload-fonts-ext.lua delete mode 100644 src/luaotfload-fonts-inj.lua delete mode 100644 src/luaotfload-fonts-lua.lua delete mode 100644 src/luaotfload-fonts-otn.lua delete mode 100644 src/luaotfload-fonts-tfm.lua diff --git a/Makefile b/Makefile index 4124a1a..60dec60 100644 --- a/Makefile +++ b/Makefile @@ -5,10 +5,12 @@ NAME = luaotfload DOCSRCDIR = ./doc SCRIPTSRCDIR = ./scripts SRCSRCDIR = ./src +FONTLOADERDIR = $(SRCSRCDIR)/fontloader BUILDDIR = ./build MISCDIR = ./misc SRC = $(wildcard $(SRCSRCDIR)/luaotfload-*.lua) +SRC += $(wildcard $(FONTLOADERDIR)/*.lua) SRC += $(SRCSRCDIR)/luaotfload.sty SRC += $(MISCDIR)/luaotfload-blacklist.cnf diff --git a/src/fontloader/luaotfload-basics-gen.lua b/src/fontloader/luaotfload-basics-gen.lua new file mode 100644 index 0000000..c19a49a --- /dev/null +++ b/src/fontloader/luaotfload-basics-gen.lua @@ -0,0 +1,368 @@ +if not modules then modules = { } end modules ['luat-basics-gen'] = { + version = 1.100, + comment = "companion to luatex-*.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +if context then + texio.write_nl("fatal error: this module 
is not for context") + os.exit() +end + +local dummyfunction = function() +end + +local dummyreporter = function(c) + return function(...) + (texio.reporter or texio.write_nl)(c .. " : " .. string.formatters(...)) + end +end + +statistics = { + register = dummyfunction, + starttiming = dummyfunction, + stoptiming = dummyfunction, + elapsedtime = nil, +} + +directives = { + register = dummyfunction, + enable = dummyfunction, + disable = dummyfunction, +} + +trackers = { + register = dummyfunction, + enable = dummyfunction, + disable = dummyfunction, +} + +experiments = { + register = dummyfunction, + enable = dummyfunction, + disable = dummyfunction, +} + +storage = { -- probably no longer needed + register = dummyfunction, + shared = { }, +} + +logs = { + new = dummyreporter, + reporter = dummyreporter, + messenger = dummyreporter, + report = dummyfunction, +} + +callbacks = { + register = function(n,f) return callback.register(n,f) end, + +} + +utilities = { + storage = { + allocate = function(t) return t or { } end, + mark = function(t) return t or { } end, + }, +} + +characters = characters or { + data = { } +} + +-- we need to cheat a bit here + +texconfig.kpse_init = true + +resolvers = resolvers or { } -- no fancy file helpers used + +local remapper = { + otf = "opentype fonts", + ttf = "truetype fonts", + ttc = "truetype fonts", + dfont = "truetype fonts", -- "truetype dictionary", + cid = "cid maps", + cidmap = "cid maps", + fea = "font feature files", + pfa = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this! + pfb = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this! + afm = "afm", +} + +function resolvers.findfile(name,fileformat) + name = string.gsub(name,"\\","/") + if not fileformat or fileformat == "" then + fileformat = file.suffix(name) + if fileformat == "" then + fileformat = "tex" + end + end + fileformat = string.lower(fileformat) + fileformat = remapper[fileformat] or fileformat + local found = kpse.find_file(name,fileformat) + if not found or found == "" then + found = kpse.find_file(name,"other text files") + end + return found +end + +-- function resolvers.findbinfile(name,fileformat) +-- if not fileformat or fileformat == "" then +-- fileformat = file.suffix(name) +-- end +-- return resolvers.findfile(name,(fileformat and remapper[fileformat]) or fileformat) +-- end + +resolvers.findbinfile = resolvers.findfile + +function resolvers.loadbinfile(filename,filetype) + local data = io.loaddata(filename) + return true, data, #data +end + +function resolvers.resolve(s) + return s +end + +function resolvers.unresolve(s) + return s +end + +-- Caches ... I will make a real stupid version some day when I'm in the +-- mood. After all, the generic code does not need the more advanced +-- ConTeXt features. Cached data is not shared between ConTeXt and other +-- usage as I don't want any dependency at all. Also, ConTeXt might have +-- different needs and tricks added. 
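+-- Editorial illustration only (not part of the upstream file): a commented
+-- sketch of how the generic cache layer defined below is typically driven.
+-- The category/subcategory/file names used here ("fonts", "otl", "demo") are
+-- made up for the example.
+--
+-- local path  = caches.getwritablepath("fonts","otl")   -- creates the cache subdirectories
+-- caches.savedata(path,"demo",{ some = "table" })       -- writes demo.lua plus a compiled luc/lub file
+-- local paths = caches.getreadablepaths("fonts","otl")
+-- local data  = caches.loaddata(paths,"demo")           -- prefers the compiled file when present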
+ +--~ containers.usecache = true + +caches = { } + +local writable = nil +local readables = { } +local usingjit = jit + +if not caches.namespace or caches.namespace == "" or caches.namespace == "context" then + caches.namespace = 'generic' +end + +do + + -- standard context tree setup + + local cachepaths = kpse.expand_var('$TEXMFCACHE') or "" + + -- quite like tex live or so (the weird $TEXMFCACHE test seems to be needed on miktex) + + if cachepaths == "" or cachepaths == "$TEXMFCACHE" then + cachepaths = kpse.expand_var('$TEXMFVAR') or "" + end + + -- this also happened to be used (the weird $TEXMFVAR test seems to be needed on miktex) + + if cachepaths == "" or cachepaths == "$TEXMFVAR" then + cachepaths = kpse.expand_var('$VARTEXMF') or "" + end + + -- and this is a last resort (hm, we could use TEMP or TEMPDIR) + + if cachepaths == "" then + local fallbacks = { "TMPDIR", "TEMPDIR", "TMP", "TEMP", "HOME", "HOMEPATH" } + for i=1,#fallbacks do + cachepaths = os.getenv(fallbacks[i]) or "" + if cachepath ~= "" and lfs.isdir(cachepath) then + break + end + end + end + + if cachepaths == "" then + cachepaths = "." + end + + cachepaths = string.split(cachepaths,os.type == "windows" and ";" or ":") + + for i=1,#cachepaths do + local cachepath = cachepaths[i] + if not lfs.isdir(cachepath) then + lfs.mkdirs(cachepath) -- needed for texlive and latex + if lfs.isdir(cachepath) then + texio.write(string.format("(created cache path: %s)",cachepath)) + end + end + if file.is_writable(cachepath) then + writable = file.join(cachepath,"luatex-cache") + lfs.mkdir(writable) + writable = file.join(writable,caches.namespace) + lfs.mkdir(writable) + break + end + end + + for i=1,#cachepaths do + if file.is_readable(cachepaths[i]) then + readables[#readables+1] = file.join(cachepaths[i],"luatex-cache",caches.namespace) + end + end + + if not writable then + texio.write_nl("quiting: fix your writable cache path") + os.exit() + elseif #readables == 0 then + texio.write_nl("quiting: fix your readable cache path") + os.exit() + elseif #readables == 1 and readables[1] == writable then + texio.write(string.format("(using cache: %s)",writable)) + else + texio.write(string.format("(using write cache: %s)",writable)) + texio.write(string.format("(using read cache: %s)",table.concat(readables, " "))) + end + +end + +function caches.getwritablepath(category,subcategory) + local path = file.join(writable,category) + lfs.mkdir(path) + path = file.join(path,subcategory) + lfs.mkdir(path) + return path +end + +function caches.getreadablepaths(category,subcategory) + local t = { } + for i=1,#readables do + t[i] = file.join(readables[i],category,subcategory) + end + return t +end + +local function makefullname(path,name) + if path and path ~= "" then + return file.addsuffix(file.join(path,name),"lua"), file.addsuffix(file.join(path,name),usingjit and "lub" or "luc") + end +end + +function caches.is_writable(path,name) + local fullname = makefullname(path,name) + return fullname and file.is_writable(fullname) +end + +function caches.loaddata(paths,name) + for i=1,#paths do + local data = false + local luaname, lucname = makefullname(paths[i],name) + if lucname and not lfs.isfile(lucname) and type(caches.compile) == "function" then + -- in case we used luatex and luajittex mixed ... 
lub or luc file + texio.write(string.format("(compiling luc: %s)",lucname)) + data = loadfile(luaname) + if data then + data = data() + end + if data then + caches.compile(data,luaname,lucname) + return data + end + end + if lucname and lfs.isfile(lucname) then -- maybe also check for size + texio.write(string.format("(load luc: %s)",lucname)) + data = loadfile(lucname) + if data then + data = data() + end + if data then + return data + else + texio.write(string.format("(loading failed: %s)",lucname)) + end + end + if luaname and lfs.isfile(luaname) then + texio.write(string.format("(load lua: %s)",luaname)) + data = loadfile(luaname) + if data then + data = data() + end + if data then + return data + end + end + end +end + +function caches.savedata(path,name,data) + local luaname, lucname = makefullname(path,name) + if luaname then + texio.write(string.format("(save: %s)",luaname)) + table.tofile(luaname,data,true) + if lucname and type(caches.compile) == "function" then + os.remove(lucname) -- better be safe + texio.write(string.format("(save: %s)",lucname)) + caches.compile(data,luaname,lucname) + end + end +end + +-- According to KH os.execute is not permitted in plain/latex so there is +-- no reason to use the normal context way. So the method here is slightly +-- different from the one we have in context. We also use different suffixes +-- as we don't want any clashes (sharing cache files is not that handy as +-- context moves on faster.) +-- +-- Beware: serialization might fail on large files (so maybe we should pcall +-- this) in which case one should limit the method to luac and enable support +-- for execution. + +-- function caches.compile(data,luaname,lucname) +-- local d = io.loaddata(luaname) +-- if not d or d == "" then +-- d = table.serialize(data,true) -- slow +-- end +-- if d and d ~= "" then +-- local f = io.open(lucname,'w') +-- if f then +-- local s = loadstring(d) +-- if s then +-- f:write(string.dump(s,true)) +-- end +-- f:close() +-- end +-- end +-- end + +function caches.compile(data,luaname,lucname) + local d = io.loaddata(luaname) + if not d or d == "" then + d = table.serialize(data,true) -- slow + end + if d and d ~= "" then + local f = io.open(lucname,'wb') + if f then + local s = loadstring(d) + if s then + f:write(string.dump(s,true)) + end + f:close() + end + end +end + +-- + +function table.setmetatableindex(t,f) + setmetatable(t,{ __index = f }) +end + +-- helper for plain: + +arguments = { } + +if arg then + for i=1,#arg do + local k, v = string.match(arg[i],"^%-%-([^=]+)=?(.-)$") + if k and v then + arguments[k] = v + end + end +end diff --git a/src/fontloader/luaotfload-basics-nod.lua b/src/fontloader/luaotfload-basics-nod.lua new file mode 100644 index 0000000..373dab5 --- /dev/null +++ b/src/fontloader/luaotfload-basics-nod.lua @@ -0,0 +1,178 @@ +if not modules then modules = { } end modules ['luatex-fonts-nod'] = { + version = 1.001, + comment = "companion to luatex-fonts.lua", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end + +-- Don't depend on code here as it is only needed to complement the +-- font handler code. + +-- Attributes: + +if tex.attribute[0] ~= 0 then + + texio.write_nl("log","!") + texio.write_nl("log","! Attribute 0 is reserved for ConTeXt's font feature management and has to be") + texio.write_nl("log","! set to zero. 
Also, some attributes in the range 1-255 are used for special") + texio.write_nl("log","! purposes so setting them at the TeX end might break the font handler.") + texio.write_nl("log","!") + + tex.attribute[0] = 0 -- else no features + +end + +attributes = attributes or { } +attributes.unsetvalue = -0x7FFFFFFF + +local numbers, last = { }, 127 + +attributes.private = attributes.private or function(name) + local number = numbers[name] + if not number then + if last < 255 then + last = last + 1 + end + number = last + numbers[name] = number + end + return number +end + +-- Nodes: + +nodes = { } +nodes.pool = { } +nodes.handlers = { } + +local nodecodes = { } for k,v in next, node.types () do nodecodes[string.gsub(v,"_","")] = k end +local whatcodes = { } for k,v in next, node.whatsits() do whatcodes[string.gsub(v,"_","")] = k end +local glyphcodes = { [0] = "character", "glyph", "ligature", "ghost", "left", "right" } +local disccodes = { [0] = "discretionary", "explicit", "automatic", "regular", "first", "second" } + +nodes.nodecodes = nodecodes +nodes.whatcodes = whatcodes +nodes.whatsitcodes = whatcodes +nodes.glyphcodes = glyphcodes +nodes.disccodes = disccodes + +local free_node = node.free +local remove_node = node.remove +local new_node = node.new +local traverse_id = node.traverse_id + +nodes.handlers.protectglyphs = node.protect_glyphs +nodes.handlers.unprotectglyphs = node.unprotect_glyphs + +local math_code = nodecodes.math +local end_of_math = node.end_of_math + +function node.end_of_math(n) + if n.id == math_code and n.subtype == 1 then + return n + else + return end_of_math(n) + end +end + +function nodes.remove(head, current, free_too) + local t = current + head, current = remove_node(head,current) + if t then + if free_too then + free_node(t) + t = nil + else + t.next, t.prev = nil, nil + end + end + return head, current, t +end + +function nodes.delete(head,current) + return nodes.remove(head,current,true) +end + +function nodes.pool.kern(k) + local n = new_node("kern",1) + n.kern = k + return n +end + +-- experimental + +local getfield = node.getfield or function(n,tag) return n[tag] end +local setfield = node.setfield or function(n,tag,value) n[tag] = value end + +nodes.getfield = getfield +nodes.setfield = setfield + +nodes.getattr = getfield +nodes.setattr = setfield + +if node.getid then nodes.getid = node.getid else function nodes.getid (n) return getfield(n,"id") end end +if node.getsubtype then nodes.getsubtype = node.getsubtype else function nodes.getsubtype(n) return getfield(n,"subtype") end end +if node.getnext then nodes.getnext = node.getnext else function nodes.getnext (n) return getfield(n,"next") end end +if node.getprev then nodes.getprev = node.getprev else function nodes.getprev (n) return getfield(n,"prev") end end +if node.getchar then nodes.getchar = node.getchar else function nodes.getchar (n) return getfield(n,"char") end end +if node.getfont then nodes.getfont = node.getfont else function nodes.getfont (n) return getfield(n,"font") end end +if node.getlist then nodes.getlist = node.getlist else function nodes.getlist (n) return getfield(n,"list") end end + +function nodes.tonut (n) return n end +function nodes.tonode(n) return n end + +-- being lazy ... just copy a bunch ... 
not all needed in generic but we assume +-- nodes to be kind of private anyway + +nodes.tostring = node.tostring or tostring +nodes.copy = node.copy +nodes.copy_list = node.copy_list +nodes.delete = node.delete +nodes.dimensions = node.dimensions +nodes.end_of_math = node.end_of_math +nodes.flush_list = node.flush_list +nodes.flush_node = node.flush_node +nodes.free = node.free +nodes.insert_after = node.insert_after +nodes.insert_before = node.insert_before +nodes.hpack = node.hpack +nodes.new = node.new +nodes.tail = node.tail +nodes.traverse = node.traverse +nodes.traverse_id = node.traverse_id +nodes.slide = node.slide +nodes.vpack = node.vpack + +nodes.first_glyph = node.first_glyph +nodes.first_character = node.first_character +nodes.has_glyph = node.has_glyph or node.first_glyph + +nodes.current_attr = node.current_attr +nodes.do_ligature_n = node.do_ligature_n +nodes.has_field = node.has_field +nodes.last_node = node.last_node +nodes.usedlist = node.usedlist +nodes.protrusion_skippable = node.protrusion_skippable +nodes.write = node.write + +nodes.has_attribute = node.has_attribute +nodes.set_attribute = node.set_attribute +nodes.unset_attribute = node.unset_attribute + +nodes.protect_glyphs = node.protect_glyphs +nodes.unprotect_glyphs = node.unprotect_glyphs +nodes.kerning = node.kerning +nodes.ligaturing = node.ligaturing +nodes.mlist_to_hlist = node.mlist_to_hlist + +-- in generic code, at least for some time, we stay nodes, while in context +-- we can go nuts (e.g. experimental); this split permits us us keep code +-- used elsewhere stable but at the same time play around in context + +nodes.nuts = nodes diff --git a/src/fontloader/luaotfload-fontloader.lua b/src/fontloader/luaotfload-fontloader.lua new file mode 100644 index 0000000..e9c6638 --- /dev/null +++ b/src/fontloader/luaotfload-fontloader.lua @@ -0,0 +1,14628 @@ +-- merged file : luatex-fonts-merged.lua +-- parent file : luatex-fonts.lua +-- merge date : 12/06/14 14:20:08 + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['l-lua']={ + version=1.001, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local major,minor=string.match(_VERSION,"^[^%d]+(%d+)%.(%d+).*$") +_MAJORVERSION=tonumber(major) or 5 +_MINORVERSION=tonumber(minor) or 1 +_LUAVERSION=_MAJORVERSION+_MINORVERSION/10 +if not lpeg then + lpeg=require("lpeg") +end +if loadstring then + local loadnormal=load + function load(first,...) + if type(first)=="string" then + return loadstring(first,...) + else + return loadnormal(first,...) + end + end +else + loadstring=load +end +if not ipairs then + local function iterate(a,i) + i=i+1 + local v=a[i] + if v~=nil then + return i,v + end + end + function ipairs(a) + return iterate,a,0 + end +end +if not pairs then + function pairs(t) + return next,t + end +end +if not table.unpack then + table.unpack=_G.unpack +elseif not unpack then + _G.unpack=table.unpack +end +if not package.loaders then + package.loaders=package.searchers +end +local print,select,tostring=print,select,tostring +local inspectors={} +function setinspector(inspector) + inspectors[#inspectors+1]=inspector +end +function inspect(...) + for s=1,select("#",...) do + local value=select(s,...) 
+ local done=false + for i=1,#inspectors do + done=inspectors[i](value) + if done then + break + end + end + if not done then + print(tostring(value)) + end + end +end +local dummy=function() end +function optionalrequire(...) + local ok,result=xpcall(require,dummy,...) + if ok then + return result + end +end +if lua then + lua.mask=load([[τεχ = 1]]) and "utf" or "ascii" +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['l-lpeg']={ + version=1.001, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +lpeg=require("lpeg") +if not lpeg.print then function lpeg.print(...) print(lpeg.pcode(...)) end end +local type,next,tostring=type,next,tostring +local byte,char,gmatch,format=string.byte,string.char,string.gmatch,string.format +local floor=math.floor +local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt +local lpegtype,lpegmatch,lpegprint=lpeg.type,lpeg.match,lpeg.print +if setinspector then + setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end) +end +lpeg.patterns=lpeg.patterns or {} +local patterns=lpeg.patterns +local anything=P(1) +local endofstring=P(-1) +local alwaysmatched=P(true) +patterns.anything=anything +patterns.endofstring=endofstring +patterns.beginofstring=alwaysmatched +patterns.alwaysmatched=alwaysmatched +local sign=S('+-') +local zero=P('0') +local digit=R('09') +local octdigit=R("07") +local lowercase=R("az") +local uppercase=R("AZ") +local underscore=P("_") +local hexdigit=digit+lowercase+uppercase +local cr,lf,crlf=P("\r"),P("\n"),P("\r\n") +local newline=P("\r")*(P("\n")+P(true))+P("\n") +local escaped=P("\\")*anything +local squote=P("'") +local dquote=P('"') +local space=P(" ") +local period=P(".") +local comma=P(",") +local utfbom_32_be=P('\000\000\254\255') +local utfbom_32_le=P('\255\254\000\000') +local utfbom_16_be=P('\254\255') +local utfbom_16_le=P('\255\254') +local utfbom_8=P('\239\187\191') +local utfbom=utfbom_32_be+utfbom_32_le+utfbom_16_be+utfbom_16_le+utfbom_8 +local utftype=utfbom_32_be*Cc("utf-32-be")+utfbom_32_le*Cc("utf-32-le")+utfbom_16_be*Cc("utf-16-be")+utfbom_16_le*Cc("utf-16-le")+utfbom_8*Cc("utf-8")+alwaysmatched*Cc("utf-8") +local utfstricttype=utfbom_32_be*Cc("utf-32-be")+utfbom_32_le*Cc("utf-32-le")+utfbom_16_be*Cc("utf-16-be")+utfbom_16_le*Cc("utf-16-le")+utfbom_8*Cc("utf-8") +local utfoffset=utfbom_32_be*Cc(4)+utfbom_32_le*Cc(4)+utfbom_16_be*Cc(2)+utfbom_16_le*Cc(2)+utfbom_8*Cc(3)+Cc(0) +local utf8next=R("\128\191") +patterns.utfbom_32_be=utfbom_32_be +patterns.utfbom_32_le=utfbom_32_le +patterns.utfbom_16_be=utfbom_16_be +patterns.utfbom_16_le=utfbom_16_le +patterns.utfbom_8=utfbom_8 +patterns.utf_16_be_nl=P("\000\r\000\n")+P("\000\r")+P("\000\n") +patterns.utf_16_le_nl=P("\r\000\n\000")+P("\r\000")+P("\n\000") +patterns.utf_32_be_nl=P("\000\000\000\r\000\000\000\n")+P("\000\000\000\r")+P("\000\000\000\n") +patterns.utf_32_le_nl=P("\r\000\000\000\n\000\000\000")+P("\r\000\000\000")+P("\n\000\000\000") +patterns.utf8one=R("\000\127") +patterns.utf8two=R("\194\223")*utf8next +patterns.utf8three=R("\224\239")*utf8next*utf8next +patterns.utf8four=R("\240\244")*utf8next*utf8next*utf8next +patterns.utfbom=utfbom +patterns.utftype=utftype +patterns.utfstricttype=utfstricttype +patterns.utfoffset=utfoffset +local 
utf8char=patterns.utf8one+patterns.utf8two+patterns.utf8three+patterns.utf8four +local validutf8char=utf8char^0*endofstring*Cc(true)+Cc(false) +local utf8character=P(1)*R("\128\191")^0 +patterns.utf8=utf8char +patterns.utf8char=utf8char +patterns.utf8character=utf8character +patterns.validutf8=validutf8char +patterns.validutf8char=validutf8char +local eol=S("\n\r") +local spacer=S(" \t\f\v") +local whitespace=eol+spacer +local nonspacer=1-spacer +local nonwhitespace=1-whitespace +patterns.eol=eol +patterns.spacer=spacer +patterns.whitespace=whitespace +patterns.nonspacer=nonspacer +patterns.nonwhitespace=nonwhitespace +local stripper=spacer^0*C((spacer^0*nonspacer^1)^0) +local fullstripper=whitespace^0*C((whitespace^0*nonwhitespace^1)^0) +local collapser=Cs(spacer^0/""*nonspacer^0*((spacer^0/" "*nonspacer^1)^0)) +local b_collapser=Cs(whitespace^0/""*(nonwhitespace^1+whitespace^1/" ")^0) +local e_collapser=Cs((whitespace^1*P(-1)/""+nonwhitespace^1+whitespace^1/" ")^0) +local m_collapser=Cs((nonwhitespace^1+whitespace^1/" ")^0) +local b_stripper=Cs(spacer^0/""*(nonspacer^1+spacer^1/" ")^0) +local e_stripper=Cs((spacer^1*P(-1)/""+nonspacer^1+spacer^1/" ")^0) +local m_stripper=Cs((nonspacer^1+spacer^1/" ")^0) +patterns.stripper=stripper +patterns.fullstripper=fullstripper +patterns.collapser=collapser +patterns.b_collapser=b_collapser +patterns.m_collapser=m_collapser +patterns.e_collapser=e_collapser +patterns.b_stripper=b_stripper +patterns.m_stripper=m_stripper +patterns.e_stripper=e_stripper +patterns.lowercase=lowercase +patterns.uppercase=uppercase +patterns.letter=patterns.lowercase+patterns.uppercase +patterns.space=space +patterns.tab=P("\t") +patterns.spaceortab=patterns.space+patterns.tab +patterns.newline=newline +patterns.emptyline=newline^1 +patterns.equal=P("=") +patterns.comma=comma +patterns.commaspacer=comma*spacer^0 +patterns.period=period +patterns.colon=P(":") +patterns.semicolon=P(";") +patterns.underscore=underscore +patterns.escaped=escaped +patterns.squote=squote +patterns.dquote=dquote +patterns.nosquote=(escaped+(1-squote))^0 +patterns.nodquote=(escaped+(1-dquote))^0 +patterns.unsingle=(squote/"")*patterns.nosquote*(squote/"") +patterns.undouble=(dquote/"")*patterns.nodquote*(dquote/"") +patterns.unquoted=patterns.undouble+patterns.unsingle +patterns.unspacer=((patterns.spacer^1)/"")^0 +patterns.singlequoted=squote*patterns.nosquote*squote +patterns.doublequoted=dquote*patterns.nodquote*dquote +patterns.quoted=patterns.doublequoted+patterns.singlequoted +patterns.digit=digit +patterns.octdigit=octdigit +patterns.hexdigit=hexdigit +patterns.sign=sign +patterns.cardinal=digit^1 +patterns.integer=sign^-1*digit^1 +patterns.unsigned=digit^0*period*digit^1 +patterns.float=sign^-1*patterns.unsigned +patterns.cunsigned=digit^0*comma*digit^1 +patterns.cpunsigned=digit^0*(period+comma)*digit^1 +patterns.cfloat=sign^-1*patterns.cunsigned +patterns.cpfloat=sign^-1*patterns.cpunsigned +patterns.number=patterns.float+patterns.integer +patterns.cnumber=patterns.cfloat+patterns.integer +patterns.cpnumber=patterns.cpfloat+patterns.integer +patterns.oct=zero*octdigit^1 +patterns.octal=patterns.oct +patterns.HEX=zero*P("X")*(digit+uppercase)^1 +patterns.hex=zero*P("x")*(digit+lowercase)^1 +patterns.hexadecimal=zero*S("xX")*hexdigit^1 +patterns.hexafloat=sign^-1*zero*S("xX")*(hexdigit^0*period*hexdigit^1+hexdigit^1*period*hexdigit^0+hexdigit^1)*(S("pP")*sign^-1*hexdigit^1)^-1 +patterns.decafloat=sign^-1*(digit^0*period*digit^1+digit^1*period*digit^0+digit^1)*S("eE")*sign^-1*digit^1 
+patterns.propername=(uppercase+lowercase+underscore)*(uppercase+lowercase+underscore+digit)^0*endofstring +patterns.somecontent=(anything-newline-space)^1 +patterns.beginline=#(1-newline) +patterns.longtostring=Cs(whitespace^0/""*((patterns.quoted+nonwhitespace^1+whitespace^1/""*(P(-1)+Cc(" ")))^0)) +local function anywhere(pattern) + return P { P(pattern)+1*V(1) } +end +lpeg.anywhere=anywhere +function lpeg.instringchecker(p) + p=anywhere(p) + return function(str) + return lpegmatch(p,str) and true or false + end +end +function lpeg.splitter(pattern,action) + return (((1-P(pattern))^1)/action+1)^0 +end +function lpeg.tsplitter(pattern,action) + return Ct((((1-P(pattern))^1)/action+1)^0) +end +local splitters_s,splitters_m,splitters_t={},{},{} +local function splitat(separator,single) + local splitter=(single and splitters_s[separator]) or splitters_m[separator] + if not splitter then + separator=P(separator) + local other=C((1-separator)^0) + if single then + local any=anything + splitter=other*(separator*C(any^0)+"") + splitters_s[separator]=splitter + else + splitter=other*(separator*other)^0 + splitters_m[separator]=splitter + end + end + return splitter +end +local function tsplitat(separator) + local splitter=splitters_t[separator] + if not splitter then + splitter=Ct(splitat(separator)) + splitters_t[separator]=splitter + end + return splitter +end +lpeg.splitat=splitat +lpeg.tsplitat=tsplitat +function string.splitup(str,separator) + if not separator then + separator="," + end + return lpegmatch(splitters_m[separator] or splitat(separator),str) +end +local cache={} +function lpeg.split(separator,str) + local c=cache[separator] + if not c then + c=tsplitat(separator) + cache[separator]=c + end + return lpegmatch(c,str) +end +function string.split(str,separator) + if separator then + local c=cache[separator] + if not c then + c=tsplitat(separator) + cache[separator]=c + end + return lpegmatch(c,str) + else + return { str } + end +end +local spacing=patterns.spacer^0*newline +local empty=spacing*Cc("") +local nonempty=Cs((1-spacing)^1)*spacing^-1 +local content=(empty+nonempty)^1 +patterns.textline=content +local linesplitter=tsplitat(newline) +patterns.linesplitter=linesplitter +function string.splitlines(str) + return lpegmatch(linesplitter,str) +end +local cache={} +function lpeg.checkedsplit(separator,str) + local c=cache[separator] + if not c then + separator=P(separator) + local other=C((1-separator)^1) + c=Ct(separator^0*other*(separator^1*other)^0) + cache[separator]=c + end + return lpegmatch(c,str) +end +function string.checkedsplit(str,separator) + local c=cache[separator] + if not c then + separator=P(separator) + local other=C((1-separator)^1) + c=Ct(separator^0*other*(separator^1*other)^0) + cache[separator]=c + end + return lpegmatch(c,str) +end +local function f2(s) local c1,c2=byte(s,1,2) return c1*64+c2-12416 end +local function f3(s) local c1,c2,c3=byte(s,1,3) return (c1*64+c2)*64+c3-925824 end +local function f4(s) local c1,c2,c3,c4=byte(s,1,4) return ((c1*64+c2)*64+c3)*64+c4-63447168 end +local utf8byte=patterns.utf8one/byte+patterns.utf8two/f2+patterns.utf8three/f3+patterns.utf8four/f4 +patterns.utf8byte=utf8byte +local cache={} +function lpeg.stripper(str) + if type(str)=="string" then + local s=cache[str] + if not s then + s=Cs(((S(str)^1)/""+1)^0) + cache[str]=s + end + return s + else + return Cs(((str^1)/""+1)^0) + end +end +local cache={} +function lpeg.keeper(str) + if type(str)=="string" then + local s=cache[str] + if not s then + 
s=Cs((((1-S(str))^1)/""+1)^0) + cache[str]=s + end + return s + else + return Cs((((1-str)^1)/""+1)^0) + end +end +function lpeg.frontstripper(str) + return (P(str)+P(true))*Cs(anything^0) +end +function lpeg.endstripper(str) + return Cs((1-P(str)*endofstring)^0) +end +function lpeg.replacer(one,two,makefunction,isutf) + local pattern + local u=isutf and utf8char or 1 + if type(one)=="table" then + local no=#one + local p=P(false) + if no==0 then + for k,v in next,one do + p=p+P(k)/v + end + pattern=Cs((p+u)^0) + elseif no==1 then + local o=one[1] + one,two=P(o[1]),o[2] + pattern=Cs((one/two+u)^0) + else + for i=1,no do + local o=one[i] + p=p+P(o[1])/o[2] + end + pattern=Cs((p+u)^0) + end + else + pattern=Cs((P(one)/(two or "")+u)^0) + end + if makefunction then + return function(str) + return lpegmatch(pattern,str) + end + else + return pattern + end +end +function lpeg.finder(lst,makefunction,isutf) + local pattern + if type(lst)=="table" then + pattern=P(false) + if #lst==0 then + for k,v in next,lst do + pattern=pattern+P(k) + end + else + for i=1,#lst do + pattern=pattern+P(lst[i]) + end + end + else + pattern=P(lst) + end + if isutf then + pattern=((utf8char or 1)-pattern)^0*pattern + else + pattern=(1-pattern)^0*pattern + end + if makefunction then + return function(str) + return lpegmatch(pattern,str) + end + else + return pattern + end +end +local splitters_f,splitters_s={},{} +function lpeg.firstofsplit(separator) + local splitter=splitters_f[separator] + if not splitter then + local pattern=P(separator) + splitter=C((1-pattern)^0) + splitters_f[separator]=splitter + end + return splitter +end +function lpeg.secondofsplit(separator) + local splitter=splitters_s[separator] + if not splitter then + local pattern=P(separator) + splitter=(1-pattern)^0*pattern*C(anything^0) + splitters_s[separator]=splitter + end + return splitter +end +local splitters_s,splitters_p={},{} +function lpeg.beforesuffix(separator) + local splitter=splitters_s[separator] + if not splitter then + local pattern=P(separator) + splitter=C((1-pattern)^0)*pattern*endofstring + splitters_s[separator]=splitter + end + return splitter +end +function lpeg.afterprefix(separator) + local splitter=splitters_p[separator] + if not splitter then + local pattern=P(separator) + splitter=pattern*C(anything^0) + splitters_p[separator]=splitter + end + return splitter +end +function lpeg.balancer(left,right) + left,right=P(left),P(right) + return P { left*((1-left-right)+V(1))^0*right } +end +local nany=utf8char/"" +function lpeg.counter(pattern) + pattern=Cs((P(pattern)/" "+nany)^0) + return function(str) + return #lpegmatch(pattern,str) + end +end +utf=utf or (unicode and unicode.utf8) or {} +local utfcharacters=utf and utf.characters or string.utfcharacters +local utfgmatch=utf and utf.gmatch +local utfchar=utf and utf.char +lpeg.UP=lpeg.P +if utfcharacters then + function lpeg.US(str) + local p=P(false) + for uc in utfcharacters(str) do + p=p+P(uc) + end + return p + end +elseif utfgmatch then + function lpeg.US(str) + local p=P(false) + for uc in utfgmatch(str,".") do + p=p+P(uc) + end + return p + end +else + function lpeg.US(str) + local p=P(false) + local f=function(uc) + p=p+P(uc) + end + lpegmatch((utf8char/f)^0,str) + return p + end +end +local range=utf8byte*utf8byte+Cc(false) +function lpeg.UR(str,more) + local first,last + if type(str)=="number" then + first=str + last=more or first + else + first,last=lpegmatch(range,str) + if not last then + return P(str) + end + end + if first==last then + return P(str) + 
elseif utfchar and (last-first<8) then + local p=P(false) + for i=first,last do + p=p+P(utfchar(i)) + end + return p + else + local f=function(b) + return b>=first and b<=last + end + return utf8byte/f + end +end +function lpeg.is_lpeg(p) + return p and lpegtype(p)=="pattern" +end +function lpeg.oneof(list,...) + if type(list)~="table" then + list={ list,... } + end + local p=P(list[1]) + for l=2,#list do + p=p+P(list[l]) + end + return p +end +local sort=table.sort +local function copyindexed(old) + local new={} + for i=1,#old do + new[i]=old + end + return new +end +local function sortedkeys(tab) + local keys,s={},0 + for key,_ in next,tab do + s=s+1 + keys[s]=key + end + sort(keys) + return keys +end +function lpeg.append(list,pp,delayed,checked) + local p=pp + if #list>0 then + local keys=copyindexed(list) + sort(keys) + for i=#keys,1,-1 do + local k=keys[i] + if p then + p=P(k)+p + else + p=P(k) + end + end + elseif delayed then + local keys=sortedkeys(list) + if p then + for i=1,#keys,1 do + local k=keys[i] + local v=list[k] + p=P(k)/list+p + end + else + for i=1,#keys do + local k=keys[i] + local v=list[k] + if p then + p=P(k)+p + else + p=P(k) + end + end + if p then + p=p/list + end + end + elseif checked then + local keys=sortedkeys(list) + for i=1,#keys do + local k=keys[i] + local v=list[k] + if p then + if k==v then + p=P(k)+p + else + p=P(k)/v+p + end + else + if k==v then + p=P(k) + else + p=P(k)/v + end + end + end + else + local keys=sortedkeys(list) + for i=1,#keys do + local k=keys[i] + local v=list[k] + if p then + p=P(k)/v+p + else + p=P(k)/v + end + end + end + return p +end +local function make(t,hash) + local p=P(false) + local keys=sortedkeys(t) + for i=1,#keys do + local k=keys[i] + local v=t[k] + local h=hash[v] + if h then + if next(v) then + p=p+P(k)*(make(v,hash)+P(true)) + else + p=p+P(k)*P(true) + end + else + if next(v) then + p=p+P(k)*make(v,hash) + else + p=p+P(k) + end + end + end + return p +end +function lpeg.utfchartabletopattern(list) + local tree={} + local hash={} + local n=#list + if n==0 then + for s in next,list do + local t=tree + for c in gmatch(s,".") do + local tc=t[c] + if not tc then + tc={} + t[c]=tc + end + t=tc + end + hash[t]=s + end + else + for i=1,n do + local t=tree + local s=list[i] + for c in gmatch(s,".") do + local tc=t[c] + if not tc then + tc={} + t[c]=tc + end + t=tc + end + hash[t]=s + end + end + return make(tree,hash) +end +patterns.containseol=lpeg.finder(eol) +local function nextstep(n,step,result) + local m=n%step + local d=floor(n/step) + if d>0 then + local v=V(tostring(step)) + local s=result.start + for i=1,d do + if s then + s=v*s + else + s=v + end + end + result.start=s + end + if step>1 and result.start then + local v=V(tostring(step/2)) + result[tostring(step)]=v*v + end + if step>0 then + return nextstep(m,step/2,result) + else + return result + end +end +function lpeg.times(pattern,n) + return P(nextstep(n,2^16,{ "start",["1"]=pattern })) +end +local trailingzeros=zero^0*-digit +local case_1=period*trailingzeros/"" +local case_2=period*(digit-trailingzeros)^1*(trailingzeros/"") +local number=digit^1*(case_1+case_2) +local stripper=Cs((number+1)^0) +lpeg.patterns.stripzeros=stripper +local byte_to_HEX={} +local byte_to_hex={} +local byte_to_dec={} +local hex_to_byte={} +for i=0,255 do + local H=format("%02X",i) + local h=format("%02x",i) + local d=format("%03i",i) + local c=char(i) + byte_to_HEX[c]=H + byte_to_hex[c]=h + byte_to_dec[c]=d + hex_to_byte[h]=c + hex_to_byte[H]=c +end +local 
hextobyte=P(2)/hex_to_byte +local bytetoHEX=P(1)/byte_to_HEX +local bytetohex=P(1)/byte_to_hex +local bytetodec=P(1)/byte_to_dec +local hextobytes=Cs(hextobyte^0) +local bytestoHEX=Cs(bytetoHEX^0) +local bytestohex=Cs(bytetohex^0) +local bytestodec=Cs(bytetodec^0) +patterns.hextobyte=hextobyte +patterns.bytetoHEX=bytetoHEX +patterns.bytetohex=bytetohex +patterns.bytetodec=bytetodec +patterns.hextobytes=hextobytes +patterns.bytestoHEX=bytestoHEX +patterns.bytestohex=bytestohex +patterns.bytestodec=bytestodec +function string.toHEX(s) + if not s or s=="" then + return s + else + return lpegmatch(bytestoHEX,s) + end +end +function string.tohex(s) + if not s or s=="" then + return s + else + return lpegmatch(bytestohex,s) + end +end +function string.todec(s) + if not s or s=="" then + return s + else + return lpegmatch(bytestodec,s) + end +end +function string.tobytes(s) + if not s or s=="" then + return s + else + return lpegmatch(hextobytes,s) + end +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['l-functions']={ + version=1.001, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +functions=functions or {} +function functions.dummy() end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['l-string']={ + version=1.001, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local string=string +local sub,gmatch,format,char,byte,rep,lower=string.sub,string.gmatch,string.format,string.char,string.byte,string.rep,string.lower +local lpegmatch,patterns=lpeg.match,lpeg.patterns +local P,S,C,Ct,Cc,Cs=lpeg.P,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cc,lpeg.Cs +local unquoted=patterns.squote*C(patterns.nosquote)*patterns.squote+patterns.dquote*C(patterns.nodquote)*patterns.dquote +function string.unquoted(str) + return lpegmatch(unquoted,str) or str +end +function string.quoted(str) + return format("%q",str) +end +function string.count(str,pattern) + local n=0 + for _ in gmatch(str,pattern) do + n=n+1 + end + return n +end +function string.limit(str,n,sentinel) + if #str>n then + sentinel=sentinel or "..." 
+ return sub(str,1,(n-#sentinel))..sentinel + else + return str + end +end +local stripper=patterns.stripper +local fullstripper=patterns.fullstripper +local collapser=patterns.collapser +local longtostring=patterns.longtostring +function string.strip(str) + return lpegmatch(stripper,str) or "" +end +function string.fullstrip(str) + return lpegmatch(fullstripper,str) or "" +end +function string.collapsespaces(str) + return lpegmatch(collapser,str) or "" +end +function string.longtostring(str) + return lpegmatch(longtostring,str) or "" +end +local pattern=P(" ")^0*P(-1) +function string.is_empty(str) + if str=="" then + return true + else + return lpegmatch(pattern,str) and true or false + end +end +local anything=patterns.anything +local allescapes=Cc("%")*S(".-+%?()[]*") +local someescapes=Cc("%")*S(".-+%()[]") +local matchescapes=Cc(".")*S("*?") +local pattern_a=Cs ((allescapes+anything )^0 ) +local pattern_b=Cs ((someescapes+matchescapes+anything )^0 ) +local pattern_c=Cs (Cc("^")*(someescapes+matchescapes+anything )^0*Cc("$") ) +function string.escapedpattern(str,simple) + return lpegmatch(simple and pattern_b or pattern_a,str) +end +function string.topattern(str,lowercase,strict) + if str=="" or type(str)~="string" then + return ".*" + elseif strict then + str=lpegmatch(pattern_c,str) + else + str=lpegmatch(pattern_b,str) + end + if lowercase then + return lower(str) + else + return str + end +end +function string.valid(str,default) + return (type(str)=="string" and str~="" and str) or default or nil +end +string.itself=function(s) return s end +local pattern=Ct(C(1)^0) +function string.totable(str) + return lpegmatch(pattern,str) +end +local replacer=lpeg.replacer("@","%%") +function string.tformat(fmt,...) + return format(lpegmatch(replacer,fmt),...) 
+end +string.quote=string.quoted +string.unquote=string.unquoted + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['l-table']={ + version=1.001, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local type,next,tostring,tonumber,ipairs,select=type,next,tostring,tonumber,ipairs,select +local table,string=table,string +local concat,sort,insert,remove=table.concat,table.sort,table.insert,table.remove +local format,lower,dump=string.format,string.lower,string.dump +local getmetatable,setmetatable=getmetatable,setmetatable +local getinfo=debug.getinfo +local lpegmatch,patterns=lpeg.match,lpeg.patterns +local floor=math.floor +local stripper=patterns.stripper +function table.strip(tab) + local lst,l={},0 + for i=1,#tab do + local s=lpegmatch(stripper,tab[i]) or "" + if s=="" then + else + l=l+1 + lst[l]=s + end + end + return lst +end +function table.keys(t) + if t then + local keys,k={},0 + for key,_ in next,t do + k=k+1 + keys[k]=key + end + return keys + else + return {} + end +end +local function compare(a,b) + local ta,tb=type(a),type(b) + if ta==tb then + return a0 then + local n=0 + for _,v in next,t do + n=n+1 + end + if n==#t then + local tt,nt={},0 + for i=1,#t do + local v=t[i] + local tv=type(v) + if tv=="number" then + nt=nt+1 + if hexify then + tt[nt]=format("0x%X",v) + else + tt[nt]=tostring(v) + end + elseif tv=="string" then + nt=nt+1 + tt[nt]=format("%q",v) + elseif tv=="boolean" then + nt=nt+1 + tt[nt]=v and "true" or "false" + else + tt=nil + break + end + end + return tt + end + end + return nil +end +local propername=patterns.propername +local function dummy() end +local function do_serialize(root,name,depth,level,indexed) + if level>0 then + depth=depth.." 
" + if indexed then + handle(format("%s{",depth)) + else + local tn=type(name) + if tn=="number" then + if hexify then + handle(format("%s[0x%X]={",depth,name)) + else + handle(format("%s[%s]={",depth,name)) + end + elseif tn=="string" then + if noquotes and not reserved[name] and lpegmatch(propername,name) then + handle(format("%s%s={",depth,name)) + else + handle(format("%s[%q]={",depth,name)) + end + elseif tn=="boolean" then + handle(format("%s[%s]={",depth,name and "true" or "false")) + else + handle(format("%s{",depth)) + end + end + end + if root and next(root) then + local first,last=nil,0 + if compact then + last=#root + for k=1,last do + if root[k]==nil then + last=k-1 + break + end + end + if last>0 then + first=1 + end + end + local sk=sortedkeys(root) + for i=1,#sk do + local k=sk[i] + local v=root[k] + local tv,tk=type(v),type(k) + if compact and first and tk=="number" and k>=first and k<=last then + if tv=="number" then + if hexify then + handle(format("%s 0x%X,",depth,v)) + else + handle(format("%s %s,",depth,v)) + end + elseif tv=="string" then + if reduce and tonumber(v) then + handle(format("%s %s,",depth,v)) + else + handle(format("%s %q,",depth,v)) + end + elseif tv=="table" then + if not next(v) then + handle(format("%s {},",depth)) + elseif inline then + local st=simple_table(v) + if st then + handle(format("%s { %s },",depth,concat(st,", "))) + else + do_serialize(v,k,depth,level+1,true) + end + else + do_serialize(v,k,depth,level+1,true) + end + elseif tv=="boolean" then + handle(format("%s %s,",depth,v and "true" or "false")) + elseif tv=="function" then + if functions then + handle(format('%s load(%q),',depth,dump(v))) + else + handle(format('%s "function",',depth)) + end + else + handle(format("%s %q,",depth,tostring(v))) + end + elseif k=="__p__" then + if false then + handle(format("%s __p__=nil,",depth)) + end + elseif tv=="number" then + if tk=="number" then + if hexify then + handle(format("%s [0x%X]=0x%X,",depth,k,v)) + else + handle(format("%s [%s]=%s,",depth,k,v)) + end + elseif tk=="boolean" then + if hexify then + handle(format("%s [%s]=0x%X,",depth,k and "true" or "false",v)) + else + handle(format("%s [%s]=%s,",depth,k and "true" or "false",v)) + end + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + if hexify then + handle(format("%s %s=0x%X,",depth,k,v)) + else + handle(format("%s %s=%s,",depth,k,v)) + end + else + if hexify then + handle(format("%s [%q]=0x%X,",depth,k,v)) + else + handle(format("%s [%q]=%s,",depth,k,v)) + end + end + elseif tv=="string" then + if reduce and tonumber(v) then + if tk=="number" then + if hexify then + handle(format("%s [0x%X]=%s,",depth,k,v)) + else + handle(format("%s [%s]=%s,",depth,k,v)) + end + elseif tk=="boolean" then + handle(format("%s [%s]=%s,",depth,k and "true" or "false",v)) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s=%s,",depth,k,v)) + else + handle(format("%s [%q]=%s,",depth,k,v)) + end + else + if tk=="number" then + if hexify then + handle(format("%s [0x%X]=%q,",depth,k,v)) + else + handle(format("%s [%s]=%q,",depth,k,v)) + end + elseif tk=="boolean" then + handle(format("%s [%s]=%q,",depth,k and "true" or "false",v)) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s=%q,",depth,k,v)) + else + handle(format("%s [%q]=%q,",depth,k,v)) + end + end + elseif tv=="table" then + if not next(v) then + if tk=="number" then + if hexify then + handle(format("%s [0x%X]={},",depth,k)) + else + 
handle(format("%s [%s]={},",depth,k)) + end + elseif tk=="boolean" then + handle(format("%s [%s]={},",depth,k and "true" or "false")) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s={},",depth,k)) + else + handle(format("%s [%q]={},",depth,k)) + end + elseif inline then + local st=simple_table(v) + if st then + if tk=="number" then + if hexify then + handle(format("%s [0x%X]={ %s },",depth,k,concat(st,", "))) + else + handle(format("%s [%s]={ %s },",depth,k,concat(st,", "))) + end + elseif tk=="boolean" then + handle(format("%s [%s]={ %s },",depth,k and "true" or "false",concat(st,", "))) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s={ %s },",depth,k,concat(st,", "))) + else + handle(format("%s [%q]={ %s },",depth,k,concat(st,", "))) + end + else + do_serialize(v,k,depth,level+1) + end + else + do_serialize(v,k,depth,level+1) + end + elseif tv=="boolean" then + if tk=="number" then + if hexify then + handle(format("%s [0x%X]=%s,",depth,k,v and "true" or "false")) + else + handle(format("%s [%s]=%s,",depth,k,v and "true" or "false")) + end + elseif tk=="boolean" then + handle(format("%s [%s]=%s,",depth,tostring(k),v and "true" or "false")) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s=%s,",depth,k,v and "true" or "false")) + else + handle(format("%s [%q]=%s,",depth,k,v and "true" or "false")) + end + elseif tv=="function" then + if functions then + local f=getinfo(v).what=="C" and dump(dummy) or dump(v) + if tk=="number" then + if hexify then + handle(format("%s [0x%X]=load(%q),",depth,k,f)) + else + handle(format("%s [%s]=load(%q),",depth,k,f)) + end + elseif tk=="boolean" then + handle(format("%s [%s]=load(%q),",depth,k and "true" or "false",f)) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s=load(%q),",depth,k,f)) + else + handle(format("%s [%q]=load(%q),",depth,k,f)) + end + end + else + if tk=="number" then + if hexify then + handle(format("%s [0x%X]=%q,",depth,k,tostring(v))) + else + handle(format("%s [%s]=%q,",depth,k,tostring(v))) + end + elseif tk=="boolean" then + handle(format("%s [%s]=%q,",depth,k and "true" or "false",tostring(v))) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s=%q,",depth,k,tostring(v))) + else + handle(format("%s [%q]=%q,",depth,k,tostring(v))) + end + end + end + end + if level>0 then + handle(format("%s},",depth)) + end +end +local function serialize(_handle,root,name,specification) + local tname=type(name) + if type(specification)=="table" then + noquotes=specification.noquotes + hexify=specification.hexify + handle=_handle or specification.handle or print + reduce=specification.reduce or false + functions=specification.functions + compact=specification.compact + inline=specification.inline and compact + if functions==nil then + functions=true + end + if compact==nil then + compact=true + end + if inline==nil then + inline=compact + end + else + noquotes=false + hexify=false + handle=_handle or print + reduce=false + compact=true + inline=true + functions=true + end + if tname=="string" then + if name=="return" then + handle("return {") + else + handle(name.."={") + end + elseif tname=="number" then + if hexify then + handle(format("[0x%X]={",name)) + else + handle("["..name.."]={") + end + elseif tname=="boolean" then + if name then + handle("return {") + else + handle("{") + end + else + handle("t={") + end + if root then + 
if getmetatable(root) then + local dummy=root._w_h_a_t_e_v_e_r_ + root._w_h_a_t_e_v_e_r_=nil + end + if next(root) then + do_serialize(root,name,"",0) + end + end + handle("}") +end +function table.serialize(root,name,specification) + local t,n={},0 + local function flush(s) + n=n+1 + t[n]=s + end + serialize(flush,root,name,specification) + return concat(t,"\n") +end +table.tohandle=serialize +local maxtab=2*1024 +function table.tofile(filename,root,name,specification) + local f=io.open(filename,'w') + if f then + if maxtab>1 then + local t,n={},0 + local function flush(s) + n=n+1 + t[n]=s + if n>maxtab then + f:write(concat(t,"\n"),"\n") + t,n={},0 + end + end + serialize(flush,root,name,specification) + f:write(concat(t,"\n"),"\n") + else + local function flush(s) + f:write(s,"\n") + end + serialize(flush,root,name,specification) + end + f:close() + io.flush() + end +end +local function flattened(t,f,depth) + if f==nil then + f={} + depth=0xFFFF + elseif tonumber(f) then + depth=f + f={} + elseif not depth then + depth=0xFFFF + end + for k,v in next,t do + if type(k)~="number" then + if depth>0 and type(v)=="table" then + flattened(v,f,depth-1) + else + f[#f+1]=v + end + end + end + for k=1,#t do + local v=t[k] + if depth>0 and type(v)=="table" then + flattened(v,f,depth-1) + else + f[#f+1]=v + end + end + return f +end +table.flattened=flattened +local function unnest(t,f) + if not f then + f={} + end + for i=1,#t do + local v=t[i] + if type(v)=="table" then + if type(v[1])=="table" then + unnest(v,f) + else + f[#f+1]=v + end + else + f[#f+1]=v + end + end + return f +end +function table.unnest(t) + return unnest(t) +end +local function are_equal(a,b,n,m) + if a and b and #a==#b then + n=n or 1 + m=m or #a + for i=n,m do + local ai,bi=a[i],b[i] + if ai==bi then + elseif type(ai)=="table" and type(bi)=="table" then + if not are_equal(ai,bi) then + return false + end + else + return false + end + end + return true + else + return false + end +end +local function identical(a,b) + for ka,va in next,a do + local vb=b[ka] + if va==vb then + elseif type(va)=="table" and type(vb)=="table" then + if not identical(va,vb) then + return false + end + else + return false + end + end + return true +end +table.identical=identical +table.are_equal=are_equal +local function sparse(old,nest,keeptables) + local new={} + for k,v in next,old do + if not (v=="" or v==false) then + if nest and type(v)=="table" then + v=sparse(v,nest) + if keeptables or next(v) then + new[k]=v + end + else + new[k]=v + end + end + end + return new +end +table.sparse=sparse +function table.compact(t) + return sparse(t,true,true) +end +function table.contains(t,v) + if t then + for i=1,#t do + if t[i]==v then + return i + end + end + end + return false +end +function table.count(t) + local n=0 + for k,v in next,t do + n=n+1 + end + return n +end +function table.swapped(t,s) + local n={} + if s then + for k,v in next,s do + n[k]=v + end + end + for k,v in next,t do + n[v]=k + end + return n +end +function table.mirrored(t) + local n={} + for k,v in next,t do + n[v]=k + n[k]=v + end + return n +end +function table.reversed(t) + if t then + local tt,tn={},#t + if tn>0 then + local ttn=0 + for i=tn,1,-1 do + ttn=ttn+1 + tt[ttn]=t[i] + end + end + return tt + end +end +function table.reverse(t) + if t then + local n=#t + for i=1,floor(n/2) do + local j=n-i+1 + t[i],t[j]=t[j],t[i] + end + return t + end +end +function table.sequenced(t,sep,simple) + if not t then + return "" + end + local n=#t + local s={} + if n>0 then + for i=1,n 
do + s[i]=tostring(t[i]) + end + else + n=0 + for k,v in sortedhash(t) do + if simple then + if v==true then + n=n+1 + s[n]=k + elseif v and v~="" then + n=n+1 + s[n]=k.."="..tostring(v) + end + else + n=n+1 + s[n]=k.."="..tostring(v) + end + end + end + return concat(s,sep or " | ") +end +function table.print(t,...) + if type(t)~="table" then + print(tostring(t)) + else + serialize(print,t,...) + end +end +if setinspector then + setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end) +end +function table.sub(t,i,j) + return { unpack(t,i,j) } +end +function table.is_empty(t) + return not t or not next(t) +end +function table.has_one_entry(t) + return t and not next(t,next(t)) +end +function table.loweredkeys(t) + local l={} + for k,v in next,t do + l[lower(k)]=v + end + return l +end +function table.unique(old) + local hash={} + local new={} + local n=0 + for i=1,#old do + local oi=old[i] + if not hash[oi] then + n=n+1 + new[n]=oi + hash[oi]=true + end + end + return new +end +function table.sorted(t,...) + sort(t,...) + return t +end +function table.values(t,s) + if t then + local values,keys,v={},{},0 + for key,value in next,t do + if not keys[value] then + v=v+1 + values[v]=value + keys[k]=key + end + end + if s then + sort(values) + end + return values + else + return {} + end +end +function table.filtered(t,pattern,sort,cmp) + if t and type(pattern)=="string" then + if sort then + local s + if cmp then + s=sortedhashkeys(t,function(a,b) return cmp(t,a,b) end) + else + s=sortedkeys(t) + end + local n=0 + local m=#s + local function kv(s) + while n16*1024*1024 then + step=16*1024*1024 + else + step=floor(size/(1024*1024))*1024*1024/8 + end + local data={} + while true do + local r=f:read(step) + if not r then + return concat(data) + else + data[#data+1]=r + end + end + end +end +io.readall=readall +function io.loaddata(filename,textmode) + local f=io.open(filename,(textmode and 'r') or 'rb') + if f then + local data=readall(f) + f:close() + if #data>0 then + return data + end + end +end +function io.savedata(filename,data,joiner) + local f=io.open(filename,"wb") + if f then + if type(data)=="table" then + f:write(concat(data,joiner or "")) + elseif type(data)=="function" then + data(f) + else + f:write(data or "") + end + f:close() + io.flush() + return true + else + return false + end +end +function io.loadlines(filename,n) + local f=io.open(filename,'r') + if not f then + elseif n then + local lines={} + for i=1,n do + local line=f:read("*lines") + if line then + lines[#lines+1]=line + else + break + end + end + f:close() + lines=concat(lines,"\n") + if #lines>0 then + return lines + end + else + local line=f:read("*line") or "" + f:close() + if #line>0 then + return line + end + end +end +function io.loadchunk(filename,n) + local f=io.open(filename,'rb') + if f then + local data=f:read(n or 1024) + f:close() + if #data>0 then + return data + end + end +end +function io.exists(filename) + local f=io.open(filename) + if f==nil then + return false + else + f:close() + return true + end +end +function io.size(filename) + local f=io.open(filename) + if f==nil then + return 0 + else + local s=f:seek("end") + f:close() + return s + end +end +function io.noflines(f) + if type(f)=="string" then + local f=io.open(filename) + if f then + local n=f and io.noflines(f) or 0 + f:close() + return n + else + return 0 + end + else + local n=0 + for _ in f:lines() do + n=n+1 + end + f:seek('set',0) + return n + end +end +local nextchar={ + [ 4]=function(f) + 
return f:read(1,1,1,1) + end, + [ 2]=function(f) + return f:read(1,1) + end, + [ 1]=function(f) + return f:read(1) + end, + [-2]=function(f) + local a,b=f:read(1,1) + return b,a + end, + [-4]=function(f) + local a,b,c,d=f:read(1,1,1,1) + return d,c,b,a + end +} +function io.characters(f,n) + if f then + return nextchar[n or 1],f + end +end +local nextbyte={ + [4]=function(f) + local a,b,c,d=f:read(1,1,1,1) + if d then + return byte(a),byte(b),byte(c),byte(d) + end + end, + [3]=function(f) + local a,b,c=f:read(1,1,1) + if b then + return byte(a),byte(b),byte(c) + end + end, + [2]=function(f) + local a,b=f:read(1,1) + if b then + return byte(a),byte(b) + end + end, + [1]=function (f) + local a=f:read(1) + if a then + return byte(a) + end + end, + [-2]=function (f) + local a,b=f:read(1,1) + if b then + return byte(b),byte(a) + end + end, + [-3]=function(f) + local a,b,c=f:read(1,1,1) + if b then + return byte(c),byte(b),byte(a) + end + end, + [-4]=function(f) + local a,b,c,d=f:read(1,1,1,1) + if d then + return byte(d),byte(c),byte(b),byte(a) + end + end +} +function io.bytes(f,n) + if f then + return nextbyte[n or 1],f + else + return nil,nil + end +end +function io.ask(question,default,options) + while true do + io.write(question) + if options then + io.write(format(" [%s]",concat(options,"|"))) + end + if default then + io.write(format(" [%s]",default)) + end + io.write(format(" ")) + io.flush() + local answer=io.read() + answer=gsub(answer,"^%s*(.*)%s*$","%1") + if answer=="" and default then + return default + elseif not options then + return answer + else + for k=1,#options do + if options[k]==answer then + return answer + end + end + local pattern="^"..answer + for k=1,#options do + local v=options[k] + if find(v,pattern) then + return v + end + end + end + end +end +local function readnumber(f,n,m) + if m then + f:seek("set",n) + n=m + end + if n==1 then + return byte(f:read(1)) + elseif n==2 then + local a,b=byte(f:read(2),1,2) + return 256*a+b + elseif n==3 then + local a,b,c=byte(f:read(3),1,3) + return 256*256*a+256*b+c + elseif n==4 then + local a,b,c,d=byte(f:read(4),1,4) + return 256*256*256*a+256*256*b+256*c+d + elseif n==8 then + local a,b=readnumber(f,4),readnumber(f,4) + return 256*a+b + elseif n==12 then + local a,b,c=readnumber(f,4),readnumber(f,4),readnumber(f,4) + return 256*256*a+256*b+c + elseif n==-2 then + local b,a=byte(f:read(2),1,2) + return 256*a+b + elseif n==-3 then + local c,b,a=byte(f:read(3),1,3) + return 256*256*a+256*b+c + elseif n==-4 then + local d,c,b,a=byte(f:read(4),1,4) + return 256*256*256*a+256*256*b+256*c+d + elseif n==-8 then + local h,g,f,e,d,c,b,a=byte(f:read(8),1,8) + return 256*256*256*256*256*256*256*a+256*256*256*256*256*256*b+256*256*256*256*256*c+256*256*256*256*d+256*256*256*e+256*256*f+256*g+h + else + return 0 + end +end +io.readnumber=readnumber +function io.readstring(f,n,m) + if m then + f:seek("set",n) + n=m + end + local str=gsub(f:read(n),"\000","") + return str +end +if not io.i_limiter then function io.i_limiter() end end +if not io.o_limiter then function io.o_limiter() end end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['l-file']={ + version=1.001, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +file=file or {} +local file=file +if not lfs then + lfs=optionalrequire("lfs") +end +if not lfs then + lfs={ 
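+ -- minimal stand-in used when the lfs library cannot be required: it defines
+ -- only getcurrentdir, attributes, isfile and isdir, with isdir always
+ -- reporting false and asking for the real lfs to be loaded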
+ getcurrentdir=function() + return "." + end, + attributes=function() + return nil + end, + isfile=function(name) + local f=io.open(name,'rb') + if f then + f:close() + return true + end + end, + isdir=function(name) + print("you need to load lfs") + return false + end + } +elseif not lfs.isfile then + local attributes=lfs.attributes + function lfs.isdir(name) + return attributes(name,"mode")=="directory" + end + function lfs.isfile(name) + return attributes(name,"mode")=="file" + end +end +local insert,concat=table.insert,table.concat +local match,find,gmatch=string.match,string.find,string.gmatch +local lpegmatch=lpeg.match +local getcurrentdir,attributes=lfs.currentdir,lfs.attributes +local checkedsplit=string.checkedsplit +local P,R,S,C,Cs,Cp,Cc,Ct=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Cs,lpeg.Cp,lpeg.Cc,lpeg.Ct +local colon=P(":") +local period=P(".") +local periods=P("..") +local fwslash=P("/") +local bwslash=P("\\") +local slashes=S("\\/") +local noperiod=1-period +local noslashes=1-slashes +local name=noperiod^1 +local suffix=period/""*(1-period-slashes)^1*-1 +local pattern=C((1-(slashes^1*noslashes^1*-1))^1)*P(1) +local function pathpart(name,default) + return name and lpegmatch(pattern,name) or default or "" +end +local pattern=(noslashes^0*slashes)^1*C(noslashes^1)*-1 +local function basename(name) + return name and lpegmatch(pattern,name) or name +end +local pattern=(noslashes^0*slashes^1)^0*Cs((1-suffix)^1)*suffix^0 +local function nameonly(name) + return name and lpegmatch(pattern,name) or name +end +local pattern=(noslashes^0*slashes)^0*(noperiod^1*period)^1*C(noperiod^1)*-1 +local function suffixonly(name) + return name and lpegmatch(pattern,name) or "" +end +local pattern=(noslashes^0*slashes)^0*noperiod^1*((period*C(noperiod^1))^1)*-1+Cc("") +local function suffixesonly(name) + if name then + return lpegmatch(pattern,name) + else + return "" + end +end +file.pathpart=pathpart +file.basename=basename +file.nameonly=nameonly +file.suffixonly=suffixonly +file.suffix=suffixonly +file.suffixesonly=suffixesonly +file.suffixes=suffixesonly +file.dirname=pathpart +file.extname=suffixonly +local drive=C(R("az","AZ"))*colon +local path=C((noslashes^0*slashes)^0) +local suffix=period*C(P(1-period)^0*P(-1)) +local base=C((1-suffix)^0) +local rest=C(P(1)^0) +drive=drive+Cc("") +path=path+Cc("") +base=base+Cc("") +suffix=suffix+Cc("") +local pattern_a=drive*path*base*suffix +local pattern_b=path*base*suffix +local pattern_c=C(drive*path)*C(base*suffix) +local pattern_d=path*rest +function file.splitname(str,splitdrive) + if not str then + elseif splitdrive then + return lpegmatch(pattern_a,str) + else + return lpegmatch(pattern_b,str) + end +end +function file.splitbase(str) + if str then + return lpegmatch(pattern_d,str) + else + return "",str + end +end +function file.nametotable(str,splitdrive) + if str then + local path,drive,subpath,name,base,suffix=lpegmatch(pattern_c,str) + if splitdrive then + return { + path=path, + drive=drive, + subpath=subpath, + name=name, + base=base, + suffix=suffix, + } + else + return { + path=path, + name=name, + base=base, + suffix=suffix, + } + end + end +end +local pattern=Cs(((period*(1-period-slashes)^1*-1)/""+1)^1) +function file.removesuffix(name) + return name and lpegmatch(pattern,name) +end +local suffix=period/""*(1-period-slashes)^1*-1 +local pattern=Cs((noslashes^0*slashes^1)^0*((1-suffix)^1))*Cs(suffix) +function file.addsuffix(filename,suffix,criterium) + if not filename or not suffix or suffix=="" then + return filename + elseif 
criterium==true then + return filename.."."..suffix + elseif not criterium then + local n,s=lpegmatch(pattern,filename) + if not s or s=="" then + return filename.."."..suffix + else + return filename + end + else + local n,s=lpegmatch(pattern,filename) + if s and s~="" then + local t=type(criterium) + if t=="table" then + for i=1,#criterium do + if s==criterium[i] then + return filename + end + end + elseif t=="string" then + if s==criterium then + return filename + end + end + end + return (n or filename).."."..suffix + end +end +local suffix=period*(1-period-slashes)^1*-1 +local pattern=Cs((1-suffix)^0) +function file.replacesuffix(name,suffix) + if name and suffix and suffix~="" then + return lpegmatch(pattern,name).."."..suffix + else + return name + end +end +local reslasher=lpeg.replacer(P("\\"),"/") +function file.reslash(str) + return str and lpegmatch(reslasher,str) +end +function file.is_writable(name) + if not name then + elseif lfs.isdir(name) then + name=name.."/m_t_x_t_e_s_t.tmp" + local f=io.open(name,"wb") + if f then + f:close() + os.remove(name) + return true + end + elseif lfs.isfile(name) then + local f=io.open(name,"ab") + if f then + f:close() + return true + end + else + local f=io.open(name,"ab") + if f then + f:close() + os.remove(name) + return true + end + end + return false +end +local readable=P("r")*Cc(true) +function file.is_readable(name) + if name then + local a=attributes(name) + return a and lpegmatch(readable,a.permissions) or false + else + return false + end +end +file.isreadable=file.is_readable +file.iswritable=file.is_writable +function file.size(name) + if name then + local a=attributes(name) + return a and a.size or 0 + else + return 0 + end +end +function file.splitpath(str,separator) + return str and checkedsplit(lpegmatch(reslasher,str),separator or io.pathseparator) +end +function file.joinpath(tab,separator) + return tab and concat(tab,separator or io.pathseparator) +end +local someslash=S("\\/") +local stripper=Cs(P(fwslash)^0/""*reslasher) +local isnetwork=someslash*someslash*(1-someslash)+(1-fwslash-colon)^1*colon +local isroot=fwslash^1*-1 +local hasroot=fwslash^1 +local reslasher=lpeg.replacer(S("\\/"),"/") +local deslasher=lpeg.replacer(S("\\/")^1,"/") +function file.join(one,two,three,...) + if not two then + return one=="" and one or lpegmatch(stripper,one) + end + if one=="" then + return lpegmatch(stripper,three and concat({ two,three,... },"/") or two) + end + if lpegmatch(isnetwork,one) then + local one=lpegmatch(reslasher,one) + local two=lpegmatch(deslasher,three and concat({ two,three,... },"/") or two) + if lpegmatch(hasroot,two) then + return one..two + else + return one.."/"..two + end + elseif lpegmatch(isroot,one) then + local two=lpegmatch(deslasher,three and concat({ two,three,... },"/") or two) + if lpegmatch(hasroot,two) then + return two + else + return "/"..two + end + else + return lpegmatch(deslasher,concat({ one,two,three,... },"/")) + end +end +local drivespec=R("az","AZ")^1*colon +local anchors=fwslash+drivespec +local untouched=periods+(1-period)^1*P(-1) +local mswindrive=Cs(drivespec*(bwslash/"/"+fwslash)^0) +local mswinuncpath=(bwslash+fwslash)*(bwslash+fwslash)*Cc("//") +local splitstarter=(mswindrive+mswinuncpath+Cc(false))*Ct(lpeg.splitat(S("/\\")^1)) +local absolute=fwslash +function file.collapsepath(str,anchor) + if not str then + return + end + if anchor==true and not lpegmatch(anchors,str) then + str=getcurrentdir().."/"..str + end + if str=="" or str=="." then + return "." 
+ elseif lpegmatch(untouched,str) then + return lpegmatch(reslasher,str) + end + local starter,oldelements=lpegmatch(splitstarter,str) + local newelements={} + local i=#oldelements + while i>0 do + local element=oldelements[i] + if element=='.' then + elseif element=='..' then + local n=i-1 + while n>0 do + local element=oldelements[n] + if element~='..' and element~='.' then + oldelements[n]='.' + break + else + n=n-1 + end + end + if n<1 then + insert(newelements,1,'..') + end + elseif element~="" then + insert(newelements,1,element) + end + i=i-1 + end + if #newelements==0 then + return starter or "." + elseif starter then + return starter..concat(newelements,'/') + elseif lpegmatch(absolute,str) then + return "/"..concat(newelements,'/') + else + newelements=concat(newelements,'/') + if anchor=="." and find(str,"^%./") then + return "./"..newelements + else + return newelements + end + end +end +local tricky=S("/\\")*P(-1) +local attributes=lfs.attributes +function lfs.isdir(name) + if lpegmatch(tricky,name) then + return attributes(name,"mode")=="directory" + else + return attributes(name.."/.","mode")=="directory" + end +end +function lfs.isfile(name) + return attributes(name,"mode")=="file" +end +local validchars=R("az","09","AZ","--","..") +local pattern_a=lpeg.replacer(1-validchars) +local pattern_a=Cs((validchars+P(1)/"-")^1) +local whatever=P("-")^0/"" +local pattern_b=Cs(whatever*(1-whatever*-1)^1) +function file.robustname(str,strict) + if str then + str=lpegmatch(pattern_a,str) or str + if strict then + return lpegmatch(pattern_b,str) or str + else + return str + end + end +end +file.readdata=io.loaddata +file.savedata=io.savedata +function file.copy(oldname,newname) + if oldname and newname then + local data=io.loaddata(oldname) + if data and data~="" then + file.savedata(newname,data) + end + end +end +local letter=R("az","AZ")+S("_-+") +local separator=P("://") +local qualified=period^0*fwslash+letter*colon+letter^1*separator+letter^1*fwslash +local rootbased=fwslash+letter*colon +lpeg.patterns.qualified=qualified +lpeg.patterns.rootbased=rootbased +function file.is_qualified_path(filename) + return filename and lpegmatch(qualified,filename)~=nil +end +function file.is_rootbased_path(filename) + return filename and lpegmatch(rootbased,filename)~=nil +end +function file.strip(name,dir) + if name then + local b,a=match(name,"^(.-)"..dir.."(.*)$") + return a~="" and a or name + end +end +function lfs.mkdirs(path) + local full="" + for sub in gmatch(path,"(/*[^\\/]+)") do + full=full..sub + lfs.mkdir(full) + end +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['l-boolean']={ + version=1.001, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local type,tonumber=type,tonumber +boolean=boolean or {} +local boolean=boolean +function boolean.tonumber(b) + if b then return 1 else return 0 end +end +function toboolean(str,tolerant) + if str==nil then + return false + elseif str==false then + return false + elseif str==true then + return true + elseif str=="true" then + return true + elseif str=="false" then + return false + elseif not tolerant then + return false + elseif str==0 then + return false + elseif (tonumber(str) or 0)>0 then + return true + else + return str=="yes" or str=="on" or str=="t" + end +end +string.toboolean=toboolean +function 
string.booleanstring(str) + if str=="0" then + return false + elseif str=="1" then + return true + elseif str=="" then + return false + elseif str=="false" then + return false + elseif str=="true" then + return true + elseif (tonumber(str) or 0)>0 then + return true + else + return str=="yes" or str=="on" or str=="t" + end +end +function string.is_boolean(str,default,strict) + if type(str)=="string" then + if str=="true" or str=="yes" or str=="on" or str=="t" or (not strict and str=="1") then + return true + elseif str=="false" or str=="no" or str=="off" or str=="f" or (not strict and str=="0") then + return false + end + end + return default +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['l-math']={ + version=1.001, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local floor,sin,cos,tan=math.floor,math.sin,math.cos,math.tan +if not math.round then + function math.round(x) return floor(x+0.5) end +end +if not math.div then + function math.div(n,m) return floor(n/m) end +end +if not math.mod then + function math.mod(n,m) return n%m end +end +local pipi=2*math.pi/360 +if not math.sind then + function math.sind(d) return sin(d*pipi) end + function math.cosd(d) return cos(d*pipi) end + function math.tand(d) return tan(d*pipi) end +end +if not math.odd then + function math.odd (n) return n%2~=0 end + function math.even(n) return n%2==0 end +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['util-str']={ + version=1.001, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +utilities=utilities or {} +utilities.strings=utilities.strings or {} +local strings=utilities.strings +local format,gsub,rep,sub=string.format,string.gsub,string.rep,string.sub +local load,dump=load,string.dump +local tonumber,type,tostring=tonumber,type,tostring +local unpack,concat=table.unpack,table.concat +local P,V,C,S,R,Ct,Cs,Cp,Carg,Cc=lpeg.P,lpeg.V,lpeg.C,lpeg.S,lpeg.R,lpeg.Ct,lpeg.Cs,lpeg.Cp,lpeg.Carg,lpeg.Cc +local patterns,lpegmatch=lpeg.patterns,lpeg.match +local utfchar,utfbyte=utf.char,utf.byte +local loadstripped=nil +if _LUAVERSION<5.2 then + loadstripped=function(str,shortcuts) + return load(str) + end +else + loadstripped=function(str,shortcuts) + if shortcuts then + return load(dump(load(str),true),nil,nil,shortcuts) + else + return load(dump(load(str),true)) + end + end +end +if not number then number={} end +local stripper=patterns.stripzeros +local function points(n) + n=tonumber(n) + return (not n or n==0) and "0pt" or lpegmatch(stripper,format("%.5fpt",n/65536)) +end +local function basepoints(n) + n=tonumber(n) + return (not n or n==0) and "0bp" or lpegmatch(stripper,format("%.5fbp",n*(7200/7227)/65536)) +end +number.points=points +number.basepoints=basepoints +local rubish=patterns.spaceortab^0*patterns.newline +local anyrubish=patterns.spaceortab+patterns.newline +local anything=patterns.anything +local stripped=(patterns.spaceortab^1/"")*patterns.newline +local leading=rubish^0/"" +local trailing=(anyrubish^1*patterns.endofstring)/"" +local redundant=rubish^3/"\n" +local pattern=Cs(leading*(trailing+redundant+stripped+anything)^0) +function 
strings.collapsecrlf(str) + return lpegmatch(pattern,str) +end +local repeaters={} +function strings.newrepeater(str,offset) + offset=offset or 0 + local s=repeaters[str] + if not s then + s={} + repeaters[str]=s + end + local t=s[offset] + if t then + return t + end + t={} + setmetatable(t,{ __index=function(t,k) + if not k then + return "" + end + local n=k+offset + local s=n>0 and rep(str,n) or "" + t[k]=s + return s + end }) + s[offset]=t + return t +end +local extra,tab,start=0,0,4,0 +local nspaces=strings.newrepeater(" ") +string.nspaces=nspaces +local pattern=Carg(1)/function(t) + extra,tab,start=0,t or 7,1 + end*Cs(( + Cp()*patterns.tab/function(position) + local current=(position-start+1)+extra + local spaces=tab-(current-1)%tab + if spaces>0 then + extra=extra+spaces-1 + return nspaces[spaces] + else + return "" + end + end+patterns.newline*Cp()/function(position) + extra,start=0,position + end+patterns.anything + )^1) +function strings.tabtospace(str,tab) + return lpegmatch(pattern,str,1,tab or 7) +end +local newline=patterns.newline +local endofstring=patterns.endofstring +local whitespace=patterns.whitespace +local spacer=patterns.spacer +local space=spacer^0 +local nospace=space/"" +local endofline=nospace*newline +local stripend=(whitespace^1*endofstring)/"" +local normalline=(nospace*((1-space*(newline+endofstring))^1)*nospace) +local stripempty=endofline^1/"" +local normalempty=endofline^1 +local singleempty=endofline*(endofline^0/"") +local doubleempty=endofline*endofline^-1*(endofline^0/"") +local stripstart=stripempty^0 +local p_prune_normal=Cs (stripstart*(stripend+normalline+normalempty )^0 ) +local p_prune_collapse=Cs (stripstart*(stripend+normalline+doubleempty )^0 ) +local p_prune_noempty=Cs (stripstart*(stripend+normalline+singleempty )^0 ) +local p_retain_normal=Cs ((normalline+normalempty )^0 ) +local p_retain_collapse=Cs ((normalline+doubleempty )^0 ) +local p_retain_noempty=Cs ((normalline+singleempty )^0 ) +local striplinepatterns={ + ["prune"]=p_prune_normal, + ["prune and collapse"]=p_prune_collapse, + ["prune and no empty"]=p_prune_noempty, + ["retain"]=p_retain_normal, + ["retain and collapse"]=p_retain_collapse, + ["retain and no empty"]=p_retain_noempty, + ["collapse"]=patterns.collapser, +} +strings.striplinepatterns=striplinepatterns +function strings.striplines(str,how) + return str and lpegmatch(how and striplinepatterns[how] or p_prune_collapse,str) or str +end +strings.striplong=strings.striplines +function strings.nice(str) + str=gsub(str,"[:%-+_]+"," ") + return str +end +local n=0 +local sequenced=table.sequenced +function string.autodouble(s,sep) + if s==nil then + return '""' + end + local t=type(s) + if t=="number" then + return tostring(s) + end + if t=="table" then + return ('"'..sequenced(s,sep or ",")..'"') + end + return ('"'..tostring(s)..'"') +end +function string.autosingle(s,sep) + if s==nil then + return "''" + end + local t=type(s) + if t=="number" then + return tostring(s) + end + if t=="table" then + return ("'"..sequenced(s,sep or ",").."'") + end + return ("'"..tostring(s).."'") +end +local tracedchars={} +string.tracedchars=tracedchars +strings.tracers=tracedchars +function string.tracedchar(b) + if type(b)=="number" then + return tracedchars[b] or (utfchar(b).." (U+"..format('%05X',b)..")") + else + local c=utfbyte(b) + return tracedchars[c] or (b.." 
(U+"..format('%05X',c)..")") + end +end +function number.signed(i) + if i>0 then + return "+",i + else + return "-",-i + end +end +local zero=P("0")^1/"" +local plus=P("+")/"" +local minus=P("-") +local separator=S(".") +local digit=R("09") +local trailing=zero^1*#S("eE") +local exponent=(S("eE")*(plus+Cs((minus*zero^0*P(-1))/"")+minus)*zero^0*(P(-1)*Cc("0")+P(1)^1)) +local pattern_a=Cs(minus^0*digit^1*(separator/""*trailing+separator*(trailing+digit)^0)*exponent) +local pattern_b=Cs((exponent+P(1))^0) +function number.sparseexponent(f,n) + if not n then + n=f + f="%e" + end + local tn=type(n) + if tn=="string" then + local m=tonumber(n) + if m then + return lpegmatch((f=="%e" or f=="%E") and pattern_a or pattern_b,format(f,m)) + end + elseif tn=="number" then + return lpegmatch((f=="%e" or f=="%E") and pattern_a or pattern_b,format(f,n)) + end + return tostring(n) +end +local template=[[ +%s +%s +return function(%s) return %s end +]] +local preamble,environment="",{} +if _LUAVERSION<5.2 then + preamble=[[ +local lpeg=lpeg +local type=type +local tostring=tostring +local tonumber=tonumber +local format=string.format +local concat=table.concat +local signed=number.signed +local points=number.points +local basepoints= number.basepoints +local utfchar=utf.char +local utfbyte=utf.byte +local lpegmatch=lpeg.match +local nspaces=string.nspaces +local tracedchar=string.tracedchar +local autosingle=string.autosingle +local autodouble=string.autodouble +local sequenced=table.sequenced +local formattednumber=number.formatted +local sparseexponent=number.sparseexponent + ]] +else + environment={ + global=global or _G, + lpeg=lpeg, + type=type, + tostring=tostring, + tonumber=tonumber, + format=string.format, + concat=table.concat, + signed=number.signed, + points=number.points, + basepoints=number.basepoints, + utfchar=utf.char, + utfbyte=utf.byte, + lpegmatch=lpeg.match, + nspaces=string.nspaces, + tracedchar=string.tracedchar, + autosingle=string.autosingle, + autodouble=string.autodouble, + sequenced=table.sequenced, + formattednumber=number.formatted, + sparseexponent=number.sparseexponent, + } +end +local arguments={ "a1" } +setmetatable(arguments,{ __index=function(t,k) + local v=t[k-1]..",a"..k + t[k]=v + return v + end +}) +local prefix_any=C((S("+- .")+R("09"))^0) +local prefix_tab=P("{")*C((1-P("}"))^0)*P("}")+C((1-R("az","AZ","09","%%"))^0) +local format_s=function(f) + n=n+1 + if f and f~="" then + return format("format('%%%ss',a%s)",f,n) + else + return format("(a%s or '')",n) + end +end +local format_S=function(f) + n=n+1 + if f and f~="" then + return format("format('%%%ss',tostring(a%s))",f,n) + else + return format("tostring(a%s)",n) + end +end +local format_q=function() + n=n+1 + return format("(a%s and format('%%q',a%s) or '')",n,n) +end +local format_Q=function() + n=n+1 + return format("format('%%q',tostring(a%s))",n) +end +local format_i=function(f) + n=n+1 + if f and f~="" then + return format("format('%%%si',a%s)",f,n) + else + return format("format('%%i',a%s)",n) + end +end +local format_d=format_i +local format_I=function(f) + n=n+1 + return format("format('%%s%%%si',signed(a%s))",f,n) +end +local format_f=function(f) + n=n+1 + return format("format('%%%sf',a%s)",f,n) +end +local format_F=function(f) + n=n+1 + if not f or f=="" then + return format("(((a%s > -0.0000000005 and a%s < 0.0000000005) and '0') or format((a%s %% 1 == 0) and '%%i' or '%%.9f',a%s))",n,n,n,n) + else + return format("format((a%s %% 1 == 0) and '%%i' or '%%%sf',a%s)",n,f,n) + end +end +local 
format_g=function(f) + n=n+1 + return format("format('%%%sg',a%s)",f,n) +end +local format_G=function(f) + n=n+1 + return format("format('%%%sG',a%s)",f,n) +end +local format_e=function(f) + n=n+1 + return format("format('%%%se',a%s)",f,n) +end +local format_E=function(f) + n=n+1 + return format("format('%%%sE',a%s)",f,n) +end +local format_j=function(f) + n=n+1 + return format("sparseexponent('%%%se',a%s)",f,n) +end +local format_J=function(f) + n=n+1 + return format("sparseexponent('%%%sE',a%s)",f,n) +end +local format_x=function(f) + n=n+1 + return format("format('%%%sx',a%s)",f,n) +end +local format_X=function(f) + n=n+1 + return format("format('%%%sX',a%s)",f,n) +end +local format_o=function(f) + n=n+1 + return format("format('%%%so',a%s)",f,n) +end +local format_c=function() + n=n+1 + return format("utfchar(a%s)",n) +end +local format_C=function() + n=n+1 + return format("tracedchar(a%s)",n) +end +local format_r=function(f) + n=n+1 + return format("format('%%%s.0f',a%s)",f,n) +end +local format_h=function(f) + n=n+1 + if f=="-" then + f=sub(f,2) + return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) + else + return format("format('0x%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) + end +end +local format_H=function(f) + n=n+1 + if f=="-" then + f=sub(f,2) + return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) + else + return format("format('0x%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) + end +end +local format_u=function(f) + n=n+1 + if f=="-" then + f=sub(f,2) + return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) + else + return format("format('u+%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) + end +end +local format_U=function(f) + n=n+1 + if f=="-" then + f=sub(f,2) + return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) + else + return format("format('U+%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) + end +end +local format_p=function() + n=n+1 + return format("points(a%s)",n) +end +local format_b=function() + n=n+1 + return format("basepoints(a%s)",n) +end +local format_t=function(f) + n=n+1 + if f and f~="" then + return format("concat(a%s,%q)",n,f) + else + return format("concat(a%s)",n) + end +end +local format_T=function(f) + n=n+1 + if f and f~="" then + return format("sequenced(a%s,%q)",n,f) + else + return format("sequenced(a%s)",n) + end +end +local format_l=function() + n=n+1 + return format("(a%s and 'true' or 'false')",n) +end +local format_L=function() + n=n+1 + return format("(a%s and 'TRUE' or 'FALSE')",n) +end +local format_N=function() + n=n+1 + return format("tostring(tonumber(a%s) or a%s)",n,n) +end +local format_a=function(f) + n=n+1 + if f and f~="" then + return format("autosingle(a%s,%q)",n,f) + else + return format("autosingle(a%s)",n) + end +end +local format_A=function(f) + n=n+1 + if f and f~="" then + return format("autodouble(a%s,%q)",n,f) + else + return format("autodouble(a%s)",n) + end +end +local format_w=function(f) + n=n+1 + f=tonumber(f) + if f then + return format("nspaces[%s+a%s]",f,n) + else + return format("nspaces[a%s]",n) + end +end +local format_W=function(f) + return format("nspaces[%s]",tonumber(f) or 0) +end +local digit=patterns.digit +local period=patterns.period +local 
three=digit*digit*digit +local splitter=Cs ( + (((1-(three^1*period))^1+C(three))*(Carg(1)*three)^1+C((1-period)^1))*(P(1)/""*Carg(2))*C(2) +) +patterns.formattednumber=splitter +function number.formatted(n,sep1,sep2) + local s=type(s)=="string" and n or format("%0.2f",n) + if sep1==true then + return lpegmatch(splitter,s,1,".",",") + elseif sep1=="." then + return lpegmatch(splitter,s,1,sep1,sep2 or ",") + elseif sep1=="," then + return lpegmatch(splitter,s,1,sep1,sep2 or ".") + else + return lpegmatch(splitter,s,1,sep1 or ",",sep2 or ".") + end +end +local format_m=function(f) + n=n+1 + if not f or f=="" then + f="," + end + return format([[formattednumber(a%s,%q,".")]],n,f) +end +local format_M=function(f) + n=n+1 + if not f or f=="" then + f="." + end + return format([[formattednumber(a%s,%q,",")]],n,f) +end +local format_z=function(f) + n=n+(tonumber(f) or 1) + return "''" +end +local format_rest=function(s) + return format("%q",s) +end +local format_extension=function(extensions,f,name) + local extension=extensions[name] or "tostring(%s)" + local f=tonumber(f) or 1 + if f==0 then + return extension + elseif f==1 then + n=n+1 + local a="a"..n + return format(extension,a,a) + elseif f<0 then + local a="a"..(n+f+1) + return format(extension,a,a) + else + local t={} + for i=1,f do + n=n+1 + t[#t+1]="a"..n + end + return format(extension,unpack(t)) + end +end +local builder=Cs { "start", + start=( + ( + P("%")/""*( + V("!") ++V("s")+V("q")+V("i")+V("d")+V("f")+V("F")+V("g")+V("G")+V("e")+V("E")+V("x")+V("X")+V("o") ++V("c")+V("C")+V("S") ++V("Q") ++V("N") ++V("r")+V("h")+V("H")+V("u")+V("U")+V("p")+V("b")+V("t")+V("T")+V("l")+V("L")+V("I")+V("w") ++V("W") ++V("a") ++V("A") ++V("j")+V("J") ++V("m")+V("M") ++V("z") + )+V("*") + )*(P(-1)+Carg(1)) + )^0, + ["s"]=(prefix_any*P("s"))/format_s, + ["q"]=(prefix_any*P("q"))/format_q, + ["i"]=(prefix_any*P("i"))/format_i, + ["d"]=(prefix_any*P("d"))/format_d, + ["f"]=(prefix_any*P("f"))/format_f, + ["F"]=(prefix_any*P("F"))/format_F, + ["g"]=(prefix_any*P("g"))/format_g, + ["G"]=(prefix_any*P("G"))/format_G, + ["e"]=(prefix_any*P("e"))/format_e, + ["E"]=(prefix_any*P("E"))/format_E, + ["x"]=(prefix_any*P("x"))/format_x, + ["X"]=(prefix_any*P("X"))/format_X, + ["o"]=(prefix_any*P("o"))/format_o, + ["S"]=(prefix_any*P("S"))/format_S, + ["Q"]=(prefix_any*P("Q"))/format_S, + ["N"]=(prefix_any*P("N"))/format_N, + ["c"]=(prefix_any*P("c"))/format_c, + ["C"]=(prefix_any*P("C"))/format_C, + ["r"]=(prefix_any*P("r"))/format_r, + ["h"]=(prefix_any*P("h"))/format_h, + ["H"]=(prefix_any*P("H"))/format_H, + ["u"]=(prefix_any*P("u"))/format_u, + ["U"]=(prefix_any*P("U"))/format_U, + ["p"]=(prefix_any*P("p"))/format_p, + ["b"]=(prefix_any*P("b"))/format_b, + ["t"]=(prefix_tab*P("t"))/format_t, + ["T"]=(prefix_tab*P("T"))/format_T, + ["l"]=(prefix_any*P("l"))/format_l, + ["L"]=(prefix_any*P("L"))/format_L, + ["I"]=(prefix_any*P("I"))/format_I, + ["w"]=(prefix_any*P("w"))/format_w, + ["W"]=(prefix_any*P("W"))/format_W, + ["j"]=(prefix_any*P("j"))/format_j, + ["J"]=(prefix_any*P("J"))/format_J, + ["m"]=(prefix_tab*P("m"))/format_m, + ["M"]=(prefix_tab*P("M"))/format_M, + ["z"]=(prefix_any*P("z"))/format_z, + ["a"]=(prefix_any*P("a"))/format_a, + ["A"]=(prefix_any*P("A"))/format_A, + ["*"]=Cs(((1-P("%"))^1+P("%%")/"%%")^1)/format_rest, + ["?"]=Cs(((1-P("%"))^1 )^1)/format_rest, + ["!"]=Carg(2)*prefix_any*P("!")*C((1-P("!"))^1)*P("!")/format_extension, +} +local direct=Cs ( + P("%")*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*P(-1)/[[local format = string.format return 
function(str) return format("%0",str) end]] +) +local function make(t,str) + local f + local p + local p=lpegmatch(direct,str) + if p then + f=loadstripped(p)() + else + n=0 + p=lpegmatch(builder,str,1,t._connector_,t._extensions_) + if n>0 then + p=format(template,preamble,t._preamble_,arguments[n],p) + f=loadstripped(p,t._environment_)() + else + f=function() return str end + end + end + t[str]=f + return f +end +local function use(t,fmt,...) + return t[fmt](...) +end +strings.formatters={} +if _LUAVERSION<5.2 then + function strings.formatters.new(noconcat) + local t={ _type_="formatter",_connector_=noconcat and "," or "..",_extensions_={},_preamble_=preamble,_environment_={} } + setmetatable(t,{ __index=make,__call=use }) + return t + end +else + function strings.formatters.new(noconcat) + local e={} + for k,v in next,environment do + e[k]=v + end + local t={ _type_="formatter",_connector_=noconcat and "," or "..",_extensions_={},_preamble_="",_environment_=e } + setmetatable(t,{ __index=make,__call=use }) + return t + end +end +local formatters=strings.formatters.new() +string.formatters=formatters +string.formatter=function(str,...) return formatters[str](...) end +local function add(t,name,template,preamble) + if type(t)=="table" and t._type_=="formatter" then + t._extensions_[name]=template or "%s" + if type(preamble)=="string" then + t._preamble_=preamble.."\n"..t._preamble_ + elseif type(preamble)=="table" then + for k,v in next,preamble do + t._environment_[k]=v + end + end + end +end +strings.formatters.add=add +patterns.xmlescape=Cs((P("<")/"<"+P(">")/">"+P("&")/"&"+P('"')/"""+P(1))^0) +patterns.texescape=Cs((C(S("#$%\\{}"))/"\\%1"+P(1))^0) +patterns.luaescape=Cs(((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0) +patterns.luaquoted=Cs(Cc('"')*((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0*Cc('"')) +if _LUAVERSION<5.2 then + add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],"local xmlescape = lpeg.patterns.xmlescape") + add(formatters,"tex",[[lpegmatch(texescape,%s)]],"local texescape = lpeg.patterns.texescape") + add(formatters,"lua",[[lpegmatch(luaescape,%s)]],"local luaescape = lpeg.patterns.luaescape") +else + add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],{ xmlescape=lpeg.patterns.xmlescape }) + add(formatters,"tex",[[lpegmatch(texescape,%s)]],{ texescape=lpeg.patterns.texescape }) + add(formatters,"lua",[[lpegmatch(luaescape,%s)]],{ luaescape=lpeg.patterns.luaescape }) +end +local dquote=patterns.dquote +local equote=patterns.escaped+dquote/'\\"'+1 +local space=patterns.space +local cquote=Cc('"') +local pattern=Cs(dquote*(equote-P(-2))^0*dquote) ++Cs(cquote*(equote-space)^0*space*equote^0*cquote) +function string.optionalquoted(str) + return lpegmatch(pattern,str) or str +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['luat-basics-gen']={ + version=1.100, + comment="companion to luatex-*.tex", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end +local dummyfunction=function() +end +local dummyreporter=function(c) + return function(...) + (texio.reporter or texio.write_nl)(c.." 
: "..string.formatters(...)) + end +end +statistics={ + register=dummyfunction, + starttiming=dummyfunction, + stoptiming=dummyfunction, + elapsedtime=nil, +} +directives={ + register=dummyfunction, + enable=dummyfunction, + disable=dummyfunction, +} +trackers={ + register=dummyfunction, + enable=dummyfunction, + disable=dummyfunction, +} +experiments={ + register=dummyfunction, + enable=dummyfunction, + disable=dummyfunction, +} +storage={ + register=dummyfunction, + shared={}, +} +logs={ + new=dummyreporter, + reporter=dummyreporter, + messenger=dummyreporter, + report=dummyfunction, +} +callbacks={ + register=function(n,f) return callback.register(n,f) end, +} +utilities={ + storage={ + allocate=function(t) return t or {} end, + mark=function(t) return t or {} end, + }, +} +characters=characters or { + data={} +} +texconfig.kpse_init=true +resolvers=resolvers or {} +local remapper={ + otf="opentype fonts", + ttf="truetype fonts", + ttc="truetype fonts", + dfont="truetype fonts", + cid="cid maps", + cidmap="cid maps", + fea="font feature files", + pfa="type1 fonts", + pfb="type1 fonts", + afm="afm", +} +function resolvers.findfile(name,fileformat) + name=string.gsub(name,"\\","/") + if not fileformat or fileformat=="" then + fileformat=file.suffix(name) + if fileformat=="" then + fileformat="tex" + end + end + fileformat=string.lower(fileformat) + fileformat=remapper[fileformat] or fileformat + local found=kpse.find_file(name,fileformat) + if not found or found=="" then + found=kpse.find_file(name,"other text files") + end + return found +end +resolvers.findbinfile=resolvers.findfile +function resolvers.loadbinfile(filename,filetype) + local data=io.loaddata(filename) + return true,data,#data +end +function resolvers.resolve(s) + return s +end +function resolvers.unresolve(s) + return s +end +caches={} +local writable=nil +local readables={} +local usingjit=jit +if not caches.namespace or caches.namespace=="" or caches.namespace=="context" then + caches.namespace='generic' +end +do + local cachepaths=kpse.expand_var('$TEXMFCACHE') or "" + if cachepaths=="" or cachepaths=="$TEXMFCACHE" then + cachepaths=kpse.expand_var('$TEXMFVAR') or "" + end + if cachepaths=="" or cachepaths=="$TEXMFVAR" then + cachepaths=kpse.expand_var('$VARTEXMF') or "" + end + if cachepaths=="" then + local fallbacks={ "TMPDIR","TEMPDIR","TMP","TEMP","HOME","HOMEPATH" } + for i=1,#fallbacks do + cachepaths=os.getenv(fallbacks[i]) or "" + if cachepath~="" and lfs.isdir(cachepath) then + break + end + end + end + if cachepaths=="" then + cachepaths="." 
+ end + cachepaths=string.split(cachepaths,os.type=="windows" and ";" or ":") + for i=1,#cachepaths do + local cachepath=cachepaths[i] + if not lfs.isdir(cachepath) then + lfs.mkdirs(cachepath) + if lfs.isdir(cachepath) then + texio.write(string.format("(created cache path: %s)",cachepath)) + end + end + if file.is_writable(cachepath) then + writable=file.join(cachepath,"luatex-cache") + lfs.mkdir(writable) + writable=file.join(writable,caches.namespace) + lfs.mkdir(writable) + break + end + end + for i=1,#cachepaths do + if file.is_readable(cachepaths[i]) then + readables[#readables+1]=file.join(cachepaths[i],"luatex-cache",caches.namespace) + end + end + if not writable then + texio.write_nl("quiting: fix your writable cache path") + os.exit() + elseif #readables==0 then + texio.write_nl("quiting: fix your readable cache path") + os.exit() + elseif #readables==1 and readables[1]==writable then + texio.write(string.format("(using cache: %s)",writable)) + else + texio.write(string.format("(using write cache: %s)",writable)) + texio.write(string.format("(using read cache: %s)",table.concat(readables," "))) + end +end +function caches.getwritablepath(category,subcategory) + local path=file.join(writable,category) + lfs.mkdir(path) + path=file.join(path,subcategory) + lfs.mkdir(path) + return path +end +function caches.getreadablepaths(category,subcategory) + local t={} + for i=1,#readables do + t[i]=file.join(readables[i],category,subcategory) + end + return t +end +local function makefullname(path,name) + if path and path~="" then + return file.addsuffix(file.join(path,name),"lua"),file.addsuffix(file.join(path,name),usingjit and "lub" or "luc") + end +end +function caches.is_writable(path,name) + local fullname=makefullname(path,name) + return fullname and file.is_writable(fullname) +end +function caches.loaddata(paths,name) + for i=1,#paths do + local data=false + local luaname,lucname=makefullname(paths[i],name) + if lucname and not lfs.isfile(lucname) and type(caches.compile)=="function" then + texio.write(string.format("(compiling luc: %s)",lucname)) + data=loadfile(luaname) + if data then + data=data() + end + if data then + caches.compile(data,luaname,lucname) + return data + end + end + if lucname and lfs.isfile(lucname) then + texio.write(string.format("(load luc: %s)",lucname)) + data=loadfile(lucname) + if data then + data=data() + end + if data then + return data + else + texio.write(string.format("(loading failed: %s)",lucname)) + end + end + if luaname and lfs.isfile(luaname) then + texio.write(string.format("(load lua: %s)",luaname)) + data=loadfile(luaname) + if data then + data=data() + end + if data then + return data + end + end + end +end +function caches.savedata(path,name,data) + local luaname,lucname=makefullname(path,name) + if luaname then + texio.write(string.format("(save: %s)",luaname)) + table.tofile(luaname,data,true) + if lucname and type(caches.compile)=="function" then + os.remove(lucname) + texio.write(string.format("(save: %s)",lucname)) + caches.compile(data,luaname,lucname) + end + end +end +function caches.compile(data,luaname,lucname) + local d=io.loaddata(luaname) + if not d or d=="" then + d=table.serialize(data,true) + end + if d and d~="" then + local f=io.open(lucname,'wb') + if f then + local s=loadstring(d) + if s then + f:write(string.dump(s,true)) + end + f:close() + end + end +end +function table.setmetatableindex(t,f) + if type(t)~="table" then + f=f or t + t={} + end + setmetatable(t,{ __index=f }) + return t +end +arguments={} +if arg then 
+ for i=1,#arg do + local k,v=string.match(arg[i],"^%-%-([^=]+)=?(.-)$") + if k and v then + arguments[k]=v + end + end +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['data-con']={ + version=1.100, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local format,lower,gsub=string.format,string.lower,string.gsub +local trace_cache=false trackers.register("resolvers.cache",function(v) trace_cache=v end) +local trace_containers=false trackers.register("resolvers.containers",function(v) trace_containers=v end) +local trace_storage=false trackers.register("resolvers.storage",function(v) trace_storage=v end) +containers=containers or {} +local containers=containers +containers.usecache=true +local report_containers=logs.reporter("resolvers","containers") +local allocated={} +local mt={ + __index=function(t,k) + if k=="writable" then + local writable=caches.getwritablepath(t.category,t.subcategory) or { "." } + t.writable=writable + return writable + elseif k=="readables" then + local readables=caches.getreadablepaths(t.category,t.subcategory) or { "." } + t.readables=readables + return readables + end + end, + __storage__=true +} +function containers.define(category,subcategory,version,enabled) + if category and subcategory then + local c=allocated[category] + if not c then + c={} + allocated[category]=c + end + local s=c[subcategory] + if not s then + s={ + category=category, + subcategory=subcategory, + storage={}, + enabled=enabled, + version=version or math.pi, + trace=false, + } + setmetatable(s,mt) + c[subcategory]=s + end + return s + end +end +function containers.is_usable(container,name) + return container.enabled and caches and caches.is_writable(container.writable,name) +end +function containers.is_valid(container,name) + if name and name~="" then + local storage=container.storage[name] + return storage and storage.cache_version==container.version + else + return false + end +end +function containers.read(container,name) + local storage=container.storage + local stored=storage[name] + if not stored and container.enabled and caches and containers.usecache then + stored=caches.loaddata(container.readables,name) + if stored and stored.cache_version==container.version then + if trace_cache or trace_containers then + report_containers("action %a, category %a, name %a","load",container.subcategory,name) + end + else + stored=nil + end + storage[name]=stored + elseif stored then + if trace_cache or trace_containers then + report_containers("action %a, category %a, name %a","reuse",container.subcategory,name) + end + end + return stored +end +function containers.write(container,name,data) + if data then + data.cache_version=container.version + if container.enabled and caches then + local unique,shared=data.unique,data.shared + data.unique,data.shared=nil,nil + caches.savedata(container.writable,name,data) + if trace_cache or trace_containers then + report_containers("action %a, category %a, name %a","save",container.subcategory,name) + end + data.unique,data.shared=unique,shared + end + if trace_cache or trace_containers then + report_containers("action %a, category %a, name %a","store",container.subcategory,name) + end + container.storage[name]=data + end + return data +end +function containers.content(container,name) + return container.storage[name] +end +function 
containers.cleanname(name) + return (gsub(lower(name),"[^%w\128-\255]+","-")) +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['luatex-fonts-nod']={ + version=1.001, + comment="companion to luatex-fonts.lua", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end +if tex.attribute[0]~=0 then + texio.write_nl("log","!") + texio.write_nl("log","! Attribute 0 is reserved for ConTeXt's font feature management and has to be") + texio.write_nl("log","! set to zero. Also, some attributes in the range 1-255 are used for special") + texio.write_nl("log","! purposes so setting them at the TeX end might break the font handler.") + texio.write_nl("log","!") + tex.attribute[0]=0 +end +attributes=attributes or {} +attributes.unsetvalue=-0x7FFFFFFF +local numbers,last={},127 +attributes.private=attributes.private or function(name) + local number=numbers[name] + if not number then + if last<255 then + last=last+1 + end + number=last + numbers[name]=number + end + return number +end +nodes={} +nodes.pool={} +nodes.handlers={} +local nodecodes={} for k,v in next,node.types () do nodecodes[string.gsub(v,"_","")]=k end +local whatcodes={} for k,v in next,node.whatsits() do whatcodes[string.gsub(v,"_","")]=k end +local glyphcodes={ [0]="character","glyph","ligature","ghost","left","right" } +local disccodes={ [0]="discretionary","explicit","automatic","regular","first","second" } +nodes.nodecodes=nodecodes +nodes.whatcodes=whatcodes +nodes.whatsitcodes=whatcodes +nodes.glyphcodes=glyphcodes +nodes.disccodes=disccodes +local free_node=node.free +local remove_node=node.remove +local new_node=node.new +local traverse_id=node.traverse_id +nodes.handlers.protectglyphs=node.protect_glyphs +nodes.handlers.unprotectglyphs=node.unprotect_glyphs +local math_code=nodecodes.math +local end_of_math=node.end_of_math +function node.end_of_math(n) + if n.id==math_code and n.subtype==1 then + return n + else + return end_of_math(n) + end +end +function nodes.remove(head,current,free_too) + local t=current + head,current=remove_node(head,current) + if t then + if free_too then + free_node(t) + t=nil + else + t.next,t.prev=nil,nil + end + end + return head,current,t +end +function nodes.delete(head,current) + return nodes.remove(head,current,true) +end +function nodes.pool.kern(k) + local n=new_node("kern",1) + n.kern=k + return n +end +local getfield=node.getfield +local setfield=node.setfield +nodes.getfield=getfield +nodes.setfield=setfield +nodes.getattr=getfield +nodes.setattr=setfield +nodes.tostring=node.tostring or tostring +nodes.copy=node.copy +nodes.copy_list=node.copy_list +nodes.delete=node.delete +nodes.dimensions=node.dimensions +nodes.end_of_math=node.end_of_math +nodes.flush_list=node.flush_list +nodes.flush_node=node.flush_node +nodes.free=node.free +nodes.insert_after=node.insert_after +nodes.insert_before=node.insert_before +nodes.hpack=node.hpack +nodes.new=node.new +nodes.tail=node.tail +nodes.traverse=node.traverse +nodes.traverse_id=node.traverse_id +nodes.slide=node.slide +nodes.vpack=node.vpack +nodes.first_glyph=node.first_glyph +nodes.first_character=node.first_character +nodes.has_glyph=node.has_glyph or node.first_glyph +nodes.current_attr=node.current_attr +nodes.do_ligature_n=node.do_ligature_n +nodes.has_field=node.has_field 
+nodes.last_node=node.last_node +nodes.usedlist=node.usedlist +nodes.protrusion_skippable=node.protrusion_skippable +nodes.write=node.write +nodes.has_attribute=node.has_attribute +nodes.set_attribute=node.set_attribute +nodes.unset_attribute=node.unset_attribute +nodes.protect_glyphs=node.protect_glyphs +nodes.unprotect_glyphs=node.unprotect_glyphs +nodes.kerning=node.kerning +nodes.ligaturing=node.ligaturing +nodes.mlist_to_hlist=node.mlist_to_hlist +local direct=node.direct +local nuts={} +nodes.nuts=nuts +local tonode=direct.tonode +local tonut=direct.todirect +nodes.tonode=tonode +nodes.tonut=tonut +nuts.tonode=tonode +nuts.tonut=tonut +local getfield=direct.getfield +local setfield=direct.setfield +nuts.getfield=getfield +nuts.setfield=setfield +nuts.getnext=direct.getnext +nuts.getprev=direct.getprev +nuts.getid=direct.getid +nuts.getattr=getfield +nuts.setattr=setfield +nuts.getfont=direct.getfont +nuts.getsubtype=direct.getsubtype +nuts.getchar=direct.getchar +nuts.insert_before=direct.insert_before +nuts.insert_after=direct.insert_after +nuts.delete=direct.delete +nuts.copy=direct.copy +nuts.tail=direct.tail +nuts.flush_list=direct.flush_list +nuts.end_of_math=direct.end_of_math +nuts.traverse=direct.traverse +nuts.traverse_id=direct.traverse_id +nuts.getprop=nuts.getattr +nuts.setprop=nuts.setattr +local new_nut=direct.new +nuts.new=new_nut +nuts.pool={} +function nuts.pool.kern(k) + local n=new_nut("kern",1) + setfield(n,"kern",k) + return n +end +local propertydata=direct.get_properties_table() +nodes.properties={ data=propertydata } +direct.set_properties_mode(true,true) +function direct.set_properties_mode() end +nuts.getprop=function(n,k) + local p=propertydata[n] + if p then + return p[k] + end +end +nuts.setprop=function(n,k,v) + if v then + local p=propertydata[n] + if p then + p[k]=v + else + propertydata[n]={ [k]=v } + end + end +end +nodes.setprop=nodes.setproperty +nodes.getprop=nodes.getproperty + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-ini']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local allocate=utilities.storage.allocate +local report_defining=logs.reporter("fonts","defining") +fonts=fonts or {} +local fonts=fonts +fonts.hashes={ identifiers=allocate() } +fonts.tables=fonts.tables or {} +fonts.helpers=fonts.helpers or {} +fonts.tracers=fonts.tracers or {} +fonts.specifiers=fonts.specifiers or {} +fonts.analyzers={} +fonts.readers={} +fonts.definers={ methods={} } +fonts.loggers={ register=function() end } +fontloader.totable=fontloader.to_table + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-con']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local next,tostring,rawget=next,tostring,rawget +local format,match,lower,gsub=string.format,string.match,string.lower,string.gsub +local utfbyte=utf.byte +local sort,insert,concat,sortedkeys,serialize,fastcopy=table.sort,table.insert,table.concat,table.sortedkeys,table.serialize,table.fastcopy +local derivetable=table.derive +local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end) 
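+-- trackers.register is the no-op stub from the luat-basics-gen closure above,
+-- so in this generic loader the trace_* flags here remain false unless set by
+-- hand; under ConTeXt they are presumably toggled via trackers.enable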
+local trace_scaling=false trackers.register("fonts.scaling",function(v) trace_scaling=v end) +local report_defining=logs.reporter("fonts","defining") +local fonts=fonts +local constructors=fonts.constructors or {} +fonts.constructors=constructors +local handlers=fonts.handlers or {} +fonts.handlers=handlers +local allocate=utilities.storage.allocate +local setmetatableindex=table.setmetatableindex +constructors.dontembed=allocate() +constructors.autocleanup=true +constructors.namemode="fullpath" +constructors.version=1.01 +constructors.cache=containers.define("fonts","constructors",constructors.version,false) +constructors.privateoffset=0xF0000 +constructors.cacheintex=true +constructors.keys={ + properties={ + encodingbytes="number", + embedding="number", + cidinfo={}, + format="string", + fontname="string", + fullname="string", + filename="filename", + psname="string", + name="string", + virtualized="boolean", + hasitalics="boolean", + autoitalicamount="basepoints", + nostackmath="boolean", + noglyphnames="boolean", + mode="string", + hasmath="boolean", + mathitalics="boolean", + textitalics="boolean", + finalized="boolean", + }, + parameters={ + mathsize="number", + scriptpercentage="float", + scriptscriptpercentage="float", + units="cardinal", + designsize="scaledpoints", + expansion={ + stretch="integerscale", + shrink="integerscale", + step="integerscale", + auto="boolean", + }, + protrusion={ + auto="boolean", + }, + slantfactor="float", + extendfactor="float", + factor="float", + hfactor="float", + vfactor="float", + size="scaledpoints", + units="scaledpoints", + scaledpoints="scaledpoints", + slantperpoint="scaledpoints", + spacing={ + width="scaledpoints", + stretch="scaledpoints", + shrink="scaledpoints", + extra="scaledpoints", + }, + xheight="scaledpoints", + quad="scaledpoints", + ascender="scaledpoints", + descender="scaledpoints", + synonyms={ + space="spacing.width", + spacestretch="spacing.stretch", + spaceshrink="spacing.shrink", + extraspace="spacing.extra", + x_height="xheight", + space_stretch="spacing.stretch", + space_shrink="spacing.shrink", + extra_space="spacing.extra", + em="quad", + ex="xheight", + slant="slantperpoint", + }, + }, + description={ + width="basepoints", + height="basepoints", + depth="basepoints", + boundingbox={}, + }, + character={ + width="scaledpoints", + height="scaledpoints", + depth="scaledpoints", + italic="scaledpoints", + }, +} +local designsizes=allocate() +constructors.designsizes=designsizes +local loadedfonts=allocate() +constructors.loadedfonts=loadedfonts +local factors={ + pt=65536.0, + bp=65781.8, +} +function constructors.setfactor(f) + constructors.factor=factors[f or 'pt'] or factors.pt +end +constructors.setfactor() +function constructors.scaled(scaledpoints,designsize) + if scaledpoints<0 then + if designsize then + local factor=constructors.factor + if designsize>factor then + return (- scaledpoints/1000)*designsize + else + return (- scaledpoints/1000)*designsize*factor + end + else + return (- scaledpoints/1000)*10*factor + end + else + return scaledpoints + end +end +function constructors.cleanuptable(tfmdata) + if constructors.autocleanup and tfmdata.properties.virtualized then + for k,v in next,tfmdata.characters do + if v.commands then v.commands=nil end + end + end +end +function constructors.calculatescale(tfmdata,scaledpoints) + local parameters=tfmdata.parameters + if scaledpoints<0 then + scaledpoints=(- scaledpoints/1000)*(tfmdata.designsize or parameters.designsize) + end + return 
scaledpoints,scaledpoints/(parameters.units or 1000) +end +local unscaled={ + ScriptPercentScaleDown=true, + ScriptScriptPercentScaleDown=true, + RadicalDegreeBottomRaisePercent=true +} +function constructors.assignmathparameters(target,original) + local mathparameters=original.mathparameters + if mathparameters and next(mathparameters) then + local targetparameters=target.parameters + local targetproperties=target.properties + local targetmathparameters={} + local factor=targetproperties.math_is_scaled and 1 or targetparameters.factor + for name,value in next,mathparameters do + if unscaled[name] then + targetmathparameters[name]=value + else + targetmathparameters[name]=value*factor + end + end + if not targetmathparameters.FractionDelimiterSize then + targetmathparameters.FractionDelimiterSize=1.01*targetparameters.size + end + if not mathparameters.FractionDelimiterDisplayStyleSize then + targetmathparameters.FractionDelimiterDisplayStyleSize=2.40*targetparameters.size + end + target.mathparameters=targetmathparameters + end +end +function constructors.beforecopyingcharacters(target,original) +end +function constructors.aftercopyingcharacters(target,original) +end +constructors.sharefonts=false +constructors.nofsharedfonts=0 +local sharednames={} +function constructors.trytosharefont(target,tfmdata) + if constructors.sharefonts then + local characters=target.characters + local n=1 + local t={ target.psname } + local u=sortedkeys(characters) + for i=1,#u do + local k=u[i] + n=n+1;t[n]=k + n=n+1;t[n]=characters[k].index or k + end + local h=md5.HEX(concat(t," ")) + local s=sharednames[h] + if s then + if trace_defining then + report_defining("font %a uses backend resources of font %a",target.fullname,s) + end + target.fullname=s + constructors.nofsharedfonts=constructors.nofsharedfonts+1 + target.properties.sharedwith=s + else + sharednames[h]=target.fullname + end + end +end +function constructors.enhanceparameters(parameters) + local xheight=parameters.x_height + local quad=parameters.quad + local space=parameters.space + local stretch=parameters.space_stretch + local shrink=parameters.space_shrink + local extra=parameters.extra_space + local slant=parameters.slant + parameters.xheight=xheight + parameters.spacestretch=stretch + parameters.spaceshrink=shrink + parameters.extraspace=extra + parameters.em=quad + parameters.ex=xheight + parameters.slantperpoint=slant + parameters.spacing={ + width=space, + stretch=stretch, + shrink=shrink, + extra=extra, + } +end +function constructors.scale(tfmdata,specification) + local target={} + if tonumber(specification) then + specification={ size=specification } + end + target.specification=specification + local scaledpoints=specification.size + local relativeid=specification.relativeid + local properties=tfmdata.properties or {} + local goodies=tfmdata.goodies or {} + local resources=tfmdata.resources or {} + local descriptions=tfmdata.descriptions or {} + local characters=tfmdata.characters or {} + local changed=tfmdata.changed or {} + local shared=tfmdata.shared or {} + local parameters=tfmdata.parameters or {} + local mathparameters=tfmdata.mathparameters or {} + local targetcharacters={} + local targetdescriptions=derivetable(descriptions) + local targetparameters=derivetable(parameters) + local targetproperties=derivetable(properties) + local targetgoodies=goodies + target.characters=targetcharacters + target.descriptions=targetdescriptions + target.parameters=targetparameters + target.properties=targetproperties + 
target.goodies=targetgoodies + target.shared=shared + target.resources=resources + target.unscaled=tfmdata + local mathsize=tonumber(specification.mathsize) or 0 + local textsize=tonumber(specification.textsize) or scaledpoints + local forcedsize=tonumber(parameters.mathsize ) or 0 + local extrafactor=tonumber(specification.factor ) or 1 + if (mathsize==2 or forcedsize==2) and parameters.scriptpercentage then + scaledpoints=parameters.scriptpercentage*textsize/100 + elseif (mathsize==3 or forcedsize==3) and parameters.scriptscriptpercentage then + scaledpoints=parameters.scriptscriptpercentage*textsize/100 + elseif forcedsize>1000 then + scaledpoints=forcedsize + end + targetparameters.mathsize=mathsize + targetparameters.textsize=textsize + targetparameters.forcedsize=forcedsize + targetparameters.extrafactor=extrafactor + local tounicode=fonts.mappings.tounicode + local defaultwidth=resources.defaultwidth or 0 + local defaultheight=resources.defaultheight or 0 + local defaultdepth=resources.defaultdepth or 0 + local units=parameters.units or 1000 + if target.fonts then + target.fonts=fastcopy(target.fonts) + end + targetproperties.language=properties.language or "dflt" + targetproperties.script=properties.script or "dflt" + targetproperties.mode=properties.mode or "base" + local askedscaledpoints=scaledpoints + local scaledpoints,delta=constructors.calculatescale(tfmdata,scaledpoints,nil,specification) + local hdelta=delta + local vdelta=delta + target.designsize=parameters.designsize + target.units_per_em=units + local direction=properties.direction or tfmdata.direction or 0 + target.direction=direction + properties.direction=direction + target.size=scaledpoints + target.encodingbytes=properties.encodingbytes or 1 + target.embedding=properties.embedding or "subset" + target.tounicode=1 + target.cidinfo=properties.cidinfo + target.format=properties.format + target.cache=constructors.cacheintex and "yes" or "renew" + local fontname=properties.fontname or tfmdata.fontname + local fullname=properties.fullname or tfmdata.fullname + local filename=properties.filename or tfmdata.filename + local psname=properties.psname or tfmdata.psname + local name=properties.name or tfmdata.name + if not psname or psname=="" then + psname=fontname or (fullname and fonts.names.cleanname(fullname)) + end + target.fontname=fontname + target.fullname=fullname + target.filename=filename + target.psname=psname + target.name=name + properties.fontname=fontname + properties.fullname=fullname + properties.filename=filename + properties.psname=psname + properties.name=name + local expansion=parameters.expansion + if expansion then + target.stretch=expansion.stretch + target.shrink=expansion.shrink + target.step=expansion.step + target.auto_expand=expansion.auto + end + local protrusion=parameters.protrusion + if protrusion then + target.auto_protrude=protrusion.auto + end + local extendfactor=parameters.extendfactor or 0 + if extendfactor~=0 and extendfactor~=1 then + hdelta=hdelta*extendfactor + target.extend=extendfactor*1000 + else + target.extend=1000 + end + local slantfactor=parameters.slantfactor or 0 + if slantfactor~=0 then + target.slant=slantfactor*1000 + else + target.slant=0 + end + targetparameters.factor=delta + targetparameters.hfactor=hdelta + targetparameters.vfactor=vdelta + targetparameters.size=scaledpoints + targetparameters.units=units + targetparameters.scaledpoints=askedscaledpoints + local isvirtual=properties.virtualized or tfmdata.type=="virtual" + local hasquality=target.auto_expand or 
target.auto_protrude + local hasitalics=properties.hasitalics + local autoitalicamount=properties.autoitalicamount + local stackmath=not properties.nostackmath + local nonames=properties.noglyphnames + local haskerns=properties.haskerns or properties.mode=="base" + local hasligatures=properties.hasligatures or properties.mode=="base" + if changed and not next(changed) then + changed=false + end + target.type=isvirtual and "virtual" or "real" + target.postprocessors=tfmdata.postprocessors + local targetslant=(parameters.slant or parameters[1] or 0)*factors.pt + local targetspace=(parameters.space or parameters[2] or 0)*hdelta + local targetspace_stretch=(parameters.space_stretch or parameters[3] or 0)*hdelta + local targetspace_shrink=(parameters.space_shrink or parameters[4] or 0)*hdelta + local targetx_height=(parameters.x_height or parameters[5] or 0)*vdelta + local targetquad=(parameters.quad or parameters[6] or 0)*hdelta + local targetextra_space=(parameters.extra_space or parameters[7] or 0)*hdelta + targetparameters.slant=targetslant + targetparameters.space=targetspace + targetparameters.space_stretch=targetspace_stretch + targetparameters.space_shrink=targetspace_shrink + targetparameters.x_height=targetx_height + targetparameters.quad=targetquad + targetparameters.extra_space=targetextra_space + local ascender=parameters.ascender + if ascender then + targetparameters.ascender=delta*ascender + end + local descender=parameters.descender + if descender then + targetparameters.descender=delta*descender + end + constructors.enhanceparameters(targetparameters) + local protrusionfactor=(targetquad~=0 and 1000/targetquad) or 0 + local scaledwidth=defaultwidth*hdelta + local scaledheight=defaultheight*vdelta + local scaleddepth=defaultdepth*vdelta + local hasmath=(properties.hasmath or next(mathparameters)) and true + if hasmath then + constructors.assignmathparameters(target,tfmdata) + properties.hasmath=true + target.nomath=false + target.MathConstants=target.mathparameters + else + properties.hasmath=false + target.nomath=true + target.mathparameters=nil + end + local italickey="italic" + local useitalics=true + if hasmath then + autoitalicamount=false + elseif properties.textitalics then + italickey="italic_correction" + useitalics=false + if properties.delaytextitalics then + autoitalicamount=false + end + end + if trace_defining then + report_defining("defining tfm, name %a, fullname %a, filename %a, hscale %a, vscale %a, math %a, italics %a", + name,fullname,filename,hdelta,vdelta, + hasmath and "enabled" or "disabled",useitalics and "enabled" or "disabled") + end + constructors.beforecopyingcharacters(target,tfmdata) + local sharedkerns={} + for unicode,character in next,characters do + local chr,description,index + if changed then + local c=changed[unicode] + if c then + description=descriptions[c] or descriptions[unicode] or character + character=characters[c] or character + index=description.index or c + else + description=descriptions[unicode] or character + index=description.index or unicode + end + else + description=descriptions[unicode] or character + index=description.index or unicode + end + local width=description.width + local height=description.height + local depth=description.depth + if width then width=hdelta*width else width=scaledwidth end + if height then height=vdelta*height else height=scaledheight end + if depth and depth~=0 then + depth=delta*depth + if nonames then + chr={ + index=index, + height=height, + depth=depth, + width=width, + } + else + chr={ + 
name=description.name, + index=index, + height=height, + depth=depth, + width=width, + } + end + else + if nonames then + chr={ + index=index, + height=height, + width=width, + } + else + chr={ + name=description.name, + index=index, + height=height, + width=width, + } + end + end + local isunicode=description.unicode + if isunicode then + chr.unicode=isunicode + chr.tounicode=tounicode(isunicode) + end + if hasquality then + local ve=character.expansion_factor + if ve then + chr.expansion_factor=ve*1000 + end + local vl=character.left_protruding + if vl then + chr.left_protruding=protrusionfactor*width*vl + end + local vr=character.right_protruding + if vr then + chr.right_protruding=protrusionfactor*width*vr + end + end + if autoitalicamount then + local vi=description.italic + if not vi then + local vi=description.boundingbox[3]-description.width+autoitalicamount + if vi>0 then + chr[italickey]=vi*hdelta + end + elseif vi~=0 then + chr[italickey]=vi*hdelta + end + elseif hasitalics then + local vi=description.italic + if vi and vi~=0 then + chr[italickey]=vi*hdelta + end + end + if hasmath then + local vn=character.next + if vn then + chr.next=vn + else + local vv=character.vert_variants + if vv then + local t={} + for i=1,#vv do + local vvi=vv[i] + t[i]={ + ["start"]=(vvi["start"] or 0)*vdelta, + ["end"]=(vvi["end"] or 0)*vdelta, + ["advance"]=(vvi["advance"] or 0)*vdelta, + ["extender"]=vvi["extender"], + ["glyph"]=vvi["glyph"], + } + end + chr.vert_variants=t + else + local hv=character.horiz_variants + if hv then + local t={} + for i=1,#hv do + local hvi=hv[i] + t[i]={ + ["start"]=(hvi["start"] or 0)*hdelta, + ["end"]=(hvi["end"] or 0)*hdelta, + ["advance"]=(hvi["advance"] or 0)*hdelta, + ["extender"]=hvi["extender"], + ["glyph"]=hvi["glyph"], + } + end + chr.horiz_variants=t + end + end + end + local va=character.top_accent + if va then + chr.top_accent=vdelta*va + end + if stackmath then + local mk=character.mathkerns + if mk then + local kerns={} + local v=mk.top_right if v then local k={} for i=1,#v do local vi=v[i] + k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern } + end kerns.top_right=k end + local v=mk.top_left if v then local k={} for i=1,#v do local vi=v[i] + k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern } + end kerns.top_left=k end + local v=mk.bottom_left if v then local k={} for i=1,#v do local vi=v[i] + k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern } + end kerns.bottom_left=k end + local v=mk.bottom_right if v then local k={} for i=1,#v do local vi=v[i] + k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern } + end kerns.bottom_right=k end + chr.mathkern=kerns + end + end + end + if haskerns then + local vk=character.kerns + if vk then + local s=sharedkerns[vk] + if not s then + s={} + for k,v in next,vk do s[k]=v*hdelta end + sharedkerns[vk]=s + end + chr.kerns=s + end + end + if hasligatures then + local vl=character.ligatures + if vl then + if true then + chr.ligatures=vl + else + local tt={} + for i,l in next,vl do + tt[i]=l + end + chr.ligatures=tt + end + end + end + if isvirtual then + local vc=character.commands + if vc then + local ok=false + for i=1,#vc do + local key=vc[i][1] + if key=="right" or key=="down" then + ok=true + break + end + end + if ok then + local tt={} + for i=1,#vc do + local ivc=vc[i] + local key=ivc[1] + if key=="right" then + tt[i]={ key,ivc[2]*hdelta } + elseif key=="down" then + tt[i]={ key,ivc[2]*vdelta } + elseif key=="rule" then + tt[i]={ key,ivc[2]*vdelta,ivc[3]*hdelta } + else + tt[i]=ivc + end + end + chr.commands=tt 
+ else + chr.commands=vc + end + chr.index=nil + end + end + targetcharacters[unicode]=chr + end + constructors.aftercopyingcharacters(target,tfmdata) + constructors.trytosharefont(target,tfmdata) + return target +end +function constructors.finalize(tfmdata) + if tfmdata.properties and tfmdata.properties.finalized then + return + end + if not tfmdata.characters then + return nil + end + if not tfmdata.goodies then + tfmdata.goodies={} + end + local parameters=tfmdata.parameters + if not parameters then + return nil + end + if not parameters.expansion then + parameters.expansion={ + stretch=tfmdata.stretch or 0, + shrink=tfmdata.shrink or 0, + step=tfmdata.step or 0, + auto=tfmdata.auto_expand or false, + } + end + if not parameters.protrusion then + parameters.protrusion={ + auto=auto_protrude + } + end + if not parameters.size then + parameters.size=tfmdata.size + end + if not parameters.extendfactor then + parameters.extendfactor=tfmdata.extend or 0 + end + if not parameters.slantfactor then + parameters.slantfactor=tfmdata.slant or 0 + end + if not parameters.designsize then + parameters.designsize=tfmdata.designsize or (factors.pt*10) + end + if not parameters.units then + parameters.units=tfmdata.units_per_em or 1000 + end + if not tfmdata.descriptions then + local descriptions={} + setmetatableindex(descriptions,function(t,k) local v={} t[k]=v return v end) + tfmdata.descriptions=descriptions + end + local properties=tfmdata.properties + if not properties then + properties={} + tfmdata.properties=properties + end + if not properties.virtualized then + properties.virtualized=tfmdata.type=="virtual" + end + if not tfmdata.properties then + tfmdata.properties={ + fontname=tfmdata.fontname, + filename=tfmdata.filename, + fullname=tfmdata.fullname, + name=tfmdata.name, + psname=tfmdata.psname, + encodingbytes=tfmdata.encodingbytes or 1, + embedding=tfmdata.embedding or "subset", + tounicode=tfmdata.tounicode or 1, + cidinfo=tfmdata.cidinfo or nil, + format=tfmdata.format or "type1", + direction=tfmdata.direction or 0, + } + end + if not tfmdata.resources then + tfmdata.resources={} + end + if not tfmdata.shared then + tfmdata.shared={} + end + if not properties.hasmath then + properties.hasmath=not tfmdata.nomath + end + tfmdata.MathConstants=nil + tfmdata.postprocessors=nil + tfmdata.fontname=nil + tfmdata.filename=nil + tfmdata.fullname=nil + tfmdata.name=nil + tfmdata.psname=nil + tfmdata.encodingbytes=nil + tfmdata.embedding=nil + tfmdata.tounicode=nil + tfmdata.cidinfo=nil + tfmdata.format=nil + tfmdata.direction=nil + tfmdata.type=nil + tfmdata.nomath=nil + tfmdata.designsize=nil + tfmdata.size=nil + tfmdata.stretch=nil + tfmdata.shrink=nil + tfmdata.step=nil + tfmdata.auto_expand=nil + tfmdata.auto_protrude=nil + tfmdata.extend=nil + tfmdata.slant=nil + tfmdata.units_per_em=nil + tfmdata.cache=nil + properties.finalized=true + return tfmdata +end +local hashmethods={} +constructors.hashmethods=hashmethods +function constructors.hashfeatures(specification) + local features=specification.features + if features then + local t,tn={},0 + for category,list in next,features do + if next(list) then + local hasher=hashmethods[category] + if hasher then + local hash=hasher(list) + if hash then + tn=tn+1 + t[tn]=category..":"..hash + end + end + end + end + if tn>0 then + return concat(t," & ") + end + end + return "unknown" +end +hashmethods.normal=function(list) + local s={} + local n=0 + for k,v in next,list do + if not k then + elseif k=="number" or k=="features" then + else + n=n+1 + 
s[n]=k + end + end + if n>0 then + sort(s) + for i=1,n do + local k=s[i] + s[i]=k..'='..tostring(list[k]) + end + return concat(s,"+") + end +end +function constructors.hashinstance(specification,force) + local hash,size,fallbacks=specification.hash,specification.size,specification.fallbacks + if force or not hash then + hash=constructors.hashfeatures(specification) + specification.hash=hash + end + if size<1000 and designsizes[hash] then + size=math.round(constructors.scaled(size,designsizes[hash])) + specification.size=size + end + if fallbacks then + return hash..' @ '..tostring(size)..' @ '..fallbacks + else + return hash..' @ '..tostring(size) + end +end +function constructors.setname(tfmdata,specification) + if constructors.namemode=="specification" then + local specname=specification.specification + if specname then + tfmdata.properties.name=specname + if trace_defining then + report_otf("overloaded fontname %a",specname) + end + end + end +end +function constructors.checkedfilename(data) + local foundfilename=data.foundfilename + if not foundfilename then + local askedfilename=data.filename or "" + if askedfilename~="" then + askedfilename=resolvers.resolve(askedfilename) + foundfilename=resolvers.findbinfile(askedfilename,"") or "" + if foundfilename=="" then + report_defining("source file %a is not found",askedfilename) + foundfilename=resolvers.findbinfile(file.basename(askedfilename),"") or "" + if foundfilename~="" then + report_defining("using source file %a due to cache mismatch",foundfilename) + end + end + end + data.foundfilename=foundfilename + end + return foundfilename +end +local formats=allocate() +fonts.formats=formats +setmetatableindex(formats,function(t,k) + local l=lower(k) + if rawget(t,k) then + t[k]=l + return l + end + return rawget(t,file.suffix(l)) +end) +local locations={} +local function setindeed(mode,target,group,name,action,position) + local t=target[mode] + if not t then + report_defining("fatal error in setting feature %a, group %a, mode %a",name,group,mode) + os.exit() + elseif position then + insert(t,position,{ name=name,action=action }) + else + for i=1,#t do + local ti=t[i] + if ti.name==name then + ti.action=action + return + end + end + insert(t,{ name=name,action=action }) + end +end +local function set(group,name,target,source) + target=target[group] + if not target then + report_defining("fatal target error in setting feature %a, group %a",name,group) + os.exit() + end + local source=source[group] + if not source then + report_defining("fatal source error in setting feature %a, group %a",name,group) + os.exit() + end + local node=source.node + local base=source.base + local position=source.position + if node then + setindeed("node",target,group,name,node,position) + end + if base then + setindeed("base",target,group,name,base,position) + end +end +local function register(where,specification) + local name=specification.name + if name and name~="" then + local default=specification.default + local description=specification.description + local initializers=specification.initializers + local processors=specification.processors + local manipulators=specification.manipulators + local modechecker=specification.modechecker + if default then + where.defaults[name]=default + end + if description and description~="" then + where.descriptions[name]=description + end + if initializers then + set('initializers',name,where,specification) + end + if processors then + set('processors',name,where,specification) + end + if manipulators then + 
set('manipulators',name,where,specification) + end + if modechecker then + where.modechecker=modechecker + end + end +end +constructors.registerfeature=register +function constructors.getfeatureaction(what,where,mode,name) + what=handlers[what].features + if what then + where=what[where] + if where then + mode=where[mode] + if mode then + for i=1,#mode do + local m=mode[i] + if m.name==name then + return m.action + end + end + end + end + end +end +function constructors.newhandler(what) + local handler=handlers[what] + if not handler then + handler={} + handlers[what]=handler + end + return handler +end +function constructors.newfeatures(what) + local handler=handlers[what] + local features=handler.features + if not features then + local tables=handler.tables + local statistics=handler.statistics + features=allocate { + defaults={}, + descriptions=tables and tables.features or {}, + used=statistics and statistics.usedfeatures or {}, + initializers={ base={},node={} }, + processors={ base={},node={} }, + manipulators={ base={},node={} }, + } + features.register=function(specification) return register(features,specification) end + handler.features=features + end + return features +end +function constructors.checkedfeatures(what,features) + local defaults=handlers[what].features.defaults + if features and next(features) then + features=fastcopy(features) + for key,value in next,defaults do + if features[key]==nil then + features[key]=value + end + end + return features + else + return fastcopy(defaults) + end +end +function constructors.initializefeatures(what,tfmdata,features,trace,report) + if features and next(features) then + local properties=tfmdata.properties or {} + local whathandler=handlers[what] + local whatfeatures=whathandler.features + local whatinitializers=whatfeatures.initializers + local whatmodechecker=whatfeatures.modechecker + local mode=properties.mode or (whatmodechecker and whatmodechecker(tfmdata,features,features.mode)) or features.mode or "base" + properties.mode=mode + features.mode=mode + local done={} + while true do + local redo=false + local initializers=whatfeatures.initializers[mode] + if initializers then + for i=1,#initializers do + local step=initializers[i] + local feature=step.name + local value=features[feature] + if not value then + elseif done[feature] then + else + local action=step.action + if trace then + report("initializing feature %a to %a for mode %a for font %a",feature, + value,mode,tfmdata.properties.fullname) + end + action(tfmdata,value,features) + if mode~=properties.mode or mode~=features.mode then + if whatmodechecker then + properties.mode=whatmodechecker(tfmdata,features,properties.mode) + features.mode=properties.mode + end + if mode~=properties.mode then + mode=properties.mode + redo=true + end + end + done[feature]=true + end + if redo then + break + end + end + if not redo then + break + end + else + break + end + end + properties.mode=mode + return true + else + return false + end +end +function constructors.collectprocessors(what,tfmdata,features,trace,report) + local processes,nofprocesses={},0 + if features and next(features) then + local properties=tfmdata.properties + local whathandler=handlers[what] + local whatfeatures=whathandler.features + local whatprocessors=whatfeatures.processors + local mode=properties.mode + local processors=whatprocessors[mode] + if processors then + for i=1,#processors do + local step=processors[i] + local feature=step.name + if features[feature] then + local action=step.action + if trace then + 
report("installing feature processor %a for mode %a for font %a",feature,mode,tfmdata.properties.fullname) + end + if action then + nofprocesses=nofprocesses+1 + processes[nofprocesses]=action + end + end + end + elseif trace then + report("no feature processors for mode %a for font %a",mode,properties.fullname) + end + end + return processes +end +function constructors.applymanipulators(what,tfmdata,features,trace,report) + if features and next(features) then + local properties=tfmdata.properties + local whathandler=handlers[what] + local whatfeatures=whathandler.features + local whatmanipulators=whatfeatures.manipulators + local mode=properties.mode + local manipulators=whatmanipulators[mode] + if manipulators then + for i=1,#manipulators do + local step=manipulators[i] + local feature=step.name + local value=features[feature] + if value then + local action=step.action + if trace then + report("applying feature manipulator %a for mode %a for font %a",feature,mode,properties.fullname) + end + if action then + action(tfmdata,feature,value) + end + end + end + end + end +end +function constructors.addcoreunicodes(unicodes) + if not unicodes then + unicodes={} + end + unicodes.space=0x0020 + unicodes.hyphen=0x002D + unicodes.zwj=0x200D + unicodes.zwnj=0x200C + return unicodes +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['luatex-font-enc']={ + version=1.001, + comment="companion to luatex-*.tex", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end +local fonts=fonts +fonts.encodings={} +fonts.encodings.agl={} +fonts.encodings.known={} +setmetatable(fonts.encodings.agl,{ __index=function(t,k) + if k=="unicodes" then + texio.write(" ") + local unicodes=dofile(resolvers.findfile("font-age.lua")) + fonts.encodings.agl={ unicodes=unicodes } + return unicodes + else + return nil + end +end }) + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-cid']={ + version=1.001, + comment="companion to font-otf.lua (cidmaps)", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local format,match,lower=string.format,string.match,string.lower +local tonumber=tonumber +local P,S,R,C,V,lpegmatch=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.V,lpeg.match +local fonts,logs,trackers=fonts,logs,trackers +local trace_loading=false trackers.register("otf.loading",function(v) trace_loading=v end) +local report_otf=logs.reporter("fonts","otf loading") +local cid={} +fonts.cid=cid +local cidmap={} +local cidmax=10 +local number=C(R("09","af","AF")^1) +local space=S(" \n\r\t") +local spaces=space^0 +local period=P(".") +local periods=period*period +local name=P("/")*C((1-space)^1) +local unicodes,names={},{} +local function do_one(a,b) + unicodes[tonumber(a)]=tonumber(b,16) +end +local function do_range(a,b,c) + c=tonumber(c,16) + for i=tonumber(a),tonumber(b) do + unicodes[i]=c + c=c+1 + end +end +local function do_name(a,b) + names[tonumber(a)]=b +end +local grammar=P { "start", + start=number*spaces*number*V("series"), + series=(spaces*(V("one")+V("range")+V("named")))^1, + one=(number*spaces*number)/do_one, + range=(number*periods*number*spaces*number)/do_range, + 
named=(number*spaces*name)/do_name +} +local function loadcidfile(filename) + local data=io.loaddata(filename) + if data then + unicodes,names={},{} + lpegmatch(grammar,data) + local supplement,registry,ordering=match(filename,"^(.-)%-(.-)%-()%.(.-)$") + return { + supplement=supplement, + registry=registry, + ordering=ordering, + filename=filename, + unicodes=unicodes, + names=names, + } + end +end +cid.loadfile=loadcidfile +local template="%s-%s-%s.cidmap" +local function locate(registry,ordering,supplement) + local filename=format(template,registry,ordering,supplement) + local hashname=lower(filename) + local found=cidmap[hashname] + if not found then + if trace_loading then + report_otf("checking cidmap, registry %a, ordering %a, supplement %a, filename %a",registry,ordering,supplement,filename) + end + local fullname=resolvers.findfile(filename,'cid') or "" + if fullname~="" then + found=loadcidfile(fullname) + if found then + if trace_loading then + report_otf("using cidmap file %a",filename) + end + cidmap[hashname]=found + found.usedname=file.basename(filename) + end + end + end + return found +end +function cid.getmap(specification) + if not specification then + report_otf("invalid cidinfo specification, table expected") + return + end + local registry=specification.registry + local ordering=specification.ordering + local supplement=specification.supplement + local filename=format(registry,ordering,supplement) + local lowername=lower(filename) + local found=cidmap[lowername] + if found then + return found + end + if ordering=="Identity" then + local found={ + supplement=supplement, + registry=registry, + ordering=ordering, + filename=filename, + unicodes={}, + names={}, + } + cidmap[lowername]=found + return found + end + if trace_loading then + report_otf("cidmap needed, registry %a, ordering %a, supplement %a",registry,ordering,supplement) + end + found=locate(registry,ordering,supplement) + if not found then + local supnum=tonumber(supplement) + local cidnum=nil + if supnum0 then + for s=supnum-1,0,-1 do + local c=locate(registry,ordering,s) + if c then + found,cidnum=c,s + break + end + end + end + registry=lower(registry) + ordering=lower(ordering) + if found and cidnum>0 then + for s=0,cidnum-1 do + local filename=format(template,registry,ordering,s) + if not cidmap[filename] then + cidmap[filename]=found + end + end + end + end + return found +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-map']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local tonumber,next,type=tonumber,next,type +local match,format,find,concat,gsub,lower=string.match,string.format,string.find,table.concat,string.gsub,string.lower +local P,R,S,C,Ct,Cc,lpegmatch=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cc,lpeg.match +local utfbyte=utf.byte +local floor=math.floor +local formatters=string.formatters +local trace_loading=false trackers.register("fonts.loading",function(v) trace_loading=v end) +local trace_mapping=false trackers.register("fonts.mapping",function(v) trace_unimapping=v end) +local report_fonts=logs.reporter("fonts","loading") +local fonts=fonts or {} +local mappings=fonts.mappings or {} +fonts.mappings=mappings +local function loadlumtable(filename) + local lumname=file.replacesuffix(file.basename(filename),"lum") + local 
lumfile=resolvers.findfile(lumname,"map") or "" + if lumfile~="" and lfs.isfile(lumfile) then + if trace_loading or trace_mapping then + report_fonts("loading map table %a",lumfile) + end + lumunic=dofile(lumfile) + return lumunic,lumfile + end +end +local hex=R("AF","09") +local hexfour=(hex*hex*hex*hex)/function(s) return tonumber(s,16) end +local hexsix=(hex*hex*hex*hex*hex*hex)/function(s) return tonumber(s,16) end +local dec=(R("09")^1)/tonumber +local period=P(".") +local unicode=P("uni")*(hexfour*(period+P(-1))*Cc(false)+Ct(hexfour^1)*Cc(true)) +local ucode=P("u")*(hexsix*(period+P(-1))*Cc(false)+Ct(hexsix^1)*Cc(true)) +local index=P("index")*dec*Cc(false) +local parser=unicode+ucode+index +local parsers={} +local function makenameparser(str) + if not str or str=="" then + return parser + else + local p=parsers[str] + if not p then + p=P(str)*period*dec*Cc(false) + parsers[str]=p + end + return p + end +end +local f_single=formatters["%04X"] +local f_double=formatters["%04X%04X"] +local function tounicode16(unicode,name) + if unicode<0x10000 then + return f_single(unicode) + elseif unicode<0x1FFFFFFFFF then + return f_double(floor(unicode/1024),unicode%1024+0xDC00) + else + report_fonts("can't convert %a in %a into tounicode",unicode,name) + end +end +local function tounicode16sequence(unicodes,name) + local t={} + for l=1,#unicodes do + local u=unicodes[l] + if u<0x10000 then + t[l]=f_single(u) + elseif unicode<0x1FFFFFFFFF then + t[l]=f_double(floor(u/1024),u%1024+0xDC00) + else + report_fonts ("can't convert %a in %a into tounicode",u,name) + return + end + end + return concat(t) +end +local function tounicode(unicode,name) + if type(unicode)=="table" then + local t={} + for l=1,#unicode do + local u=unicode[l] + if u<0x10000 then + t[l]=f_single(u) + elseif u<0x1FFFFFFFFF then + t[l]=f_double(floor(u/1024),u%1024+0xDC00) + else + report_fonts ("can't convert %a in %a into tounicode",u,name) + return + end + end + return concat(t) + else + if unicode<0x10000 then + return f_single(unicode) + elseif unicode<0x1FFFFFFFFF then + return f_double(floor(unicode/1024),unicode%1024+0xDC00) + else + report_fonts("can't convert %a in %a into tounicode",unicode,name) + end + end +end +local function fromunicode16(str) + if #str==4 then + return tonumber(str,16) + else + local l,r=match(str,"(....)(....)") + return (tonumber(l,16))*0x400+tonumber(r,16)-0xDC00 + end +end +mappings.loadlumtable=loadlumtable +mappings.makenameparser=makenameparser +mappings.tounicode=tounicode +mappings.tounicode16=tounicode16 +mappings.tounicode16sequence=tounicode16sequence +mappings.fromunicode16=fromunicode16 +local ligseparator=P("_") +local varseparator=P(".") +local namesplitter=Ct(C((1-ligseparator-varseparator)^1)*(ligseparator*C((1-ligseparator-varseparator)^1))^0) +local overloads={ + IJ={ name="I_J",unicode={ 0x49,0x4A },mess=0x0132 }, + ij={ name="i_j",unicode={ 0x69,0x6A },mess=0x0133 }, + ff={ name="f_f",unicode={ 0x66,0x66 },mess=0xFB00 }, + fi={ name="f_i",unicode={ 0x66,0x69 },mess=0xFB01 }, + fl={ name="f_l",unicode={ 0x66,0x6C },mess=0xFB02 }, + ffi={ name="f_f_i",unicode={ 0x66,0x66,0x69 },mess=0xFB03 }, + ffl={ name="f_f_l",unicode={ 0x66,0x66,0x6C },mess=0xFB04 }, + fj={ name="f_j",unicode={ 0x66,0x6A } }, + fk={ name="f_k",unicode={ 0x66,0x6B } }, +} +for k,v in next,overloads do + local name=v.name + local mess=v.mess + if name then + overloads[name]=v + end + if mess then + overloads[mess]=v + end +end +mappings.overloads=overloads +function mappings.addtounicode(data,filename) + local 
resources=data.resources + local properties=data.properties + local descriptions=data.descriptions + local unicodes=resources.unicodes + local lookuptypes=resources.lookuptypes + if not unicodes then + return + end + unicodes['space']=unicodes['space'] or 32 + unicodes['hyphen']=unicodes['hyphen'] or 45 + unicodes['zwj']=unicodes['zwj'] or 0x200D + unicodes['zwnj']=unicodes['zwnj'] or 0x200C + local private=fonts.constructors.privateoffset + local unicodevector=fonts.encodings.agl.unicodes + local missing={} + local lumunic,uparser,oparser + local cidinfo,cidnames,cidcodes,usedmap + cidinfo=properties.cidinfo + usedmap=cidinfo and fonts.cid.getmap(cidinfo) + if usedmap then + oparser=usedmap and makenameparser(cidinfo.ordering) + cidnames=usedmap.names + cidcodes=usedmap.unicodes + end + uparser=makenameparser() + local ns,nl=0,0 + for unic,glyph in next,descriptions do + local index=glyph.index + local name=glyph.name + local r=overloads[name] + if r then + glyph.unicode=r.unicode + elseif unic==-1 or unic>=private or (unic>=0xE000 and unic<=0xF8FF) or unic==0xFFFE or unic==0xFFFF then + local unicode=lumunic and lumunic[name] or unicodevector[name] + if unicode then + glyph.unicode=unicode + ns=ns+1 + end + if (not unicode) and usedmap then + local foundindex=lpegmatch(oparser,name) + if foundindex then + unicode=cidcodes[foundindex] + if unicode then + glyph.unicode=unicode + ns=ns+1 + else + local reference=cidnames[foundindex] + if reference then + local foundindex=lpegmatch(oparser,reference) + if foundindex then + unicode=cidcodes[foundindex] + if unicode then + glyph.unicode=unicode + ns=ns+1 + end + end + if not unicode or unicode=="" then + local foundcodes,multiple=lpegmatch(uparser,reference) + if foundcodes then + glyph.unicode=foundcodes + if multiple then + nl=nl+1 + unicode=true + else + ns=ns+1 + unicode=foundcodes + end + end + end + end + end + end + end + if not unicode or unicode=="" then + local split=lpegmatch(namesplitter,name) + local nsplit=split and #split or 0 + local t,n={},0 + unicode=true + for l=1,nsplit do + local base=split[l] + local u=unicodes[base] or unicodevector[base] + if not u then + break + elseif type(u)=="table" then + if u[1]>=private then + unicode=false + break + end + n=n+1 + t[n]=u[1] + else + if u>=private then + unicode=false + break + end + n=n+1 + t[n]=u + end + end + if n==0 then + elseif n==1 then + glyph.unicode=t[1] + else + glyph.unicode=t + end + nl=nl+1 + end + if not unicode or unicode=="" then + local foundcodes,multiple=lpegmatch(uparser,name) + if foundcodes then + glyph.unicode=foundcodes + if multiple then + nl=nl+1 + unicode=true + else + ns=ns+1 + unicode=foundcodes + end + end + end + local r=overloads[unicode] + if r then + unicode=r.unicode + glyph.unicode=unicode + end + if not unicode then + missing[name]=true + end + end + end + if next(missing) then + local guess={} + local function check(gname,code,unicode) + local description=descriptions[code] + local variant=description.name + if variant==gname then + return + end + local unic=unicodes[variant] + if unic==-1 or unic>=private or (unic>=0xE000 and unic<=0xF8FF) or unic==0xFFFE or unic==0xFFFF then + else + return + end + if descriptions[code].unicode then + return + end + local g=guess[variant] + if g then + g[gname]=unicode + else + guess[variant]={ [gname]=unicode } + end + end + for unicode,description in next,descriptions do + local slookups=description.slookups + if slookups then + local gname=description.name + for tag,data in next,slookups do + local 
lookuptype=lookuptypes[tag] + if lookuptype=="alternate" then + for i=1,#data do + check(gname,data[i],unicode) + end + elseif lookuptype=="substitution" then + check(gname,data,unicode) + end + end + end + local mlookups=description.mlookups + if mlookups then + local gname=description.name + for tag,list in next,mlookups do + local lookuptype=lookuptypes[tag] + if lookuptype=="alternate" then + for i=1,#list do + local data=list[i] + for i=1,#data do + check(gname,data[i],unicode) + end + end + elseif lookuptype=="substitution" then + for i=1,#list do + check(gname,list[i],unicode) + end + end + end + end + end + local done=true + while done do + done=false + for k,v in next,guess do + if type(v)~="number" then + for kk,vv in next,v do + if vv==-1 or vv>=private or (vv>=0xE000 and vv<=0xF8FF) or vv==0xFFFE or vv==0xFFFF then + local uu=guess[kk] + if type(uu)=="number" then + guess[k]=uu + done=true + end + else + guess[k]=vv + done=true + end + end + end + end + end + local orphans=0 + local guessed=0 + for k,v in next,guess do + if type(v)=="number" then + descriptions[unicodes[k]].unicode=descriptions[v].unicode or v + guessed=guessed+1 + else + local t=nil + local l=lower(k) + local u=unicodes[l] + if not u then + orphans=orphans+1 + elseif u==-1 or u>=private or (u>=0xE000 and u<=0xF8FF) or u==0xFFFE or u==0xFFFF then + local unicode=descriptions[u].unicode + if unicode then + descriptions[unicodes[k]].unicode=unicode + guessed=guessed+1 + else + orphans=orphans+1 + end + else + orphans=orphans+1 + end + end + end + if trace_loading and orphans>0 or guessed>0 then + report_fonts("%s glyphs with no related unicode, %s guessed, %s orphans",guessed+orphans,guessed,orphans) + end + end + if trace_mapping then + for unic,glyph in table.sortedhash(descriptions) do + local name=glyph.name + local index=glyph.index + local unicode=glyph.unicode + if unicode then + if type(unicode)=="table" then + local unicodes={} + for i=1,#unicode do + unicodes[i]=formatters("%U",unicode[i]) + end + report_fonts("internal slot %U, name %a, unicode %U, tounicode % t",index,name,unic,unicodes) + else + report_fonts("internal slot %U, name %a, unicode %U, tounicode %U",index,name,unic,unicode) + end + else + report_fonts("internal slot %U, name %a, unicode %U",index,name,unic) + end + end + end + if trace_loading and (ns>0 or nl>0) then + report_fonts("%s tounicode entries added, ligatures %s",nl+ns,ns) + end +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['luatex-fonts-syn']={ + version=1.001, + comment="companion to luatex-*.tex", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end +local fonts=fonts +fonts.names=fonts.names or {} +fonts.names.version=1.001 +fonts.names.basename="luatex-fonts-names" +fonts.names.new_to_old={} +fonts.names.old_to_new={} +fonts.names.cache=containers.define("fonts","data",fonts.names.version,true) +local data,loaded=nil,false +local fileformats={ "lua","tex","other text files" } +function fonts.names.reportmissingbase() + texio.write("") + fonts.names.reportmissingbase=nil +end +function fonts.names.reportmissingname() + texio.write("") + fonts.names.reportmissingname=nil +end +function fonts.names.resolve(name,sub) + if not loaded then + local basename=fonts.names.basename + if basename and 
basename~="" then + data=containers.read(fonts.names.cache,basename) + if not data then + basename=file.addsuffix(basename,"lua") + for i=1,#fileformats do + local format=fileformats[i] + local foundname=resolvers.findfile(basename,format) or "" + if foundname~="" then + data=dofile(foundname) + texio.write("") + break + end + end + end + end + loaded=true + end + if type(data)=="table" and data.version==fonts.names.version then + local condensed=string.gsub(string.lower(name),"[^%a%d]","") + local found=data.mappings and data.mappings[condensed] + if found then + local fontname,filename,subfont=found[1],found[2],found[3] + if subfont then + return filename,fontname + else + return filename,false + end + elseif fonts.names.reportmissingname then + fonts.names.reportmissingname() + return name,false + end + elseif fonts.names.reportmissingbase then + fonts.names.reportmissingbase() + end +end +fonts.names.resolvespec=fonts.names.resolve +function fonts.names.getfilename(askedname,suffix) + return "" +end +function fonts.names.ignoredfile(filename) + return false +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-tfm']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local next=next +local match=string.match +local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end) +local trace_features=false trackers.register("tfm.features",function(v) trace_features=v end) +local report_defining=logs.reporter("fonts","defining") +local report_tfm=logs.reporter("fonts","tfm loading") +local findbinfile=resolvers.findbinfile +local fonts=fonts +local handlers=fonts.handlers +local readers=fonts.readers +local constructors=fonts.constructors +local encodings=fonts.encodings +local tfm=constructors.newhandler("tfm") +local tfmfeatures=constructors.newfeatures("tfm") +local registertfmfeature=tfmfeatures.register +constructors.resolvevirtualtoo=false +fonts.formats.tfm="type1" +function tfm.setfeatures(tfmdata,features) + local okay=constructors.initializefeatures("tfm",tfmdata,features,trace_features,report_tfm) + if okay then + return constructors.collectprocessors("tfm",tfmdata,features,trace_features,report_tfm) + else + return {} + end +end +local function read_from_tfm(specification) + local filename=specification.filename + local size=specification.size + if trace_defining then + report_defining("loading tfm file %a at size %s",filename,size) + end + local tfmdata=font.read_tfm(filename,size) + if tfmdata then + local features=specification.features and specification.features.normal or {} + local resources=tfmdata.resources or {} + local properties=tfmdata.properties or {} + local parameters=tfmdata.parameters or {} + local shared=tfmdata.shared or {} + properties.name=tfmdata.name + properties.fontname=tfmdata.fontname + properties.psname=tfmdata.psname + properties.filename=specification.filename + properties.format=fonts.formats.tfm + parameters.size=size + shared.rawdata={} + shared.features=features + shared.processes=next(features) and tfm.setfeatures(tfmdata,features) or nil + tfmdata.properties=properties + tfmdata.resources=resources + tfmdata.parameters=parameters + tfmdata.shared=shared + parameters.slant=parameters.slant or parameters[1] or 0 + parameters.space=parameters.space or parameters[2] or 0 + 
parameters.space_stretch=parameters.space_stretch or parameters[3] or 0 + parameters.space_shrink=parameters.space_shrink or parameters[4] or 0 + parameters.x_height=parameters.x_height or parameters[5] or 0 + parameters.quad=parameters.quad or parameters[6] or 0 + parameters.extra_space=parameters.extra_space or parameters[7] or 0 + constructors.enhanceparameters(parameters) + if constructors.resolvevirtualtoo then + fonts.loggers.register(tfmdata,file.suffix(filename),specification) + local vfname=findbinfile(specification.name,'ovf') + if vfname and vfname~="" then + local vfdata=font.read_vf(vfname,size) + if vfdata then + local chars=tfmdata.characters + for k,v in next,vfdata.characters do + chars[k].commands=v.commands + end + properties.virtualized=true + tfmdata.fonts=vfdata.fonts + end + end + end + local allfeatures=tfmdata.shared.features or specification.features.normal + constructors.applymanipulators("tfm",tfmdata,allfeatures.normal,trace_features,report_tfm) + if not features.encoding then + local encoding,filename=match(properties.filename,"^(.-)%-(.*)$") + if filename and encoding and encodings.known and encodings.known[encoding] then + features.encoding=encoding + end + end + properties.haskerns=true + properties.haslogatures=true + resources.unicodes={} + resources.lookuptags={} + return tfmdata + end +end +local function check_tfm(specification,fullname) + local foundname=findbinfile(fullname,'tfm') or "" + if foundname=="" then + foundname=findbinfile(fullname,'ofm') or "" + end + if foundname=="" then + foundname=fonts.names.getfilename(fullname,"tfm") or "" + end + if foundname~="" then + specification.filename=foundname + specification.format="ofm" + return read_from_tfm(specification) + elseif trace_defining then + report_defining("loading tfm with name %a fails",specification.name) + end +end +readers.check_tfm=check_tfm +function readers.tfm(specification) + local fullname=specification.filename or "" + if fullname=="" then + local forced=specification.forced or "" + if forced~="" then + fullname=specification.name.."."..forced + else + fullname=specification.name + end + end + return check_tfm(specification,fullname) +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-afm']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local fonts,logs,trackers,containers,resolvers=fonts,logs,trackers,containers,resolvers +local next,type,tonumber=next,type,tonumber +local format,match,gmatch,lower,gsub,strip=string.format,string.match,string.gmatch,string.lower,string.gsub,string.strip +local abs=math.abs +local P,S,C,R,lpegmatch,patterns=lpeg.P,lpeg.S,lpeg.C,lpeg.R,lpeg.match,lpeg.patterns +local derivetable=table.derive +local trace_features=false trackers.register("afm.features",function(v) trace_features=v end) +local trace_indexing=false trackers.register("afm.indexing",function(v) trace_indexing=v end) +local trace_loading=false trackers.register("afm.loading",function(v) trace_loading=v end) +local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end) +local report_afm=logs.reporter("fonts","afm loading") +local setmetatableindex=table.setmetatableindex +local findbinfile=resolvers.findbinfile +local definers=fonts.definers +local readers=fonts.readers +local 
constructors=fonts.constructors +local afm=constructors.newhandler("afm") +local pfb=constructors.newhandler("pfb") +local afmfeatures=constructors.newfeatures("afm") +local registerafmfeature=afmfeatures.register +afm.version=1.500 +afm.cache=containers.define("fonts","afm",afm.version,true) +afm.autoprefixed=true +afm.helpdata={} +afm.syncspace=true +afm.addligatures=true +afm.addtexligatures=true +afm.addkerns=true +local overloads=fonts.mappings.overloads +local applyruntimefixes=fonts.treatments and fonts.treatments.applyfixes +local function setmode(tfmdata,value) + if value then + tfmdata.properties.mode=lower(value) + end +end +registerafmfeature { + name="mode", + description="mode", + initializers={ + base=setmode, + node=setmode, + } +} +local comment=P("Comment") +local spacing=patterns.spacer +local lineend=patterns.newline +local words=C((1-lineend)^1) +local number=C((R("09")+S("."))^1)/tonumber*spacing^0 +local data=lpeg.Carg(1) +local pattern=( + comment*spacing*( + data*( + ("CODINGSCHEME"*spacing*words )/function(fd,a) end+("DESIGNSIZE"*spacing*number*words )/function(fd,a) fd[ 1]=a end+("CHECKSUM"*spacing*number*words )/function(fd,a) fd[ 2]=a end+("SPACE"*spacing*number*"plus"*number*"minus"*number)/function(fd,a,b,c) fd[ 3],fd[ 4],fd[ 5]=a,b,c end+("QUAD"*spacing*number )/function(fd,a) fd[ 6]=a end+("EXTRASPACE"*spacing*number )/function(fd,a) fd[ 7]=a end+("NUM"*spacing*number*number*number )/function(fd,a,b,c) fd[ 8],fd[ 9],fd[10]=a,b,c end+("DENOM"*spacing*number*number )/function(fd,a,b ) fd[11],fd[12]=a,b end+("SUP"*spacing*number*number*number )/function(fd,a,b,c) fd[13],fd[14],fd[15]=a,b,c end+("SUB"*spacing*number*number )/function(fd,a,b) fd[16],fd[17]=a,b end+("SUPDROP"*spacing*number )/function(fd,a) fd[18]=a end+("SUBDROP"*spacing*number )/function(fd,a) fd[19]=a end+("DELIM"*spacing*number*number )/function(fd,a,b) fd[20],fd[21]=a,b end+("AXISHEIGHT"*spacing*number )/function(fd,a) fd[22]=a end + )+(1-lineend)^0 + )+(1-comment)^1 +)^0 +local function scan_comment(str) + local fd={} + lpegmatch(pattern,str,1,fd) + return fd +end +local keys={} +function keys.FontName (data,line) data.metadata.fontname=strip (line) + data.metadata.fullname=strip (line) end +function keys.ItalicAngle (data,line) data.metadata.italicangle=tonumber (line) end +function keys.IsFixedPitch(data,line) data.metadata.isfixedpitch=toboolean(line,true) end +function keys.CharWidth (data,line) data.metadata.charwidth=tonumber (line) end +function keys.XHeight (data,line) data.metadata.xheight=tonumber (line) end +function keys.Descender (data,line) data.metadata.descender=tonumber (line) end +function keys.Ascender (data,line) data.metadata.ascender=tonumber (line) end +function keys.Comment (data,line) + line=lower(line) + local designsize=match(line,"designsize[^%d]*(%d+)") + if designsize then data.metadata.designsize=tonumber(designsize) end +end +local function get_charmetrics(data,charmetrics,vector) + local characters=data.characters + local chr,ind={},0 + for k,v in gmatch(charmetrics,"([%a]+) +(.-) *;") do + if k=='C' then + v=tonumber(v) + if v<0 then + ind=ind+1 + else + ind=v + end + chr={ + index=ind + } + elseif k=='WX' then + chr.width=tonumber(v) + elseif k=='N' then + characters[v]=chr + elseif k=='B' then + local llx,lly,urx,ury=match(v,"^ *(.-) +(.-) +(.-) +(.-)$") + chr.boundingbox={ tonumber(llx),tonumber(lly),tonumber(urx),tonumber(ury) } + elseif k=='L' then + local plus,becomes=match(v,"^(.-) +(.-)$") + local ligatures=chr.ligatures + if ligatures then + 
ligatures[plus]=becomes + else + chr.ligatures={ [plus]=becomes } + end + end + end +end +local function get_kernpairs(data,kernpairs) + local characters=data.characters + for one,two,value in gmatch(kernpairs,"KPX +(.-) +(.-) +(.-)\n") do + local chr=characters[one] + if chr then + local kerns=chr.kerns + if kerns then + kerns[two]=tonumber(value) + else + chr.kerns={ [two]=tonumber(value) } + end + end + end +end +local function get_variables(data,fontmetrics) + for key,rest in gmatch(fontmetrics,"(%a+) *(.-)[\n\r]") do + local keyhandler=keys[key] + if keyhandler then + keyhandler(data,rest) + end + end +end +local function get_indexes(data,pfbname) + data.resources.filename=resolvers.unresolve(pfbname) + local pfbblob=fontloader.open(pfbname) + if pfbblob then + local characters=data.characters + local pfbdata=fontloader.to_table(pfbblob) + if pfbdata then + local glyphs=pfbdata.glyphs + if glyphs then + if trace_loading then + report_afm("getting index data from %a",pfbname) + end + for index,glyph in next,glyphs do + local name=glyph.name + if name then + local char=characters[name] + if char then + if trace_indexing then + report_afm("glyph %a has index %a",name,index) + end + char.index=index + end + end + end + elseif trace_loading then + report_afm("no glyph data in pfb file %a",pfbname) + end + elseif trace_loading then + report_afm("no data in pfb file %a",pfbname) + end + fontloader.close(pfbblob) + elseif trace_loading then + report_afm("invalid pfb file %a",pfbname) + end +end +local function readafm(filename) + local ok,afmblob,size=resolvers.loadbinfile(filename) + if ok and afmblob then + local data={ + resources={ + filename=resolvers.unresolve(filename), + version=afm.version, + creator="context mkiv", + }, + properties={ + hasitalics=false, + }, + goodies={}, + metadata={ + filename=file.removesuffix(file.basename(filename)) + }, + characters={ + }, + descriptions={ + }, + } + afmblob=gsub(afmblob,"StartCharMetrics(.-)EndCharMetrics",function(charmetrics) + if trace_loading then + report_afm("loading char metrics") + end + get_charmetrics(data,charmetrics,vector) + return "" + end) + afmblob=gsub(afmblob,"StartKernPairs(.-)EndKernPairs",function(kernpairs) + if trace_loading then + report_afm("loading kern pairs") + end + get_kernpairs(data,kernpairs) + return "" + end) + afmblob=gsub(afmblob,"StartFontMetrics%s+([%d%.]+)(.-)EndFontMetrics",function(version,fontmetrics) + if trace_loading then + report_afm("loading variables") + end + data.afmversion=version + get_variables(data,fontmetrics) + data.fontdimens=scan_comment(fontmetrics) + return "" + end) + return data + else + if trace_loading then + report_afm("no valid afm file %a",filename) + end + return nil + end +end +local addkerns,addligatures,addtexligatures,unify,normalize,fixnames +function afm.load(filename) + filename=resolvers.findfile(filename,'afm') or "" + if filename~="" and not fonts.names.ignoredfile(filename) then + local name=file.removesuffix(file.basename(filename)) + local data=containers.read(afm.cache,name) + local attr=lfs.attributes(filename) + local size,time=attr.size or 0,attr.modification or 0 + local pfbfile=file.replacesuffix(name,"pfb") + local pfbname=resolvers.findfile(pfbfile,"pfb") or "" + if pfbname=="" then + pfbname=resolvers.findfile(file.basename(pfbfile),"pfb") or "" + end + local pfbsize,pfbtime=0,0 + if pfbname~="" then + local attr=lfs.attributes(pfbname) + pfbsize=attr.size or 0 + pfbtime=attr.modification or 0 + end + if not data or data.size~=size or data.time~=time or 
data.pfbsize~=pfbsize or data.pfbtime~=pfbtime then + report_afm("reading %a",filename) + data=readafm(filename) + if data then + if pfbname~="" then + get_indexes(data,pfbname) + elseif trace_loading then + report_afm("no pfb file for %a",filename) + end + report_afm("unifying %a",filename) + unify(data,filename) + if afm.addligatures then + report_afm("add ligatures") + addligatures(data) + end + if afm.addtexligatures then + report_afm("add tex ligatures") + addtexligatures(data) + end + if afm.addkerns then + report_afm("add extra kerns") + addkerns(data) + end + normalize(data) + fixnames(data) + report_afm("add tounicode data") + fonts.mappings.addtounicode(data,filename) + data.size=size + data.time=time + data.pfbsize=pfbsize + data.pfbtime=pfbtime + report_afm("saving %a in cache",name) + data.resources.unicodes=nil + data=containers.write(afm.cache,name,data) + data=containers.read(afm.cache,name) + end + if applyruntimefixes and data then + applyruntimefixes(filename,data) + end + end + return data + else + return nil + end +end +local uparser=fonts.mappings.makenameparser() +unify=function(data,filename) + local unicodevector=fonts.encodings.agl.unicodes + local unicodes,names={},{} + local private=constructors.privateoffset + local descriptions=data.descriptions + for name,blob in next,data.characters do + local code=unicodevector[name] + if not code then + code=lpegmatch(uparser,name) + if not code then + code=private + private=private+1 + report_afm("assigning private slot %U for unknown glyph name %a",code,name) + end + end + local index=blob.index + unicodes[name]=code + names[name]=index + blob.name=name + descriptions[code]={ + boundingbox=blob.boundingbox, + width=blob.width, + kerns=blob.kerns, + index=index, + name=name, + } + end + for unicode,description in next,descriptions do + local kerns=description.kerns + if kerns then + local krn={} + for name,kern in next,kerns do + local unicode=unicodes[name] + if unicode then + krn[unicode]=kern + else + end + end + description.kerns=krn + end + end + data.characters=nil + local resources=data.resources + local filename=resources.filename or file.removesuffix(file.basename(filename)) + resources.filename=resolvers.unresolve(filename) + resources.unicodes=unicodes + resources.marks={} + resources.private=private +end +normalize=function(data) +end +fixnames=function(data) + for k,v in next,data.descriptions do + local n=v.name + local r=overloads[n] + if r then + local name=r.name + if trace_indexing then + report_afm("renaming characters %a to %a",n,name) + end + v.name=name + v.unicode=r.unicode + end + end +end +local addthem=function(rawdata,ligatures) + if ligatures then + local descriptions=rawdata.descriptions + local resources=rawdata.resources + local unicodes=resources.unicodes + for ligname,ligdata in next,ligatures do + local one=descriptions[unicodes[ligname]] + if one then + for _,pair in next,ligdata do + local two,three=unicodes[pair[1]],unicodes[pair[2]] + if two and three then + local ol=one.ligatures + if ol then + if not ol[two] then + ol[two]=three + end + else + one.ligatures={ [two]=three } + end + end + end + end + end + end +end +addligatures=function(rawdata) addthem(rawdata,afm.helpdata.ligatures ) end +addtexligatures=function(rawdata) addthem(rawdata,afm.helpdata.texligatures) end +addkerns=function(rawdata) + local descriptions=rawdata.descriptions + local resources=rawdata.resources + local unicodes=resources.unicodes + local function do_it_left(what) + if what then + for unicode,description 
in next,descriptions do + local kerns=description.kerns + if kerns then + local extrakerns + for complex,simple in next,what do + complex=unicodes[complex] + simple=unicodes[simple] + if complex and simple then + local ks=kerns[simple] + if ks and not kerns[complex] then + if extrakerns then + extrakerns[complex]=ks + else + extrakerns={ [complex]=ks } + end + end + end + end + if extrakerns then + description.extrakerns=extrakerns + end + end + end + end + end + local function do_it_copy(what) + if what then + for complex,simple in next,what do + complex=unicodes[complex] + simple=unicodes[simple] + if complex and simple then + local complexdescription=descriptions[complex] + if complexdescription then + local simpledescription=descriptions[complex] + if simpledescription then + local extrakerns + local kerns=simpledescription.kerns + if kerns then + for unicode,kern in next,kerns do + if extrakerns then + extrakerns[unicode]=kern + else + extrakerns={ [unicode]=kern } + end + end + end + local extrakerns=simpledescription.extrakerns + if extrakerns then + for unicode,kern in next,extrakerns do + if extrakerns then + extrakerns[unicode]=kern + else + extrakerns={ [unicode]=kern } + end + end + end + if extrakerns then + complexdescription.extrakerns=extrakerns + end + end + end + end + end + end + end + do_it_left(afm.helpdata.leftkerned) + do_it_left(afm.helpdata.bothkerned) + do_it_copy(afm.helpdata.bothkerned) + do_it_copy(afm.helpdata.rightkerned) +end +local function adddimensions(data) + if data then + for unicode,description in next,data.descriptions do + local bb=description.boundingbox + if bb then + local ht,dp=bb[4],-bb[2] + if ht==0 or ht<0 then + else + description.height=ht + end + if dp==0 or dp<0 then + else + description.depth=dp + end + end + end + end +end +local function copytotfm(data) + if data and data.descriptions then + local metadata=data.metadata + local resources=data.resources + local properties=derivetable(data.properties) + local descriptions=derivetable(data.descriptions) + local goodies=derivetable(data.goodies) + local characters={} + local parameters={} + local unicodes=resources.unicodes + for unicode,description in next,data.descriptions do + characters[unicode]={} + end + local filename=constructors.checkedfilename(resources) + local fontname=metadata.fontname or metadata.fullname + local fullname=metadata.fullname or metadata.fontname + local endash=0x0020 + local emdash=0x2014 + local spacer="space" + local spaceunits=500 + local monospaced=metadata.isfixedpitch + local charwidth=metadata.charwidth + local italicangle=metadata.italicangle + local charxheight=metadata.xheight and metadata.xheight>0 and metadata.xheight + properties.monospaced=monospaced + parameters.italicangle=italicangle + parameters.charwidth=charwidth + parameters.charxheight=charxheight + if properties.monospaced then + if descriptions[endash] then + spaceunits,spacer=descriptions[endash].width,"space" + end + if not spaceunits and descriptions[emdash] then + spaceunits,spacer=descriptions[emdash].width,"emdash" + end + if not spaceunits and charwidth then + spaceunits,spacer=charwidth,"charwidth" + end + else + if descriptions[endash] then + spaceunits,spacer=descriptions[endash].width,"space" + end + if not spaceunits and charwidth then + spaceunits,spacer=charwidth,"charwidth" + end + end + spaceunits=tonumber(spaceunits) + if spaceunits<200 then + end + parameters.slant=0 + parameters.space=spaceunits + parameters.space_stretch=500 + parameters.space_shrink=333 + 
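-- A standalone sketch (not part of the merged fontloader) of the intended
-- interword-space fallback chain in copytotfm above: prefer the width of the
-- space glyph, then (for monospaced fonts) the emdash width, then the global
-- CharWidth, and finally 500/1000em. Field names mirror the AFM metadata but
-- everything here is local to the sketch.
do
    local function sketch_spaceunits(descriptions, charwidth, monospaced)
        local space  = descriptions[0x0020]
        local emdash = descriptions[0x2014]
        if space and space.width then
            return space.width
        elseif monospaced and emdash and emdash.width then
            return emdash.width
        elseif charwidth then
            return charwidth
        else
            return 500
        end
    end
    assert(sketch_spaceunits({ [0x0020] = { width = 600 } }, nil, false) == 600)
    assert(sketch_spaceunits({ }, 520, true) == 520)
end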
parameters.x_height=400 + parameters.quad=1000 + if italicangle and italicangle~=0 then + parameters.italicangle=italicangle + parameters.italicfactor=math.cos(math.rad(90+italicangle)) + parameters.slant=- math.tan(italicangle*math.pi/180) + end + if monospaced then + parameters.space_stretch=0 + parameters.space_shrink=0 + elseif afm.syncspace then + parameters.space_stretch=spaceunits/2 + parameters.space_shrink=spaceunits/3 + end + parameters.extra_space=parameters.space_shrink + if charxheight then + parameters.x_height=charxheight + else + local x=0x0078 + if x then + local x=descriptions[x] + if x then + parameters.x_height=x.height + end + end + end + local fd=data.fontdimens + if fd and fd[8] and fd[9] and fd[10] then + for k,v in next,fd do + parameters[k]=v + end + end + parameters.designsize=(metadata.designsize or 10)*65536 + parameters.ascender=abs(metadata.ascender or 0) + parameters.descender=abs(metadata.descender or 0) + parameters.units=1000 + properties.spacer=spacer + properties.encodingbytes=2 + properties.format=fonts.formats[filename] or "type1" + properties.filename=filename + properties.fontname=fontname + properties.fullname=fullname + properties.psname=fullname + properties.name=filename or fullname or fontname + if next(characters) then + return { + characters=characters, + descriptions=descriptions, + parameters=parameters, + resources=resources, + properties=properties, + goodies=goodies, + } + end + end + return nil +end +function afm.setfeatures(tfmdata,features) + local okay=constructors.initializefeatures("afm",tfmdata,features,trace_features,report_afm) + if okay then + return constructors.collectprocessors("afm",tfmdata,features,trace_features,report_afm) + else + return {} + end +end +local function addtables(data) + local resources=data.resources + local lookuptags=resources.lookuptags + local unicodes=resources.unicodes + if not lookuptags then + lookuptags={} + resources.lookuptags=lookuptags + end + setmetatableindex(lookuptags,function(t,k) + local v=type(k)=="number" and ("lookup "..k) or k + t[k]=v + return v + end) + if not unicodes then + unicodes={} + resources.unicodes=unicodes + setmetatableindex(unicodes,function(t,k) + setmetatableindex(unicodes,nil) + for u,d in next,data.descriptions do + local n=d.name + if n then + t[n]=u + end + end + return rawget(t,k) + end) + end + constructors.addcoreunicodes(unicodes) +end +local function afmtotfm(specification) + local afmname=specification.filename or specification.name + if specification.forced=="afm" or specification.format=="afm" then + if trace_loading then + report_afm("forcing afm format for %a",afmname) + end + else + local tfmname=findbinfile(afmname,"ofm") or "" + if tfmname~="" then + if trace_loading then + report_afm("fallback from afm to tfm for %a",afmname) + end + return + end + end + if afmname~="" then + local features=constructors.checkedfeatures("afm",specification.features.normal) + specification.features.normal=features + constructors.hashinstance(specification,true) + specification=definers.resolve(specification) + local cache_id=specification.hash + local tfmdata=containers.read(constructors.cache,cache_id) + if not tfmdata then + local rawdata=afm.load(afmname) + if rawdata and next(rawdata) then + addtables(rawdata) + adddimensions(rawdata) + tfmdata=copytotfm(rawdata) + if tfmdata and next(tfmdata) then + local shared=tfmdata.shared + if not shared then + shared={} + tfmdata.shared=shared + end + shared.rawdata=rawdata + shared.features=features + 
shared.processes=afm.setfeatures(tfmdata,features) + end + elseif trace_loading then + report_afm("no (valid) afm file found with name %a",afmname) + end + tfmdata=containers.write(constructors.cache,cache_id,tfmdata) + end + return tfmdata + end +end +local function read_from_afm(specification) + local tfmdata=afmtotfm(specification) + if tfmdata then + tfmdata.properties.name=specification.name + tfmdata=constructors.scale(tfmdata,specification) + local allfeatures=tfmdata.shared.features or specification.features.normal + constructors.applymanipulators("afm",tfmdata,allfeatures,trace_features,report_afm) + fonts.loggers.register(tfmdata,'afm',specification) + end + return tfmdata +end +local function prepareligatures(tfmdata,ligatures,value) + if value then + local descriptions=tfmdata.descriptions + local hasligatures=false + for unicode,character in next,tfmdata.characters do + local description=descriptions[unicode] + local dligatures=description.ligatures + if dligatures then + local cligatures=character.ligatures + if not cligatures then + cligatures={} + character.ligatures=cligatures + end + for unicode,ligature in next,dligatures do + cligatures[unicode]={ + char=ligature, + type=0 + } + end + hasligatures=true + end + end + tfmdata.properties.hasligatures=hasligatures + end +end +local function preparekerns(tfmdata,kerns,value) + if value then + local rawdata=tfmdata.shared.rawdata + local resources=rawdata.resources + local unicodes=resources.unicodes + local descriptions=tfmdata.descriptions + local haskerns=false + for u,chr in next,tfmdata.characters do + local d=descriptions[u] + local newkerns=d[kerns] + if newkerns then + local kerns=chr.kerns + if not kerns then + kerns={} + chr.kerns=kerns + end + for k,v in next,newkerns do + local uk=unicodes[k] + if uk then + kerns[uk]=v + end + end + haskerns=true + end + end + tfmdata.properties.haskerns=haskerns + end +end +local list={ + [0x0027]=0x2019, +} +local function texreplacements(tfmdata,value) + local descriptions=tfmdata.descriptions + local characters=tfmdata.characters + for k,v in next,list do + characters [k]=characters [v] + descriptions[k]=descriptions[v] + end +end +local function ligatures (tfmdata,value) prepareligatures(tfmdata,'ligatures',value) end +local function texligatures(tfmdata,value) prepareligatures(tfmdata,'texligatures',value) end +local function kerns (tfmdata,value) preparekerns (tfmdata,'kerns',value) end +local function extrakerns (tfmdata,value) preparekerns (tfmdata,'extrakerns',value) end +registerafmfeature { + name="liga", + description="traditional ligatures", + initializers={ + base=ligatures, + node=ligatures, + } +} +registerafmfeature { + name="kern", + description="intercharacter kerning", + initializers={ + base=kerns, + node=kerns, + } +} +registerafmfeature { + name="extrakerns", + description="additional intercharacter kerning", + initializers={ + base=extrakerns, + node=extrakerns, + } +} +registerafmfeature { + name='tlig', + description='tex ligatures', + initializers={ + base=texligatures, + node=texligatures, + } +} +registerafmfeature { + name='trep', + description='tex replacements', + initializers={ + base=texreplacements, + node=texreplacements, + } +} +local check_tfm=readers.check_tfm +fonts.formats.afm="type1" +fonts.formats.pfb="type1" +local function check_afm(specification,fullname) + local foundname=findbinfile(fullname,'afm') or "" + if foundname=="" then + foundname=fonts.names.getfilename(fullname,"afm") or "" + end + if foundname=="" and afm.autoprefixed 
then + local encoding,shortname=match(fullname,"^(.-)%-(.*)$") + if encoding and shortname and fonts.encodings.known[encoding] then + shortname=findbinfile(shortname,'afm') or "" + if shortname~="" then + foundname=shortname + if trace_defining then + report_afm("stripping encoding prefix from filename %a",afmname) + end + end + end + end + if foundname~="" then + specification.filename=foundname + specification.format="afm" + return read_from_afm(specification) + end +end +function readers.afm(specification,method) + local fullname,tfmdata=specification.filename or "",nil + if fullname=="" then + local forced=specification.forced or "" + if forced~="" then + tfmdata=check_afm(specification,specification.name.."."..forced) + end + if not tfmdata then + method=method or definers.method or "afm or tfm" + if method=="tfm" then + tfmdata=check_tfm(specification,specification.name) + elseif method=="afm" then + tfmdata=check_afm(specification,specification.name) + elseif method=="tfm or afm" then + tfmdata=check_tfm(specification,specification.name) or check_afm(specification,specification.name) + else + tfmdata=check_afm(specification,specification.name) or check_tfm(specification,specification.name) + end + end + else + tfmdata=check_afm(specification,fullname) + end + return tfmdata +end +function readers.pfb(specification,method) + local original=specification.specification + if trace_defining then + report_afm("using afm reader for %a",original) + end + specification.specification=gsub(original,"%.pfb",".afm") + specification.forced="afm" + return readers.afm(specification,method) +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-afk']={ + version=1.001, + comment="companion to font-afm.lua", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files", + dataonly=true, +} +local allocate=utilities.storage.allocate +fonts.handlers.afm.helpdata={ + ligatures=allocate { + ['f']={ + { 'f','ff' }, + { 'i','fi' }, + { 'l','fl' }, + }, + ['ff']={ + { 'i','ffi' } + }, + ['fi']={ + { 'i','fii' } + }, + ['fl']={ + { 'i','fli' } + }, + ['s']={ + { 't','st' } + }, + ['i']={ + { 'j','ij' } + }, + }, + texligatures=allocate { + ['quoteleft']={ + { 'quoteleft','quotedblleft' } + }, + ['quoteright']={ + { 'quoteright','quotedblright' } + }, + ['hyphen']={ + { 'hyphen','endash' } + }, + ['endash']={ + { 'hyphen','emdash' } + } + }, + leftkerned=allocate { + AEligature="A",aeligature="a", + OEligature="O",oeligature="o", + IJligature="I",ijligature="i", + AE="A",ae="a", + OE="O",oe="o", + IJ="I",ij="i", + Ssharp="S",ssharp="s", + }, + rightkerned=allocate { + AEligature="E",aeligature="e", + OEligature="E",oeligature="e", + IJligature="J",ijligature="j", + AE="E",ae="e", + OE="E",oe="e", + IJ="J",ij="j", + Ssharp="S",ssharp="s", + }, + bothkerned=allocate { + Acircumflex="A",acircumflex="a", + Ccircumflex="C",ccircumflex="c", + Ecircumflex="E",ecircumflex="e", + Gcircumflex="G",gcircumflex="g", + Hcircumflex="H",hcircumflex="h", + Icircumflex="I",icircumflex="i", + Jcircumflex="J",jcircumflex="j", + Ocircumflex="O",ocircumflex="o", + Scircumflex="S",scircumflex="s", + Ucircumflex="U",ucircumflex="u", + Wcircumflex="W",wcircumflex="w", + Ycircumflex="Y",ycircumflex="y", + Agrave="A",agrave="a", + Egrave="E",egrave="e", + Igrave="I",igrave="i", + Ograve="O",ograve="o", + Ugrave="U",ugrave="u", + Ygrave="Y",ygrave="y", + 
Atilde="A",atilde="a", + Itilde="I",itilde="i", + Otilde="O",otilde="o", + Utilde="U",utilde="u", + Ntilde="N",ntilde="n", + Adiaeresis="A",adiaeresis="a",Adieresis="A",adieresis="a", + Ediaeresis="E",ediaeresis="e",Edieresis="E",edieresis="e", + Idiaeresis="I",idiaeresis="i",Idieresis="I",idieresis="i", + Odiaeresis="O",odiaeresis="o",Odieresis="O",odieresis="o", + Udiaeresis="U",udiaeresis="u",Udieresis="U",udieresis="u", + Ydiaeresis="Y",ydiaeresis="y",Ydieresis="Y",ydieresis="y", + Aacute="A",aacute="a", + Cacute="C",cacute="c", + Eacute="E",eacute="e", + Iacute="I",iacute="i", + Lacute="L",lacute="l", + Nacute="N",nacute="n", + Oacute="O",oacute="o", + Racute="R",racute="r", + Sacute="S",sacute="s", + Uacute="U",uacute="u", + Yacute="Y",yacute="y", + Zacute="Z",zacute="z", + Dstroke="D",dstroke="d", + Hstroke="H",hstroke="h", + Tstroke="T",tstroke="t", + Cdotaccent="C",cdotaccent="c", + Edotaccent="E",edotaccent="e", + Gdotaccent="G",gdotaccent="g", + Idotaccent="I",idotaccent="i", + Zdotaccent="Z",zdotaccent="z", + Amacron="A",amacron="a", + Emacron="E",emacron="e", + Imacron="I",imacron="i", + Omacron="O",omacron="o", + Umacron="U",umacron="u", + Ccedilla="C",ccedilla="c", + Kcedilla="K",kcedilla="k", + Lcedilla="L",lcedilla="l", + Ncedilla="N",ncedilla="n", + Rcedilla="R",rcedilla="r", + Scedilla="S",scedilla="s", + Tcedilla="T",tcedilla="t", + Ohungarumlaut="O",ohungarumlaut="o", + Uhungarumlaut="U",uhungarumlaut="u", + Aogonek="A",aogonek="a", + Eogonek="E",eogonek="e", + Iogonek="I",iogonek="i", + Uogonek="U",uogonek="u", + Aring="A",aring="a", + Uring="U",uring="u", + Abreve="A",abreve="a", + Ebreve="E",ebreve="e", + Gbreve="G",gbreve="g", + Ibreve="I",ibreve="i", + Obreve="O",obreve="o", + Ubreve="U",ubreve="u", + Ccaron="C",ccaron="c", + Dcaron="D",dcaron="d", + Ecaron="E",ecaron="e", + Lcaron="L",lcaron="l", + Ncaron="N",ncaron="n", + Rcaron="R",rcaron="r", + Scaron="S",scaron="s", + Tcaron="T",tcaron="t", + Zcaron="Z",zcaron="z", + dotlessI="I",dotlessi="i", + dotlessJ="J",dotlessj="j", + AEligature="AE",aeligature="ae",AE="AE",ae="ae", + OEligature="OE",oeligature="oe",OE="OE",oe="oe", + IJligature="IJ",ijligature="ij",IJ="IJ",ij="ij", + Lstroke="L",lstroke="l",Lslash="L",lslash="l", + Ostroke="O",ostroke="o",Oslash="O",oslash="o", + Ssharp="SS",ssharp="ss", + Aumlaut="A",aumlaut="a", + Eumlaut="E",eumlaut="e", + Iumlaut="I",iumlaut="i", + Oumlaut="O",oumlaut="o", + Uumlaut="U",uumlaut="u", + } +} + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['luatex-fonts-tfm']={ + version=1.001, + comment="companion to luatex-*.tex", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end +local fonts=fonts +local tfm={} +fonts.handlers.tfm=tfm +fonts.formats.tfm="type1" +function fonts.readers.tfm(specification) + local fullname=specification.filename or "" + if fullname=="" then + local forced=specification.forced or "" + if forced~="" then + fullname=specification.name.."."..forced + else + fullname=specification.name + end + end + local foundname=resolvers.findbinfile(fullname,'tfm') or "" + if foundname=="" then + foundname=resolvers.findbinfile(fullname,'ofm') or "" + end + if foundname~="" then + specification.filename=foundname + specification.format="ofm" + return 
font.read_tfm(specification.filename,specification.size) + end +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-oti']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local lower=string.lower +local fonts=fonts +local constructors=fonts.constructors +local otf=constructors.newhandler("otf") +local otffeatures=constructors.newfeatures("otf") +local otftables=otf.tables +local registerotffeature=otffeatures.register +local allocate=utilities.storage.allocate +registerotffeature { + name="features", + description="initialization of feature handler", + default=true, +} +local function setmode(tfmdata,value) + if value then + tfmdata.properties.mode=lower(value) + end +end +local function setlanguage(tfmdata,value) + if value then + local cleanvalue=lower(value) + local languages=otftables and otftables.languages + local properties=tfmdata.properties + if not languages then + properties.language=cleanvalue + elseif languages[value] then + properties.language=cleanvalue + else + properties.language="dflt" + end + end +end +local function setscript(tfmdata,value) + if value then + local cleanvalue=lower(value) + local scripts=otftables and otftables.scripts + local properties=tfmdata.properties + if not scripts then + properties.script=cleanvalue + elseif scripts[value] then + properties.script=cleanvalue + else + properties.script="dflt" + end + end +end +registerotffeature { + name="mode", + description="mode", + initializers={ + base=setmode, + node=setmode, + } +} +registerotffeature { + name="language", + description="language", + initializers={ + base=setlanguage, + node=setlanguage, + } +} +registerotffeature { + name="script", + description="script", + initializers={ + base=setscript, + node=setscript, + } +} + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-otf']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local utfbyte=utf.byte +local format,gmatch,gsub,find,match,lower,strip=string.format,string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip +local type,next,tonumber,tostring=type,next,tonumber,tostring +local abs=math.abs +local insert=table.insert +local lpegmatch=lpeg.match +local reversed,concat,remove,sortedkeys=table.reversed,table.concat,table.remove,table.sortedkeys +local ioflush=io.flush +local fastcopy,tohash,derivetable=table.fastcopy,table.tohash,table.derive +local formatters=string.formatters +local P,R,S,C,Ct,lpegmatch=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Ct,lpeg.match +local setmetatableindex=table.setmetatableindex +local allocate=utilities.storage.allocate +local registertracker=trackers.register +local registerdirective=directives.register +local starttiming=statistics.starttiming +local stoptiming=statistics.stoptiming +local elapsedtime=statistics.elapsedtime +local findbinfile=resolvers.findbinfile +local trace_private=false registertracker("otf.private",function(v) trace_private=v end) +local trace_loading=false registertracker("otf.loading",function(v) trace_loading=v end) +local trace_features=false registertracker("otf.features",function(v) 
trace_features=v end) +local trace_dynamics=false registertracker("otf.dynamics",function(v) trace_dynamics=v end) +local trace_sequences=false registertracker("otf.sequences",function(v) trace_sequences=v end) +local trace_markwidth=false registertracker("otf.markwidth",function(v) trace_markwidth=v end) +local trace_defining=false registertracker("fonts.defining",function(v) trace_defining=v end) +local compact_lookups=true registertracker("otf.compactlookups",function(v) compact_lookups=v end) +local purge_names=true registertracker("otf.purgenames",function(v) purge_names=v end) +local report_otf=logs.reporter("fonts","otf loading") +local fonts=fonts +local otf=fonts.handlers.otf +otf.glists={ "gsub","gpos" } +otf.version=2.802 +otf.cache=containers.define("fonts","otf",otf.version,true) +local fontdata=fonts.hashes.identifiers +local chardata=characters and characters.data +local definers=fonts.definers +local readers=fonts.readers +local constructors=fonts.constructors +local otffeatures=constructors.newfeatures("otf") +local registerotffeature=otffeatures.register +local enhancers=allocate() +otf.enhancers=enhancers +local patches={} +enhancers.patches=patches +local forceload=false +local cleanup=0 +local packdata=true +local syncspace=true +local forcenotdef=false +local includesubfonts=false +local overloadkerns=false +local applyruntimefixes=fonts.treatments and fonts.treatments.applyfixes +local wildcard="*" +local default="dflt" +local fontloaderfields=fontloader.fields +local mainfields=nil +local glyphfields=nil +local formats=fonts.formats +formats.otf="opentype" +formats.ttf="truetype" +formats.ttc="truetype" +formats.dfont="truetype" +registerdirective("fonts.otf.loader.cleanup",function(v) cleanup=tonumber(v) or (v and 1) or 0 end) +registerdirective("fonts.otf.loader.force",function(v) forceload=v end) +registerdirective("fonts.otf.loader.pack",function(v) packdata=v end) +registerdirective("fonts.otf.loader.syncspace",function(v) syncspace=v end) +registerdirective("fonts.otf.loader.forcenotdef",function(v) forcenotdef=v end) +registerdirective("fonts.otf.loader.overloadkerns",function(v) overloadkerns=v end) +function otf.fileformat(filename) + local leader=lower(io.loadchunk(filename,4)) + local suffix=lower(file.suffix(filename)) + if leader=="otto" then + return formats.otf,suffix=="otf" + elseif leader=="ttcf" then + return formats.ttc,suffix=="ttc" + elseif suffix=="ttc" then + return formats.ttc,true + elseif suffix=="dfont" then + return formats.dfont,true + else + return formats.ttf,suffix=="ttf" + end +end +local function otf_format(filename) + local format,okay=otf.fileformat(filename) + if not okay then + report_otf("font %a is actually an %a file",filename,format) + end + return format +end +local function load_featurefile(raw,featurefile) + if featurefile and featurefile~="" then + if trace_loading then + report_otf("using featurefile %a",featurefile) + end + fontloader.apply_featurefile(raw,featurefile) + end +end +local function showfeatureorder(rawdata,filename) + local sequences=rawdata.resources.sequences + if sequences and #sequences>0 then + if trace_loading then + report_otf("font %a has %s sequences",filename,#sequences) + report_otf(" ") + end + for nos=1,#sequences do + local sequence=sequences[nos] + local typ=sequence.type or "no-type" + local name=sequence.name or "no-name" + local subtables=sequence.subtables or { "no-subtables" } + local features=sequence.features + if trace_loading then + report_otf("%3i %-15s %-20s [% 
t]",nos,name,typ,subtables) + end + if features then + for feature,scripts in next,features do + local tt={} + if type(scripts)=="table" then + for script,languages in next,scripts do + local ttt={} + for language,_ in next,languages do + ttt[#ttt+1]=language + end + tt[#tt+1]=formatters["[%s: % t]"](script,ttt) + end + if trace_loading then + report_otf(" %s: % t",feature,tt) + end + else + if trace_loading then + report_otf(" %s: %S",feature,scripts) + end + end + end + end + end + if trace_loading then + report_otf("\n") + end + elseif trace_loading then + report_otf("font %a has no sequences",filename) + end +end +local valid_fields=table.tohash { + "ascent", + "cidinfo", + "copyright", + "descent", + "design_range_bottom", + "design_range_top", + "design_size", + "encodingchanged", + "extrema_bound", + "familyname", + "fontname", + "fontstyle_id", + "fontstyle_name", + "fullname", + "hasvmetrics", + "horiz_base", + "issans", + "isserif", + "italicangle", + "macstyle", + "onlybitmaps", + "origname", + "os2_version", + "pfminfo", + "serifcheck", + "sfd_version", + "strokedfont", + "strokewidth", + "table_version", + "ttf_tables", + "uni_interp", + "uniqueid", + "units_per_em", + "upos", + "use_typo_metrics", + "uwidth", + "validation_state", + "version", + "vert_base", + "weight", + "weight_width_slope_only", +} +local ordered_enhancers={ + "prepare tables", + "prepare glyphs", + "prepare lookups", + "analyze glyphs", + "analyze math", + "reorganize lookups", + "reorganize mark classes", + "reorganize anchor classes", + "reorganize glyph kerns", + "reorganize glyph lookups", + "reorganize glyph anchors", + "merge kern classes", + "reorganize features", + "reorganize subtables", + "check glyphs", + "check metadata", + "check extra features", + "prepare tounicode", + "check encoding", + "add duplicates", + "cleanup tables", + "compact lookups", + "purge names", +} +local actions=allocate() +local before=allocate() +local after=allocate() +patches.before=before +patches.after=after +local function enhance(name,data,filename,raw) + local enhancer=actions[name] + if enhancer then + if trace_loading then + report_otf("apply enhancement %a to file %a",name,filename) + ioflush() + end + enhancer(data,filename,raw) + else + end +end +function enhancers.apply(data,filename,raw) + local basename=file.basename(lower(filename)) + if trace_loading then + report_otf("%s enhancing file %a","start",filename) + end + ioflush() + for e=1,#ordered_enhancers do + local enhancer=ordered_enhancers[e] + local b=before[enhancer] + if b then + for pattern,action in next,b do + if find(basename,pattern) then + action(data,filename,raw) + end + end + end + enhance(enhancer,data,filename,raw) + local a=after[enhancer] + if a then + for pattern,action in next,a do + if find(basename,pattern) then + action(data,filename,raw) + end + end + end + ioflush() + end + if trace_loading then + report_otf("%s enhancing file %a","stop",filename) + end + ioflush() +end +function patches.register(what,where,pattern,action) + local pw=patches[what] + if pw then + local ww=pw[where] + if ww then + ww[pattern]=action + else + pw[where]={ [pattern]=action} + end + end +end +function patches.report(fmt,...) 
+ if trace_loading then + report_otf("patching: %s",formatters[fmt](...)) + end +end +function enhancers.register(what,action) + actions[what]=action +end +function otf.load(filename,sub,featurefile) + local base=file.basename(file.removesuffix(filename)) + local name=file.removesuffix(base) + local attr=lfs.attributes(filename) + local size=attr and attr.size or 0 + local time=attr and attr.modification or 0 + if featurefile then + name=name.."@"..file.removesuffix(file.basename(featurefile)) + end + if sub=="" then + sub=false + end + local hash=name + if sub then + hash=hash.."-"..sub + end + hash=containers.cleanname(hash) + local featurefiles + if featurefile then + featurefiles={} + for s in gmatch(featurefile,"[^,]+") do + local name=resolvers.findfile(file.addsuffix(s,'fea'),'fea') or "" + if name=="" then + report_otf("loading error, no featurefile %a",s) + else + local attr=lfs.attributes(name) + featurefiles[#featurefiles+1]={ + name=name, + size=attr and attr.size or 0, + time=attr and attr.modification or 0, + } + end + end + if #featurefiles==0 then + featurefiles=nil + end + end + local data=containers.read(otf.cache,hash) + local reload=not data or data.size~=size or data.time~=time + if forceload then + report_otf("forced reload of %a due to hard coded flag",filename) + reload=true + end + if not reload then + local featuredata=data.featuredata + if featurefiles then + if not featuredata or #featuredata~=#featurefiles then + reload=true + else + for i=1,#featurefiles do + local fi,fd=featurefiles[i],featuredata[i] + if fi.name~=fd.name or fi.size~=fd.size or fi.time~=fd.time then + reload=true + break + end + end + end + elseif featuredata then + reload=true + end + if reload then + report_otf("loading: forced reload due to changed featurefile specification %a",featurefile) + end + end + if reload then + report_otf("loading %a, hash %a",filename,hash) + local fontdata,messages + if sub then + fontdata,messages=fontloader.open(filename,sub) + else + fontdata,messages=fontloader.open(filename) + end + if fontdata then + mainfields=mainfields or (fontloaderfields and fontloaderfields(fontdata)) + end + if trace_loading and messages and #messages>0 then + if type(messages)=="string" then + report_otf("warning: %s",messages) + else + for m=1,#messages do + report_otf("warning: %S",messages[m]) + end + end + else + report_otf("loading done") + end + if fontdata then + if featurefiles then + for i=1,#featurefiles do + load_featurefile(fontdata,featurefiles[i].name) + end + end + local unicodes={ + } + local splitter=lpeg.splitter(" ",unicodes) + data={ + size=size, + time=time, + format=otf_format(filename), + featuredata=featurefiles, + resources={ + filename=resolvers.unresolve(filename), + version=otf.version, + creator="context mkiv", + unicodes=unicodes, + indices={ + }, + duplicates={ + }, + variants={ + }, + lookuptypes={}, + }, + warnings={}, + metadata={ + }, + properties={ + }, + descriptions={}, + goodies={}, + helpers={ + tounicodelist=splitter, + tounicodetable=Ct(splitter), + }, + } + starttiming(data) + report_otf("file size: %s",size) + enhancers.apply(data,filename,fontdata) + local packtime={} + if packdata then + if cleanup>0 then + collectgarbage("collect") + end + starttiming(packtime) + enhance("pack",data,filename,nil) + stoptiming(packtime) + end + report_otf("saving %a in cache",filename) + data=containers.write(otf.cache,hash,data) + if cleanup>1 then + collectgarbage("collect") + end + stoptiming(data) + if elapsedtime then + report_otf("preprocessing 
and caching time %s, packtime %s", + elapsedtime(data),packdata and elapsedtime(packtime) or 0) + end + fontloader.close(fontdata) + if cleanup>3 then + collectgarbage("collect") + end + data=containers.read(otf.cache,hash) + if cleanup>2 then + collectgarbage("collect") + end + else + data=nil + report_otf("loading failed due to read error") + end + end + if data then + if trace_defining then + report_otf("loading from cache using hash %a",hash) + end + enhance("unpack",data,filename,nil,false) + local resources=data.resources + local lookuptags=resources.lookuptags + local unicodes=resources.unicodes + if not lookuptags then + lookuptags={} + resources.lookuptags=lookuptags + end + setmetatableindex(lookuptags,function(t,k) + local v=type(k)=="number" and ("lookup "..k) or k + t[k]=v + return v + end) + if not unicodes then + unicodes={} + resources.unicodes=unicodes + setmetatableindex(unicodes,function(t,k) + setmetatableindex(unicodes,nil) + for u,d in next,data.descriptions do + local n=d.name + if n then + t[n]=u + else + end + end + return rawget(t,k) + end) + end + constructors.addcoreunicodes(unicodes) + if applyruntimefixes then + applyruntimefixes(filename,data) + end + enhance("add dimensions",data,filename,nil,false) + if trace_sequences then + showfeatureorder(data,filename) + end + end + return data +end +local mt={ + __index=function(t,k) + if k=="height" then + local ht=t.boundingbox[4] + return ht<0 and 0 or ht + elseif k=="depth" then + local dp=-t.boundingbox[2] + return dp<0 and 0 or dp + elseif k=="width" then + return 0 + elseif k=="name" then + return forcenotdef and ".notdef" + end + end +} +actions["prepare tables"]=function(data,filename,raw) + data.properties.hasitalics=false +end +actions["add dimensions"]=function(data,filename) + if data then + local descriptions=data.descriptions + local resources=data.resources + local defaultwidth=resources.defaultwidth or 0 + local defaultheight=resources.defaultheight or 0 + local defaultdepth=resources.defaultdepth or 0 + local basename=trace_markwidth and file.basename(filename) + for _,d in next,descriptions do + local bb,wd=d.boundingbox,d.width + if not wd then + d.width=defaultwidth + elseif trace_markwidth and wd~=0 and d.class=="mark" then + report_otf("mark %a with width %b found in %a",d.name or "",wd,basename) + end + if bb then + local ht,dp=bb[4],-bb[2] + if ht==0 or ht<0 then + else + d.height=ht + end + if dp==0 or dp<0 then + else + d.depth=dp + end + end + end + end +end +local function somecopy(old) + if old then + local new={} + if type(old)=="table" then + for k,v in next,old do + if k=="glyphs" then + elseif type(v)=="table" then + new[k]=somecopy(v) + else + new[k]=v + end + end + else + for i=1,#mainfields do + local k=mainfields[i] + local v=old[k] + if k=="glyphs" then + elseif type(v)=="table" then + new[k]=somecopy(v) + else + new[k]=v + end + end + end + return new + else + return {} + end +end +actions["prepare glyphs"]=function(data,filename,raw) + local rawglyphs=raw.glyphs + local rawsubfonts=raw.subfonts + local rawcidinfo=raw.cidinfo + local criterium=constructors.privateoffset + local private=criterium + local resources=data.resources + local metadata=data.metadata + local properties=data.properties + local descriptions=data.descriptions + local unicodes=resources.unicodes + local indices=resources.indices + local duplicates=resources.duplicates + local variants=resources.variants + if rawsubfonts then + metadata.subfonts=includesubfonts and {} + properties.cidinfo=rawcidinfo + if 
rawcidinfo.registry then + local cidmap=fonts.cid.getmap(rawcidinfo) + if cidmap then + rawcidinfo.usedname=cidmap.usedname + local nofnames,nofunicodes=0,0 + local cidunicodes,cidnames=cidmap.unicodes,cidmap.names + for cidindex=1,#rawsubfonts do + local subfont=rawsubfonts[cidindex] + local cidglyphs=subfont.glyphs + if includesubfonts then + metadata.subfonts[cidindex]=somecopy(subfont) + end + for index=0,subfont.glyphcnt-1 do + local glyph=cidglyphs[index] + if glyph then + local unicode=glyph.unicode + if unicode>=0x00E000 and unicode<=0x00F8FF then + unicode=-1 + elseif unicode>=0x0F0000 and unicode<=0x0FFFFD then + unicode=-1 + elseif unicode>=0x100000 and unicode<=0x10FFFD then + unicode=-1 + end + local name=glyph.name or cidnames[index] + if not unicode or unicode==-1 then + unicode=cidunicodes[index] + end + if unicode and descriptions[unicode] then + if trace_private then + report_otf("preventing glyph %a at index %H to overload unicode %U",name or "noname",index,unicode) + end + unicode=-1 + end + if not unicode or unicode==-1 then + if not name then + name=format("u%06X.ctx",private) + end + unicode=private + unicodes[name]=private + if trace_private then + report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private) + end + private=private+1 + nofnames=nofnames+1 + else + if not name then + name=format("u%06X.ctx",unicode) + end + unicodes[name]=unicode + nofunicodes=nofunicodes+1 + end + indices[index]=unicode + local description={ + boundingbox=glyph.boundingbox, + name=glyph.name or name or "unknown", + cidindex=cidindex, + index=index, + glyph=glyph, + } + descriptions[unicode]=description + else + end + end + end + if trace_loading then + report_otf("cid font remapped, %s unicode points, %s symbolic names, %s glyphs",nofunicodes,nofnames,nofunicodes+nofnames) + end + elseif trace_loading then + report_otf("unable to remap cid font, missing cid file for %a",filename) + end + elseif trace_loading then + report_otf("font %a has no glyphs",filename) + end + else + for index=0,raw.glyphcnt-1 do + local glyph=rawglyphs[index] + if glyph then + local unicode=glyph.unicode + local name=glyph.name + if not unicode or unicode==-1 then + unicode=private + unicodes[name]=private + if trace_private then + report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private) + end + private=private+1 + else + if unicode>criterium then + local taken=descriptions[unicode] + if taken then + if unicode>=private then + private=unicode+1 + else + private=private+1 + end + descriptions[private]=taken + unicodes[taken.name]=private + indices[taken.index]=private + if trace_private then + report_otf("slot %U is moved to %U due to private in font",unicode) + end + else + if unicode>=private then + private=unicode+1 + end + end + end + unicodes[name]=unicode + end + indices[index]=unicode + descriptions[unicode]={ + boundingbox=glyph.boundingbox, + name=name, + index=index, + glyph=glyph, + } + local altuni=glyph.altuni + if altuni then + for i=1,#altuni do + local a=altuni[i] + local u=a.unicode + local v=a.variant + if v then + local vv=variants[v] + if vv then + vv[u]=unicode + else + vv={ [u]=unicode } + variants[v]=vv + end + end + end + end + else + report_otf("potential problem: glyph %U is used but empty",index) + end + end + end + resources.private=private +end +actions["check encoding"]=function(data,filename,raw) + local descriptions=data.descriptions + local resources=data.resources + local properties=data.properties + local 
unicodes=resources.unicodes + local indices=resources.indices + local duplicates=resources.duplicates + local mapdata=raw.map or {} + local unicodetoindex=mapdata and mapdata.map or {} + local indextounicode=mapdata and mapdata.backmap or {} + local encname=lower(data.enc_name or mapdata.enc_name or "") + local criterium=0xFFFF + local privateoffset=constructors.privateoffset + if find(encname,"unicode") then + if trace_loading then + report_otf("checking embedded unicode map %a",encname) + end + local reported={} + for maybeunicode,index in next,unicodetoindex do + if descriptions[maybeunicode] then + else + local unicode=indices[index] + if not unicode then + elseif maybeunicode==unicode then + elseif unicode>privateoffset then + else + local d=descriptions[unicode] + if d then + local c=d.copies + if c then + c[maybeunicode]=true + else + d.copies={ [maybeunicode]=true } + end + elseif index and not reported[index] then + report_otf("missing index %i",index) + reported[index]=true + end + end + end + end + for unicode,data in next,descriptions do + local d=data.copies + if d then + duplicates[unicode]=sortedkeys(d) + data.copies=nil + end + end + elseif properties.cidinfo then + report_otf("warning: no unicode map, used cidmap %a",properties.cidinfo.usedname) + else + report_otf("warning: non unicode map %a, only using glyph unicode data",encname or "whatever") + end + if mapdata then + mapdata.map={} + mapdata.backmap={} + end +end +actions["add duplicates"]=function(data,filename,raw) + local descriptions=data.descriptions + local resources=data.resources + local properties=data.properties + local unicodes=resources.unicodes + local indices=resources.indices + local duplicates=resources.duplicates + for unicode,d in next,duplicates do + local nofduplicates=#d + if nofduplicates>4 then + if trace_loading then + report_otf("ignoring excessive duplicates of %U (n=%s)",unicode,nofduplicates) + end + else + for i=1,nofduplicates do + local u=d[i] + if not descriptions[u] then + local description=descriptions[unicode] + local n=0 + for _,description in next,descriptions do + local kerns=description.kerns + if kerns then + for _,k in next,kerns do + local ku=k[unicode] + if ku then + k[u]=ku + n=n+1 + end + end + end + end + if u>0 then + local duplicate=table.copy(description) + duplicate.comment=format("copy of U+%05X",unicode) + descriptions[u]=duplicate + if trace_loading then + report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n) + end + end + end + end + end + end +end +actions["analyze glyphs"]=function(data,filename,raw) + local descriptions=data.descriptions + local resources=data.resources + local metadata=data.metadata + local properties=data.properties + local hasitalics=false + local widths={} + local marks={} + for unicode,description in next,descriptions do + local glyph=description.glyph + local italic=glyph.italic_correction + if not italic then + elseif italic==0 then + else + description.italic=italic + hasitalics=true + end + local width=glyph.width + widths[width]=(widths[width] or 0)+1 + local class=glyph.class + if class then + if class=="mark" then + marks[unicode]=true + end + description.class=class + end + end + properties.hasitalics=hasitalics + resources.marks=marks + local wd,most=0,1 + for k,v in next,widths do + if v>most then + wd,most=k,v + end + end + if most>1000 then + if trace_loading then + report_otf("most common width: %s (%s times), sharing (cjk font)",wd,most) + end + for unicode,description in next,descriptions 
do + if description.width==wd then + else + description.width=description.glyph.width + end + end + resources.defaultwidth=wd + else + for unicode,description in next,descriptions do + description.width=description.glyph.width + end + end +end +actions["reorganize mark classes"]=function(data,filename,raw) + local mark_classes=raw.mark_classes + if mark_classes then + local resources=data.resources + local unicodes=resources.unicodes + local markclasses={} + resources.markclasses=markclasses + for name,class in next,mark_classes do + local t={} + for s in gmatch(class,"[^ ]+") do + t[unicodes[s]]=true + end + markclasses[name]=t + end + end +end +actions["reorganize features"]=function(data,filename,raw) + local features={} + data.resources.features=features + for k,what in next,otf.glists do + local dw=raw[what] + if dw then + local f={} + features[what]=f + for i=1,#dw do + local d=dw[i] + local dfeatures=d.features + if dfeatures then + for i=1,#dfeatures do + local df=dfeatures[i] + local tag=strip(lower(df.tag)) + local ft=f[tag] + if not ft then + ft={} + f[tag]=ft + end + local dscripts=df.scripts + for i=1,#dscripts do + local d=dscripts[i] + local languages=d.langs + local script=strip(lower(d.script)) + local fts=ft[script] if not fts then fts={} ft[script]=fts end + for i=1,#languages do + fts[strip(lower(languages[i]))]=true + end + end + end + end + end + end + end +end +actions["reorganize anchor classes"]=function(data,filename,raw) + local resources=data.resources + local anchor_to_lookup={} + local lookup_to_anchor={} + resources.anchor_to_lookup=anchor_to_lookup + resources.lookup_to_anchor=lookup_to_anchor + local classes=raw.anchor_classes + if classes then + for c=1,#classes do + local class=classes[c] + local anchor=class.name + local lookups=class.lookup + if type(lookups)~="table" then + lookups={ lookups } + end + local a=anchor_to_lookup[anchor] + if not a then + a={} + anchor_to_lookup[anchor]=a + end + for l=1,#lookups do + local lookup=lookups[l] + local l=lookup_to_anchor[lookup] + if l then + l[anchor]=true + else + l={ [anchor]=true } + lookup_to_anchor[lookup]=l + end + a[lookup]=true + end + end + end +end +actions["prepare tounicode"]=function(data,filename,raw) + fonts.mappings.addtounicode(data,filename) +end +local g_directions={ + gsub_contextchain=1, + gpos_contextchain=1, + gsub_reversecontextchain=-1, + gpos_reversecontextchain=-1, +} +actions["reorganize subtables"]=function(data,filename,raw) + local resources=data.resources + local sequences={} + local lookups={} + local chainedfeatures={} + resources.sequences=sequences + resources.lookups=lookups + for _,what in next,otf.glists do + local dw=raw[what] + if dw then + for k=1,#dw do + local gk=dw[k] + local features=gk.features + local typ=gk.type + local chain=g_directions[typ] or 0 + local subtables=gk.subtables + if subtables then + local t={} + for s=1,#subtables do + t[s]=subtables[s].name + end + subtables=t + end + local flags,markclass=gk.flags,nil + if flags then + local t={ + (flags.ignorecombiningmarks and "mark") or false, + (flags.ignoreligatures and "ligature") or false, + (flags.ignorebaseglyphs and "base") or false, + flags.r2l or false, + } + markclass=flags.mark_class + if markclass then + markclass=resources.markclasses[markclass] + end + flags=t + end + local name=gk.name + if not name then + report_otf("skipping weird lookup number %s",k) + elseif features then + local f={} + local o={} + for i=1,#features do + local df=features[i] + local tag=strip(lower(df.tag)) + local 
ft=f[tag] + if not ft then + ft={} + f[tag]=ft + o[#o+1]=tag + end + local dscripts=df.scripts + for i=1,#dscripts do + local d=dscripts[i] + local languages=d.langs + local script=strip(lower(d.script)) + local fts=ft[script] if not fts then fts={} ft[script]=fts end + for i=1,#languages do + fts[strip(lower(languages[i]))]=true + end + end + end + sequences[#sequences+1]={ + type=typ, + chain=chain, + flags=flags, + name=name, + subtables=subtables, + markclass=markclass, + features=f, + order=o, + } + else + lookups[name]={ + type=typ, + chain=chain, + flags=flags, + subtables=subtables, + markclass=markclass, + } + end + end + end + end +end +actions["prepare lookups"]=function(data,filename,raw) + local lookups=raw.lookups + if lookups then + data.lookups=lookups + end +end +local function t_uncover(splitter,cache,covers) + local result={} + for n=1,#covers do + local cover=covers[n] + local uncovered=cache[cover] + if not uncovered then + uncovered=lpegmatch(splitter,cover) + cache[cover]=uncovered + end + result[n]=uncovered + end + return result +end +local function s_uncover(splitter,cache,cover) + if cover=="" then + return nil + else + local uncovered=cache[cover] + if not uncovered then + uncovered=lpegmatch(splitter,cover) + cache[cover]=uncovered + end + return { uncovered } + end +end +local function t_hashed(t,cache) + if t then + local ht={} + for i=1,#t do + local ti=t[i] + local tih=cache[ti] + if not tih then + local tn=#ti + if tn==1 then + tih={ [ti[1]]=true } + else + tih={} + for i=1,tn do + tih[ti[i]]=true + end + end + cache[ti]=tih + end + ht[i]=tih + end + return ht + else + return nil + end +end +local function s_hashed(t,cache) + if t then + local tf=t[1] + local nf=#tf + if nf==1 then + return { [tf[1]]=true } + else + local ht={} + for i=1,nf do + ht[i]={ [tf[i]]=true } + end + return ht + end + else + return nil + end +end +local function r_uncover(splitter,cache,cover,replacements) + if cover=="" then + return nil + else + local uncovered=cover[1] + local replaced=cache[replacements] + if not replaced then + replaced=lpegmatch(splitter,replacements) + cache[replacements]=replaced + end + local nu,nr=#uncovered,#replaced + local r={} + if nu==nr then + for i=1,nu do + r[uncovered[i]]=replaced[i] + end + end + return r + end +end +actions["reorganize lookups"]=function(data,filename,raw) + if data.lookups then + local splitter=data.helpers.tounicodetable + local t_u_cache={} + local s_u_cache=t_u_cache + local t_h_cache={} + local s_h_cache=t_h_cache + local r_u_cache={} + for _,lookup in next,data.lookups do + local rules=lookup.rules + if rules then + local format=lookup.format + if format=="class" then + local before_class=lookup.before_class + if before_class then + before_class=t_uncover(splitter,t_u_cache,reversed(before_class)) + end + local current_class=lookup.current_class + if current_class then + current_class=t_uncover(splitter,t_u_cache,current_class) + end + local after_class=lookup.after_class + if after_class then + after_class=t_uncover(splitter,t_u_cache,after_class) + end + for i=1,#rules do + local rule=rules[i] + local class=rule.class + local before=class.before + if before then + for i=1,#before do + before[i]=before_class[before[i]] or {} + end + rule.before=t_hashed(before,t_h_cache) + end + local current=class.current + local lookups=rule.lookups + if current then + for i=1,#current do + current[i]=current_class[current[i]] or {} + if lookups and not lookups[i] then + lookups[i]="" + end + end + 
rule.current=t_hashed(current,t_h_cache) + end + local after=class.after + if after then + for i=1,#after do + after[i]=after_class[after[i]] or {} + end + rule.after=t_hashed(after,t_h_cache) + end + rule.class=nil + end + lookup.before_class=nil + lookup.current_class=nil + lookup.after_class=nil + lookup.format="coverage" + elseif format=="coverage" then + for i=1,#rules do + local rule=rules[i] + local coverage=rule.coverage + if coverage then + local before=coverage.before + if before then + before=t_uncover(splitter,t_u_cache,reversed(before)) + rule.before=t_hashed(before,t_h_cache) + end + local current=coverage.current + if current then + current=t_uncover(splitter,t_u_cache,current) + local lookups=rule.lookups + if lookups then + for i=1,#current do + if not lookups[i] then + lookups[i]="" + end + end + end + rule.current=t_hashed(current,t_h_cache) + end + local after=coverage.after + if after then + after=t_uncover(splitter,t_u_cache,after) + rule.after=t_hashed(after,t_h_cache) + end + rule.coverage=nil + end + end + elseif format=="reversecoverage" then + for i=1,#rules do + local rule=rules[i] + local reversecoverage=rule.reversecoverage + if reversecoverage then + local before=reversecoverage.before + if before then + before=t_uncover(splitter,t_u_cache,reversed(before)) + rule.before=t_hashed(before,t_h_cache) + end + local current=reversecoverage.current + if current then + current=t_uncover(splitter,t_u_cache,current) + rule.current=t_hashed(current,t_h_cache) + end + local after=reversecoverage.after + if after then + after=t_uncover(splitter,t_u_cache,after) + rule.after=t_hashed(after,t_h_cache) + end + local replacements=reversecoverage.replacements + if replacements then + rule.replacements=r_uncover(splitter,r_u_cache,current,replacements) + end + rule.reversecoverage=nil + end + end + elseif format=="glyphs" then + for i=1,#rules do + local rule=rules[i] + local glyphs=rule.glyphs + if glyphs then + local fore=glyphs.fore + if fore and fore~="" then + fore=s_uncover(splitter,s_u_cache,fore) + rule.after=s_hashed(fore,s_h_cache) + end + local back=glyphs.back + if back then + back=s_uncover(splitter,s_u_cache,back) + rule.before=s_hashed(back,s_h_cache) + end + local names=glyphs.names + if names then + names=s_uncover(splitter,s_u_cache,names) + rule.current=s_hashed(names,s_h_cache) + end + rule.glyphs=nil + local lookups=rule.lookups + if lookups then + for i=1,#names do + if not lookups[i] then + lookups[i]="" + end + end + end + end + end + end + end + end + end +end +local function check_variants(unicode,the_variants,splitter,unicodes) + local variants=the_variants.variants + if variants then + local glyphs=lpegmatch(splitter,variants) + local done={ [unicode]=true } + local n=0 + for i=1,#glyphs do + local g=glyphs[i] + if done[g] then + if i>1 then + report_otf("skipping cyclic reference %U in math variant %U",g,unicode) + end + else + if n==0 then + n=1 + variants={ g } + else + n=n+1 + variants[n]=g + end + done[g]=true + end + end + if n==0 then + variants=nil + end + end + local parts=the_variants.parts + if parts then + local p=#parts + if p>0 then + for i=1,p do + local pi=parts[i] + pi.glyph=unicodes[pi.component] or 0 + pi.component=nil + end + else + parts=nil + end + end + local italic_correction=the_variants.italic_correction + if italic_correction and italic_correction==0 then + italic_correction=nil + end + return variants,parts,italic_correction +end +actions["analyze math"]=function(data,filename,raw) + if raw.math then + 
data.metadata.math=raw.math + local unicodes=data.resources.unicodes + local splitter=data.helpers.tounicodetable + for unicode,description in next,data.descriptions do + local glyph=description.glyph + local mathkerns=glyph.mathkern + local horiz_variants=glyph.horiz_variants + local vert_variants=glyph.vert_variants + local top_accent=glyph.top_accent + if mathkerns or horiz_variants or vert_variants or top_accent then + local math={} + if top_accent then + math.top_accent=top_accent + end + if mathkerns then + for k,v in next,mathkerns do + if not next(v) then + mathkerns[k]=nil + else + for k,v in next,v do + if v==0 then + k[v]=nil + end + end + end + end + math.kerns=mathkerns + end + if horiz_variants then + math.horiz_variants,math.horiz_parts,math.horiz_italic_correction=check_variants(unicode,horiz_variants,splitter,unicodes) + end + if vert_variants then + math.vert_variants,math.vert_parts,math.vert_italic_correction=check_variants(unicode,vert_variants,splitter,unicodes) + end + local italic_correction=description.italic + if italic_correction and italic_correction~=0 then + math.italic_correction=italic_correction + end + description.math=math + end + end + end +end +actions["reorganize glyph kerns"]=function(data,filename,raw) + local descriptions=data.descriptions + local resources=data.resources + local unicodes=resources.unicodes + for unicode,description in next,descriptions do + local kerns=description.glyph.kerns + if kerns then + local newkerns={} + for k,kern in next,kerns do + local name=kern.char + local offset=kern.off + local lookup=kern.lookup + if name and offset and lookup then + local unicode=unicodes[name] + if unicode then + if type(lookup)=="table" then + for l=1,#lookup do + local lookup=lookup[l] + local lookupkerns=newkerns[lookup] + if lookupkerns then + lookupkerns[unicode]=offset + else + newkerns[lookup]={ [unicode]=offset } + end + end + else + local lookupkerns=newkerns[lookup] + if lookupkerns then + lookupkerns[unicode]=offset + else + newkerns[lookup]={ [unicode]=offset } + end + end + elseif trace_loading then + report_otf("problems with unicode %a of kern %a of glyph %U",name,k,unicode) + end + end + end + description.kerns=newkerns + end + end +end +actions["merge kern classes"]=function(data,filename,raw) + local gposlist=raw.gpos + if gposlist then + local descriptions=data.descriptions + local resources=data.resources + local unicodes=resources.unicodes + local splitter=data.helpers.tounicodetable + local ignored=0 + local blocked=0 + for gp=1,#gposlist do + local gpos=gposlist[gp] + local subtables=gpos.subtables + if subtables then + local first_done={} + local split={} + for s=1,#subtables do + local subtable=subtables[s] + local kernclass=subtable.kernclass + local lookup=subtable.lookup or subtable.name + if kernclass then + if #kernclass>0 then + kernclass=kernclass[1] + lookup=type(kernclass.lookup)=="string" and kernclass.lookup or lookup + report_otf("fixing kernclass table of lookup %a",lookup) + end + local firsts=kernclass.firsts + local seconds=kernclass.seconds + local offsets=kernclass.offsets + for n,s in next,firsts do + split[s]=split[s] or lpegmatch(splitter,s) + end + local maxseconds=0 + for n,s in next,seconds do + if n>maxseconds then + maxseconds=n + end + split[s]=split[s] or lpegmatch(splitter,s) + end + for fk=1,#firsts do + local fv=firsts[fk] + local splt=split[fv] + if splt then + local extrakerns={} + local baseoffset=(fk-1)*maxseconds + for sk=2,maxseconds do + local sv=seconds[sk] + local splt=split[sv] + 
if splt then + local offset=offsets[baseoffset+sk] + if offset then + for i=1,#splt do + extrakerns[splt[i]]=offset + end + end + end + end + for i=1,#splt do + local first_unicode=splt[i] + if first_done[first_unicode] then + report_otf("lookup %a: ignoring further kerns of %C",lookup,first_unicode) + blocked=blocked+1 + else + first_done[first_unicode]=true + local description=descriptions[first_unicode] + if description then + local kerns=description.kerns + if not kerns then + kerns={} + description.kerns=kerns + end + local lookupkerns=kerns[lookup] + if not lookupkerns then + lookupkerns={} + kerns[lookup]=lookupkerns + end + if overloadkerns then + for second_unicode,kern in next,extrakerns do + lookupkerns[second_unicode]=kern + end + else + for second_unicode,kern in next,extrakerns do + local k=lookupkerns[second_unicode] + if not k then + lookupkerns[second_unicode]=kern + elseif k~=kern then + if trace_loading then + report_otf("lookup %a: ignoring overload of kern between %C and %C, rejecting %a, keeping %a",lookup,first_unicode,second_unicode,k,kern) + end + ignored=ignored+1 + end + end + end + elseif trace_loading then + report_otf("no glyph data for %U",first_unicode) + end + end + end + end + end + subtable.kernclass={} + end + end + end + end + if ignored>0 then + report_otf("%s kern overloads ignored",ignored) + end + if blocked>0 then + report_otf("%s succesive kerns blocked",blocked) + end + end +end +actions["check glyphs"]=function(data,filename,raw) + for unicode,description in next,data.descriptions do + description.glyph=nil + end +end +local valid=(R("\x00\x7E")-S("(){}[]<>%/ \n\r\f\v"))^0*P(-1) +local function valid_ps_name(str) + return str and str~="" and #str<64 and lpegmatch(valid,str) and true or false +end +actions["check metadata"]=function(data,filename,raw) + local metadata=data.metadata + for _,k in next,mainfields do + if valid_fields[k] then + local v=raw[k] + if not metadata[k] then + metadata[k]=v + end + end + end + local ttftables=metadata.ttf_tables + if ttftables then + for i=1,#ttftables do + ttftables[i].data="deleted" + end + end + if metadata.validation_state and table.contains(metadata.validation_state,"bad_ps_fontname") then + local function valid(what) + local names=raw.names + for i=1,#names do + local list=names[i] + local names=list.names + if names then + local name=names[what] + if name and valid_ps_name(name) then + return name + end + end + end + end + local function check(what) + local oldname=metadata[what] + if valid_ps_name(oldname) then + report_otf("ignoring warning %a because %s %a is proper ASCII","bad_ps_fontname",what,oldname) + else + local newname=valid(what) + if not newname then + newname=formatters["bad-%s-%s"](what,file.nameonly(filename)) + end + local warning=formatters["overloading %s from invalid ASCII name %a to %a"](what,oldname,newname) + data.warnings[#data.warnings+1]=warning + report_otf(warning) + metadata[what]=newname + end + end + check("fontname") + check("fullname") + end +end +actions["cleanup tables"]=function(data,filename,raw) + local duplicates=data.resources.duplicates + if duplicates then + for k,v in next,duplicates do + if #v==1 then + duplicates[k]=v[1] + end + end + end + data.resources.indices=nil + data.resources.unicodes=nil + data.helpers=nil +end +actions["reorganize glyph lookups"]=function(data,filename,raw) + local resources=data.resources + local unicodes=resources.unicodes + local descriptions=data.descriptions + local splitter=data.helpers.tounicodelist + local 
lookuptypes=resources.lookuptypes + for unicode,description in next,descriptions do + local lookups=description.glyph.lookups + if lookups then + for tag,lookuplist in next,lookups do + for l=1,#lookuplist do + local lookup=lookuplist[l] + local specification=lookup.specification + local lookuptype=lookup.type + local lt=lookuptypes[tag] + if not lt then + lookuptypes[tag]=lookuptype + elseif lt~=lookuptype then + report_otf("conflicting lookuptypes, %a points to %a and %a",tag,lt,lookuptype) + end + if lookuptype=="ligature" then + lookuplist[l]={ lpegmatch(splitter,specification.components) } + elseif lookuptype=="alternate" then + lookuplist[l]={ lpegmatch(splitter,specification.components) } + elseif lookuptype=="substitution" then + lookuplist[l]=unicodes[specification.variant] + elseif lookuptype=="multiple" then + lookuplist[l]={ lpegmatch(splitter,specification.components) } + elseif lookuptype=="position" then + lookuplist[l]={ + specification.x or 0, + specification.y or 0, + specification.h or 0, + specification.v or 0 + } + elseif lookuptype=="pair" then + local one=specification.offsets[1] + local two=specification.offsets[2] + local paired=unicodes[specification.paired] + if one then + if two then + lookuplist[l]={ paired,{ one.x or 0,one.y or 0,one.h or 0,one.v or 0 },{ two.x or 0,two.y or 0,two.h or 0,two.v or 0 } } + else + lookuplist[l]={ paired,{ one.x or 0,one.y or 0,one.h or 0,one.v or 0 } } + end + else + if two then + lookuplist[l]={ paired,{},{ two.x or 0,two.y or 0,two.h or 0,two.v or 0} } + else + lookuplist[l]={ paired } + end + end + end + end + end + local slookups,mlookups + for tag,lookuplist in next,lookups do + if #lookuplist==1 then + if slookups then + slookups[tag]=lookuplist[1] + else + slookups={ [tag]=lookuplist[1] } + end + else + if mlookups then + mlookups[tag]=lookuplist + else + mlookups={ [tag]=lookuplist } + end + end + end + if slookups then + description.slookups=slookups + end + if mlookups then + description.mlookups=mlookups + end + end + end +end +actions["reorganize glyph anchors"]=function(data,filename,raw) + local descriptions=data.descriptions + for unicode,description in next,descriptions do + local anchors=description.glyph.anchors + if anchors then + for class,data in next,anchors do + if class=="baselig" then + for tag,specification in next,data do + for i=1,#specification do + local si=specification[i] + specification[i]={ si.x or 0,si.y or 0 } + end + end + else + for tag,specification in next,data do + data[tag]={ specification.x or 0,specification.y or 0 } + end + end + end + description.anchors=anchors + end + end +end +local bogusname=(P("uni")+P("u"))*R("AF","09")^4+(P("index")+P("glyph")+S("Ii")*P("dentity")*P(".")^0)*R("09")^1 +local uselessname=(1-bogusname)^0*bogusname +actions["purge names"]=function(data,filename,raw) + if purge_names then + local n=0 + for u,d in next,data.descriptions do + if lpegmatch(uselessname,d.name) then + n=n+1 + d.name=nil + end + end + if n>0 then + report_otf("%s bogus names removed",n) + end + end +end +actions["compact lookups"]=function(data,filename,raw) + if not compact_lookups then + report_otf("not compacting") + return + end + local last=0 + local tags=table.setmetatableindex({}, + function(t,k) + last=last+1 + t[k]=last + return last + end + ) + local descriptions=data.descriptions + local resources=data.resources + for u,d in next,descriptions do + local slookups=d.slookups + if type(slookups)=="table" then + local s={} + for k,v in next,slookups do + s[tags[k]]=v + end + 
d.slookups=s + end + local mlookups=d.mlookups + if type(mlookups)=="table" then + local m={} + for k,v in next,mlookups do + m[tags[k]]=v + end + d.mlookups=m + end + local kerns=d.kerns + if type(kerns)=="table" then + local t={} + for k,v in next,kerns do + t[tags[k]]=v + end + d.kerns=t + end + end + local lookups=data.lookups + if lookups then + local l={} + for k,v in next,lookups do + local rules=v.rules + if rules then + for i=1,#rules do + local l=rules[i].lookups + if type(l)=="table" then + for i=1,#l do + l[i]=tags[l[i]] + end + end + end + end + l[tags[k]]=v + end + data.lookups=l + end + local lookups=resources.lookups + if lookups then + local l={} + for k,v in next,lookups do + local s=v.subtables + if type(s)=="table" then + for i=1,#s do + s[i]=tags[s[i]] + end + end + l[tags[k]]=v + end + resources.lookups=l + end + local sequences=resources.sequences + if sequences then + for i=1,#sequences do + local s=sequences[i] + local n=s.name + if n then + s.name=tags[n] + end + local t=s.subtables + if type(t)=="table" then + for i=1,#t do + t[i]=tags[t[i]] + end + end + end + end + local lookuptypes=resources.lookuptypes + if lookuptypes then + local l={} + for k,v in next,lookuptypes do + l[tags[k]]=v + end + resources.lookuptypes=l + end + local anchor_to_lookup=resources.anchor_to_lookup + if anchor_to_lookup then + for anchor,lookups in next,anchor_to_lookup do + local l={} + for lookup,value in next,lookups do + l[tags[lookup]]=value + end + anchor_to_lookup[anchor]=l + end + end + local lookup_to_anchor=resources.lookup_to_anchor + if lookup_to_anchor then + local l={} + for lookup,value in next,lookup_to_anchor do + l[tags[lookup]]=value + end + resources.lookup_to_anchor=l + end + tags=table.swapped(tags) + report_otf("%s lookup tags compacted",#tags) + resources.lookuptags=tags +end +function otf.setfeatures(tfmdata,features) + local okay=constructors.initializefeatures("otf",tfmdata,features,trace_features,report_otf) + if okay then + return constructors.collectprocessors("otf",tfmdata,features,trace_features,report_otf) + else + return {} + end +end +local function copytotfm(data,cache_id) + if data then + local metadata=data.metadata + local warnings=data.warnings + local resources=data.resources + local properties=derivetable(data.properties) + local descriptions=derivetable(data.descriptions) + local goodies=derivetable(data.goodies) + local characters={} + local parameters={} + local mathparameters={} + local pfminfo=metadata.pfminfo or {} + local resources=data.resources + local unicodes=resources.unicodes + local spaceunits=500 + local spacer="space" + local designsize=metadata.designsize or metadata.design_size or 100 + local mathspecs=metadata.math + if designsize==0 then + designsize=100 + end + if mathspecs then + for name,value in next,mathspecs do + mathparameters[name]=value + end + end + for unicode,_ in next,data.descriptions do + characters[unicode]={} + end + if mathspecs then + for unicode,character in next,characters do + local d=descriptions[unicode] + local m=d.math + if m then + local variants=m.horiz_variants + local parts=m.horiz_parts + if variants then + local c=character + for i=1,#variants do + local un=variants[i] + c.next=un + c=characters[un] + end + c.horiz_variants=parts + elseif parts then + character.horiz_variants=parts + end + local variants=m.vert_variants + local parts=m.vert_parts + if variants then + local c=character + for i=1,#variants do + local un=variants[i] + c.next=un + c=characters[un] + end + c.vert_variants=parts + 
elseif parts then + character.vert_variants=parts + end + local italic_correction=m.vert_italic_correction + if italic_correction then + character.vert_italic_correction=italic_correction + end + local top_accent=m.top_accent + if top_accent then + character.top_accent=top_accent + end + local kerns=m.kerns + if kerns then + character.mathkerns=kerns + end + end + end + end + local filename=constructors.checkedfilename(resources) + local fontname=metadata.fontname + local fullname=metadata.fullname or fontname + local psname=fontname or fullname + local units=metadata.units_per_em or 1000 + if units==0 then + units=1000 + metadata.units_per_em=1000 + report_otf("changing %a units to %a",0,units) + end + local monospaced=metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose.proportion=="Monospaced") + local charwidth=pfminfo.avgwidth + local charxheight=pfminfo.os2_xheight and pfminfo.os2_xheight>0 and pfminfo.os2_xheight + local italicangle=metadata.italicangle + properties.monospaced=monospaced + parameters.italicangle=italicangle + parameters.charwidth=charwidth + parameters.charxheight=charxheight + local space=0x0020 + local emdash=0x2014 + if monospaced then + if descriptions[space] then + spaceunits,spacer=descriptions[space].width,"space" + end + if not spaceunits and descriptions[emdash] then + spaceunits,spacer=descriptions[emdash].width,"emdash" + end + if not spaceunits and charwidth then + spaceunits,spacer=charwidth,"charwidth" + end + else + if descriptions[space] then + spaceunits,spacer=descriptions[space].width,"space" + end + if not spaceunits and descriptions[emdash] then + spaceunits,spacer=descriptions[emdash].width/2,"emdash/2" + end + if not spaceunits and charwidth then + spaceunits,spacer=charwidth,"charwidth" + end + end + spaceunits=tonumber(spaceunits) or 500 + parameters.slant=0 + parameters.space=spaceunits + parameters.space_stretch=units/2 + parameters.space_shrink=1*units/3 + parameters.x_height=2*units/5 + parameters.quad=units + if spaceunits<2*units/5 then + end + if italicangle and italicangle~=0 then + parameters.italicangle=italicangle + parameters.italicfactor=math.cos(math.rad(90+italicangle)) + parameters.slant=- math.tan(italicangle*math.pi/180) + end + if monospaced then + parameters.space_stretch=0 + parameters.space_shrink=0 + elseif syncspace then + parameters.space_stretch=spaceunits/2 + parameters.space_shrink=spaceunits/3 + end + parameters.extra_space=parameters.space_shrink + if charxheight then + parameters.x_height=charxheight + else + local x=0x0078 + if x then + local x=descriptions[x] + if x then + parameters.x_height=x.height + end + end + end + parameters.designsize=(designsize/10)*65536 + parameters.ascender=abs(metadata.ascent or 0) + parameters.descender=abs(metadata.descent or 0) + parameters.units=units + properties.space=spacer + properties.encodingbytes=2 + properties.format=data.format or otf_format(filename) or formats.otf + properties.noglyphnames=true + properties.filename=filename + properties.fontname=fontname + properties.fullname=fullname + properties.psname=psname + properties.name=filename or fullname + if warnings and #warnings>0 then + report_otf("warnings for font: %s",filename) + report_otf() + for i=1,#warnings do + report_otf(" %s",warnings[i]) + end + report_otf() + end + return { + characters=characters, + descriptions=descriptions, + parameters=parameters, + mathparameters=mathparameters, + resources=resources, + properties=properties, + goodies=goodies, + warnings=warnings, + } + end +end +local 
function otftotfm(specification) + local cache_id=specification.hash + local tfmdata=containers.read(constructors.cache,cache_id) + if not tfmdata then + local name=specification.name + local sub=specification.sub + local filename=specification.filename + local features=specification.features.normal + local rawdata=otf.load(filename,sub,features and features.featurefile) + if rawdata and next(rawdata) then + local descriptions=rawdata.descriptions + local duplicates=rawdata.resources.duplicates + if duplicates then + local nofduplicates,nofduplicated=0,0 + for parent,list in next,duplicates do + if type(list)=="table" then + local n=#list + for i=1,n do + local unicode=list[i] + if not descriptions[unicode] then + descriptions[unicode]=descriptions[parent] + nofduplicated=nofduplicated+1 + end + end + nofduplicates=nofduplicates+n + else + if not descriptions[list] then + descriptions[list]=descriptions[parent] + nofduplicated=nofduplicated+1 + end + nofduplicates=nofduplicates+1 + end + end + if trace_otf and nofduplicated~=nofduplicates then + report_otf("%i extra duplicates copied out of %i",nofduplicated,nofduplicates) + end + end + rawdata.lookuphash={} + tfmdata=copytotfm(rawdata,cache_id) + if tfmdata and next(tfmdata) then + local features=constructors.checkedfeatures("otf",features) + local shared=tfmdata.shared + if not shared then + shared={} + tfmdata.shared=shared + end + shared.rawdata=rawdata + shared.dynamics={} + tfmdata.changed={} + shared.features=features + shared.processes=otf.setfeatures(tfmdata,features) + end + end + containers.write(constructors.cache,cache_id,tfmdata) + end + return tfmdata +end +local function read_from_otf(specification) + local tfmdata=otftotfm(specification) + if tfmdata then + tfmdata.properties.name=specification.name + tfmdata.properties.sub=specification.sub + tfmdata=constructors.scale(tfmdata,specification) + local allfeatures=tfmdata.shared.features or specification.features.normal + constructors.applymanipulators("otf",tfmdata,allfeatures,trace_features,report_otf) + constructors.setname(tfmdata,specification) + fonts.loggers.register(tfmdata,file.suffix(specification.filename),specification) + end + return tfmdata +end +local function checkmathsize(tfmdata,mathsize) + local mathdata=tfmdata.shared.rawdata.metadata.math + local mathsize=tonumber(mathsize) + if mathdata then + local parameters=tfmdata.parameters + parameters.scriptpercentage=mathdata.ScriptPercentScaleDown + parameters.scriptscriptpercentage=mathdata.ScriptScriptPercentScaleDown + parameters.mathsize=mathsize + end +end +registerotffeature { + name="mathsize", + description="apply mathsize specified in the font", + initializers={ + base=checkmathsize, + node=checkmathsize, + } +} +function otf.collectlookups(rawdata,kind,script,language) + local sequences=rawdata.resources.sequences + if sequences then + local featuremap,featurelist={},{} + for s=1,#sequences do + local sequence=sequences[s] + local features=sequence.features + features=features and features[kind] + features=features and (features[script] or features[default] or features[wildcard]) + features=features and (features[language] or features[default] or features[wildcard]) + if features then + local subtables=sequence.subtables + if subtables then + for s=1,#subtables do + local ss=subtables[s] + if not featuremap[s] then + featuremap[ss]=true + featurelist[#featurelist+1]=ss + end + end + end + end + end + if #featurelist>0 then + return featuremap,featurelist + end + end + return nil,nil +end +local 
function check_otf(forced,specification,suffix) + local name=specification.name + if forced then + name=specification.forcedname + end + local fullname=findbinfile(name,suffix) or "" + if fullname=="" then + fullname=fonts.names.getfilename(name,suffix) or "" + end + if fullname~="" and not fonts.names.ignoredfile(fullname) then + specification.filename=fullname + return read_from_otf(specification) + end +end +local function opentypereader(specification,suffix) + local forced=specification.forced or "" + if formats[forced] then + return check_otf(true,specification,forced) + else + return check_otf(false,specification,suffix) + end +end +readers.opentype=opentypereader +function readers.otf (specification) return opentypereader(specification,"otf") end +function readers.ttf (specification) return opentypereader(specification,"ttf") end +function readers.ttc (specification) return opentypereader(specification,"ttf") end +function readers.dfont(specification) return opentypereader(specification,"ttf") end +function otf.scriptandlanguage(tfmdata,attr) + local properties=tfmdata.properties + return properties.script or "dflt",properties.language or "dflt" +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-otb']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local concat=table.concat +local format,gmatch,gsub,find,match,lower,strip=string.format,string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip +local type,next,tonumber,tostring,rawget=type,next,tonumber,tostring,rawget +local lpegmatch=lpeg.match +local utfchar=utf.char +local trace_baseinit=false trackers.register("otf.baseinit",function(v) trace_baseinit=v end) +local trace_singles=false trackers.register("otf.singles",function(v) trace_singles=v end) +local trace_multiples=false trackers.register("otf.multiples",function(v) trace_multiples=v end) +local trace_alternatives=false trackers.register("otf.alternatives",function(v) trace_alternatives=v end) +local trace_ligatures=false trackers.register("otf.ligatures",function(v) trace_ligatures=v end) +local trace_ligatures_detail=false trackers.register("otf.ligatures.detail",function(v) trace_ligatures_detail=v end) +local trace_kerns=false trackers.register("otf.kerns",function(v) trace_kerns=v end) +local trace_preparing=false trackers.register("otf.preparing",function(v) trace_preparing=v end) +local report_prepare=logs.reporter("fonts","otf prepare") +local fonts=fonts +local otf=fonts.handlers.otf +local otffeatures=otf.features +local registerotffeature=otffeatures.register +otf.defaultbasealternate="none" +local wildcard="*" +local default="dflt" +local formatters=string.formatters +local f_unicode=formatters["%U"] +local f_uniname=formatters["%U (%s)"] +local f_unilist=formatters["% t (% t)"] +local function gref(descriptions,n) + if type(n)=="number" then + local name=descriptions[n].name + if name then + return f_uniname(n,name) + else + return f_unicode(n) + end + elseif n then + local num,nam,j={},{},0 + for i=1,#n do + local ni=n[i] + if tonumber(ni) then + j=j+1 + local di=descriptions[ni] + num[j]=f_unicode(ni) + nam[j]=di and di.name or "-" + end + end + return f_unilist(num,nam) + else + return "" + end +end +local function cref(feature,lookuptags,lookupname) + if lookupname then + return 
formatters["feature %a, lookup %a"](feature,lookuptags[lookupname]) + else + return formatters["feature %a"](feature) + end +end +local function report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,comment) + report_prepare("%s: base alternate %s => %s (%S => %S)", + cref(feature,lookuptags,lookupname), + gref(descriptions,unicode), + replacement and gref(descriptions,replacement), + value, + comment) +end +local function report_substitution(feature,lookuptags,lookupname,descriptions,unicode,substitution) + report_prepare("%s: base substitution %s => %S", + cref(feature,lookuptags,lookupname), + gref(descriptions,unicode), + gref(descriptions,substitution)) +end +local function report_ligature(feature,lookuptags,lookupname,descriptions,unicode,ligature) + report_prepare("%s: base ligature %s => %S", + cref(feature,lookuptags,lookupname), + gref(descriptions,ligature), + gref(descriptions,unicode)) +end +local function report_kern(feature,lookuptags,lookupname,descriptions,unicode,otherunicode,value) + report_prepare("%s: base kern %s + %s => %S", + cref(feature,lookuptags,lookupname), + gref(descriptions,unicode), + gref(descriptions,otherunicode), + value) +end +local basemethods={} +local basemethod="" +local function applybasemethod(what,...) + local m=basemethods[basemethod][what] + if m then + return m(...) + end +end +local basehash,basehashes,applied={},1,{} +local function registerbasehash(tfmdata) + local properties=tfmdata.properties + local hash=concat(applied," ") + local base=basehash[hash] + if not base then + basehashes=basehashes+1 + base=basehashes + basehash[hash]=base + end + properties.basehash=base + properties.fullname=properties.fullname.."-"..base + applied={} +end +local function registerbasefeature(feature,value) + applied[#applied+1]=feature.."="..tostring(value) +end +local trace=false +local function finalize_ligatures(tfmdata,ligatures) + local nofligatures=#ligatures + if nofligatures>0 then + local characters=tfmdata.characters + local descriptions=tfmdata.descriptions + local resources=tfmdata.resources + local unicodes=resources.unicodes + local private=resources.private + local alldone=false + while not alldone do + local done=0 + for i=1,nofligatures do + local ligature=ligatures[i] + if ligature then + local unicode,lookupdata=ligature[1],ligature[2] + if trace_ligatures_detail then + report_prepare("building % a into %a",lookupdata,unicode) + end + local size=#lookupdata + local firstcode=lookupdata[1] + local firstdata=characters[firstcode] + local okay=false + if firstdata then + local firstname="ctx_"..firstcode + for i=1,size-1 do + local firstdata=characters[firstcode] + if not firstdata then + firstcode=private + if trace_ligatures_detail then + report_prepare("defining %a as %a",firstname,firstcode) + end + unicodes[firstname]=firstcode + firstdata={ intermediate=true,ligatures={} } + characters[firstcode]=firstdata + descriptions[firstcode]={ name=firstname } + private=private+1 + end + local target + local secondcode=lookupdata[i+1] + local secondname=firstname.."_"..secondcode + if i==size-1 then + target=unicode + if not rawget(unicodes,secondname) then + unicodes[secondname]=unicode + end + okay=true + else + target=rawget(unicodes,secondname) + if not target then + break + end + end + if trace_ligatures_detail then + report_prepare("codes (%a,%a) + (%a,%a) -> %a",firstname,firstcode,secondname,secondcode,target) + end + local firstligs=firstdata.ligatures + if firstligs then + firstligs[secondcode]={ 
char=target } + else + firstdata.ligatures={ [secondcode]={ char=target } } + end + firstcode=target + firstname=secondname + end + elseif trace_ligatures_detail then + report_prepare("no glyph (%a,%a) for building %a",firstname,firstcode,target) + end + if okay then + ligatures[i]=false + done=done+1 + end + end + end + alldone=done==0 + end + if trace_ligatures_detail then + for k,v in table.sortedhash(characters) do + if v.ligatures then + table.print(v,k) + end + end + end + resources.private=private + return true + end +end +local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist) + local characters=tfmdata.characters + local descriptions=tfmdata.descriptions + local resources=tfmdata.resources + local properties=tfmdata.properties + local changed=tfmdata.changed + local lookuphash=resources.lookuphash + local lookuptypes=resources.lookuptypes + local lookuptags=resources.lookuptags + local ligatures={} + local alternate=tonumber(value) or true and 1 + local defaultalt=otf.defaultbasealternate + local trace_singles=trace_baseinit and trace_singles + local trace_alternatives=trace_baseinit and trace_alternatives + local trace_ligatures=trace_baseinit and trace_ligatures + local actions={ + substitution=function(lookupdata,lookuptags,lookupname,description,unicode) + if trace_singles then + report_substitution(feature,lookuptags,lookupname,descriptions,unicode,lookupdata) + end + changed[unicode]=lookupdata + end, + alternate=function(lookupdata,lookuptags,lookupname,description,unicode) + local replacement=lookupdata[alternate] + if replacement then + changed[unicode]=replacement + if trace_alternatives then + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"normal") + end + elseif defaultalt=="first" then + replacement=lookupdata[1] + changed[unicode]=replacement + if trace_alternatives then + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt) + end + elseif defaultalt=="last" then + replacement=lookupdata[#data] + if trace_alternatives then + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt) + end + else + if trace_alternatives then + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"unknown") + end + end + end, + ligature=function(lookupdata,lookuptags,lookupname,description,unicode) + if trace_ligatures then + report_ligature(feature,lookuptags,lookupname,descriptions,unicode,lookupdata) + end + ligatures[#ligatures+1]={ unicode,lookupdata } + end, + } + for unicode,character in next,characters do + local description=descriptions[unicode] + local lookups=description.slookups + if lookups then + for l=1,#lookuplist do + local lookupname=lookuplist[l] + local lookupdata=lookups[lookupname] + if lookupdata then + local lookuptype=lookuptypes[lookupname] + local action=actions[lookuptype] + if action then + action(lookupdata,lookuptags,lookupname,description,unicode) + end + end + end + end + local lookups=description.mlookups + if lookups then + for l=1,#lookuplist do + local lookupname=lookuplist[l] + local lookuplist=lookups[lookupname] + if lookuplist then + local lookuptype=lookuptypes[lookupname] + local action=actions[lookuptype] + if action then + for i=1,#lookuplist do + action(lookuplist[i],lookuptags,lookupname,description,unicode) + end + end + end + end + end + end + properties.hasligatures=finalize_ligatures(tfmdata,ligatures) +end +local function 
preparepositionings(tfmdata,feature,value,validlookups,lookuplist) + local characters=tfmdata.characters + local descriptions=tfmdata.descriptions + local resources=tfmdata.resources + local properties=tfmdata.properties + local lookuptags=resources.lookuptags + local sharedkerns={} + local traceindeed=trace_baseinit and trace_kerns + local haskerns=false + for unicode,character in next,characters do + local description=descriptions[unicode] + local rawkerns=description.kerns + if rawkerns then + local s=sharedkerns[rawkerns] + if s==false then + elseif s then + character.kerns=s + else + local newkerns=character.kerns + local done=false + for l=1,#lookuplist do + local lookup=lookuplist[l] + local kerns=rawkerns[lookup] + if kerns then + for otherunicode,value in next,kerns do + if value==0 then + elseif not newkerns then + newkerns={ [otherunicode]=value } + done=true + if traceindeed then + report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,value) + end + elseif not newkerns[otherunicode] then + newkerns[otherunicode]=value + done=true + if traceindeed then + report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,value) + end + end + end + end + end + if done then + sharedkerns[rawkerns]=newkerns + character.kerns=newkerns + haskerns=true + else + sharedkerns[rawkerns]=false + end + end + end + end + properties.haskerns=haskerns +end +basemethods.independent={ + preparesubstitutions=preparesubstitutions, + preparepositionings=preparepositionings, +} +local function makefake(tfmdata,name,present) + local resources=tfmdata.resources + local private=resources.private + local character={ intermediate=true,ligatures={} } + resources.unicodes[name]=private + tfmdata.characters[private]=character + tfmdata.descriptions[private]={ name=name } + resources.private=private+1 + present[name]=private + return character +end +local function make_1(present,tree,name) + for k,v in next,tree do + if k=="ligature" then + present[name]=v + else + make_1(present,v,name.."_"..k) + end + end +end +local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,done,lookuptags,lookupname) + for k,v in next,tree do + if k=="ligature" then + local character=characters[preceding] + if not character then + if trace_baseinit then + report_prepare("weird ligature in lookup %a, current %C, preceding %C",lookuptags[lookupname],v,preceding) + end + character=makefake(tfmdata,name,present) + end + local ligatures=character.ligatures + if ligatures then + ligatures[unicode]={ char=v } + else + character.ligatures={ [unicode]={ char=v } } + end + if done then + local d=done[lookupname] + if not d then + done[lookupname]={ "dummy",v } + else + d[#d+1]=v + end + end + else + local code=present[name] or unicode + local name=name.."_"..k + make_2(present,tfmdata,characters,v,name,code,k,done,lookuptags,lookupname) + end + end +end +local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist) + local characters=tfmdata.characters + local descriptions=tfmdata.descriptions + local resources=tfmdata.resources + local changed=tfmdata.changed + local lookuphash=resources.lookuphash + local lookuptypes=resources.lookuptypes + local lookuptags=resources.lookuptags + local ligatures={} + local alternate=tonumber(value) or true and 1 + local defaultalt=otf.defaultbasealternate + local trace_singles=trace_baseinit and trace_singles + local trace_alternatives=trace_baseinit and trace_alternatives + local trace_ligatures=trace_baseinit and trace_ligatures + for 
l=1,#lookuplist do + local lookupname=lookuplist[l] + local lookupdata=lookuphash[lookupname] + local lookuptype=lookuptypes[lookupname] + for unicode,data in next,lookupdata do + if lookuptype=="substitution" then + if trace_singles then + report_substitution(feature,lookuptags,lookupname,descriptions,unicode,data) + end + changed[unicode]=data + elseif lookuptype=="alternate" then + local replacement=data[alternate] + if replacement then + changed[unicode]=replacement + if trace_alternatives then + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"normal") + end + elseif defaultalt=="first" then + replacement=data[1] + changed[unicode]=replacement + if trace_alternatives then + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt) + end + elseif defaultalt=="last" then + replacement=data[#data] + if trace_alternatives then + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt) + end + else + if trace_alternatives then + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"unknown") + end + end + elseif lookuptype=="ligature" then + ligatures[#ligatures+1]={ unicode,data,lookupname } + if trace_ligatures then + report_ligature(feature,lookuptags,lookupname,descriptions,unicode,data) + end + end + end + end + local nofligatures=#ligatures + if nofligatures>0 then + local characters=tfmdata.characters + local present={} + local done=trace_baseinit and trace_ligatures and {} + for i=1,nofligatures do + local ligature=ligatures[i] + local unicode,tree=ligature[1],ligature[2] + make_1(present,tree,"ctx_"..unicode) + end + for i=1,nofligatures do + local ligature=ligatures[i] + local unicode,tree,lookupname=ligature[1],ligature[2],ligature[3] + make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,lookuptags,lookupname) + end + end +end +local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist) + local characters=tfmdata.characters + local descriptions=tfmdata.descriptions + local resources=tfmdata.resources + local properties=tfmdata.properties + local lookuphash=resources.lookuphash + local lookuptags=resources.lookuptags + local traceindeed=trace_baseinit and trace_kerns + for l=1,#lookuplist do + local lookupname=lookuplist[l] + local lookupdata=lookuphash[lookupname] + for unicode,data in next,lookupdata do + local character=characters[unicode] + local kerns=character.kerns + if not kerns then + kerns={} + character.kerns=kerns + end + if traceindeed then + for otherunicode,kern in next,data do + if not kerns[otherunicode] and kern~=0 then + kerns[otherunicode]=kern + report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,kern) + end + end + else + for otherunicode,kern in next,data do + if not kerns[otherunicode] and kern~=0 then + kerns[otherunicode]=kern + end + end + end + end + end +end +local function initializehashes(tfmdata) + nodeinitializers.features(tfmdata) +end +basemethods.shared={ + initializehashes=initializehashes, + preparesubstitutions=preparesubstitutions, + preparepositionings=preparepositionings, +} +basemethod="independent" +local function featuresinitializer(tfmdata,value) + if true then + local starttime=trace_preparing and os.clock() + local features=tfmdata.shared.features + local fullname=tfmdata.properties.fullname or "?" 
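+    -- base mode: every enabled gsub feature listed in a sequence's order is
+    -- baked into the tfm as direct substitutions/ligatures and every gpos
+    -- feature as plain character kerns, so these features need no node-mode
+    -- processing later; the applied set is appended to properties.fullname
+    -- by registerbasehash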
+ if features then + applybasemethod("initializehashes",tfmdata) + local collectlookups=otf.collectlookups + local rawdata=tfmdata.shared.rawdata + local properties=tfmdata.properties + local script=properties.script + local language=properties.language + local basesubstitutions=rawdata.resources.features.gsub + local basepositionings=rawdata.resources.features.gpos + if basesubstitutions or basepositionings then + local sequences=tfmdata.resources.sequences + for s=1,#sequences do + local sequence=sequences[s] + local sfeatures=sequence.features + if sfeatures then + local order=sequence.order + if order then + for i=1,#order do + local feature=order[i] + local value=features[feature] + if value then + local validlookups,lookuplist=collectlookups(rawdata,feature,script,language) + if not validlookups then + elseif basesubstitutions and basesubstitutions[feature] then + if trace_preparing then + report_prepare("filtering base %s feature %a for %a with value %a","sub",feature,fullname,value) + end + applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist) + registerbasefeature(feature,value) + elseif basepositionings and basepositionings[feature] then + if trace_preparing then + report_prepare("filtering base %a feature %a for %a with value %a","pos",feature,fullname,value) + end + applybasemethod("preparepositionings",tfmdata,feature,value,validlookups,lookuplist) + registerbasefeature(feature,value) + end + end + end + end + end + end + end + registerbasehash(tfmdata) + end + if trace_preparing then + report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,fullname) + end + end +end +registerotffeature { + name="features", + description="features", + default=true, + initializers={ + base=featuresinitializer, + } +} +directives.register("fonts.otf.loader.basemethod",function(v) + if basemethods[v] then + basemethod=v + end +end) + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['node-inj']={ + version=1.001, + comment="companion to node-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files", +} +local next=next +local utfchar=utf.char +local trace_injections=false trackers.register("nodes.injections",function(v) trace_injections=v end) +local report_injections=logs.reporter("nodes","injections") +local attributes,nodes,node=attributes,nodes,node +fonts=fonts +local fontdata=fonts.hashes.identifiers +nodes.injections=nodes.injections or {} +local injections=nodes.injections +local nodecodes=nodes.nodecodes +local glyph_code=nodecodes.glyph +local kern_code=nodecodes.kern +local nuts=nodes.nuts +local nodepool=nuts.pool +local newkern=nodepool.kern +local tonode=nuts.tonode +local tonut=nuts.tonut +local getfield=nuts.getfield +local getnext=nuts.getnext +local getprev=nuts.getprev +local getid=nuts.getid +local getattr=nuts.getattr +local getfont=nuts.getfont +local getsubtype=nuts.getsubtype +local getchar=nuts.getchar +local setfield=nuts.setfield +local setattr=nuts.setattr +local traverse_id=nuts.traverse_id +local insert_node_before=nuts.insert_before +local insert_node_after=nuts.insert_after +local a_kernpair=attributes.private('kernpair') +local a_ligacomp=attributes.private('ligacomp') +local a_markbase=attributes.private('markbase') +local a_markmark=attributes.private('markmark') +local a_markdone=attributes.private('markdone') +local 
a_cursbase=attributes.private('cursbase') +local a_curscurs=attributes.private('curscurs') +local a_cursdone=attributes.private('cursdone') +local unsetvalue=attributes.unsetvalue +function injections.installnewkern(nk) + newkern=nk or newkern +end +local cursives={} +local marks={} +local kerns={} +function injections.reset(n) +end +function injections.setligaindex(n,index) + setattr(n,a_ligacomp,index) +end +function injections.getligaindex(n,default) + return getattr(n,a_ligacomp) or default +end +function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext) + local dx,dy=factor*(exit[1]-entry[1]),factor*(exit[2]-entry[2]) + local ws,wn=tfmstart.width,tfmnext.width + local bound=#cursives+1 + setattr(start,a_cursbase,bound) + setattr(nxt,a_curscurs,bound) + cursives[bound]={ rlmode,dx,dy,ws,wn } + return dx,dy,bound +end +function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr) + local x,y,w,h=factor*spec[1],factor*spec[2],factor*spec[3],factor*spec[4] + if x~=0 or w~=0 or y~=0 or h~=0 then + local bound=getattr(current,a_kernpair) + if bound then + local kb=kerns[bound] + kb[2],kb[3],kb[4],kb[5]=(kb[2] or 0)+x,(kb[3] or 0)+y,(kb[4] or 0)+w,(kb[5] or 0)+h + else + bound=#kerns+1 + setattr(current,a_kernpair,bound) + kerns[bound]={ rlmode,x,y,w,h,r2lflag,tfmchr.width } + end + return x,y,w,h,bound + end + return x,y,w,h +end +function injections.setkern(current,factor,rlmode,x,tfmchr) + local dx=factor*x + if dx~=0 then + local bound=#kerns+1 + setattr(current,a_kernpair,bound) + kerns[bound]={ rlmode,dx } + return dx,bound + else + return 0,0 + end +end +function injections.setmark(start,base,factor,rlmode,ba,ma) + local dx,dy=factor*(ba[1]-ma[1]),factor*(ba[2]-ma[2]) + local bound=getattr(base,a_markbase) + local index=1 + if bound then + local mb=marks[bound] + if mb then + index=#mb+1 + mb[index]={ dx,dy,rlmode } + setattr(start,a_markmark,bound) + setattr(start,a_markdone,index) + return dx,dy,bound + else + report_injections("possible problem, %U is base mark without data (id %a)",getchar(base),bound) + end + end + index=index or 1 + bound=#marks+1 + setattr(base,a_markbase,bound) + setattr(start,a_markmark,bound) + setattr(start,a_markdone,index) + marks[bound]={ [index]={ dx,dy,rlmode } } + return dx,dy,bound +end +local function dir(n) + return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset" +end +local function trace(head) + report_injections("begin run") + for n in traverse_id(glyph_code,head) do + if getsubtype(n)<256 then + local kp=getattr(n,a_kernpair) + local mb=getattr(n,a_markbase) + local mm=getattr(n,a_markmark) + local md=getattr(n,a_markdone) + local cb=getattr(n,a_cursbase) + local cc=getattr(n,a_curscurs) + local char=getchar(n) + report_injections("font %s, char %U, glyph %c",getfont(n),char,char) + if kp then + local k=kerns[kp] + if k[3] then + report_injections(" pairkern: dir %a, x %p, y %p, w %p, h %p",dir(k[1]),k[2],k[3],k[4],k[5]) + else + report_injections(" kern: dir %a, dx %p",dir(k[1]),k[2]) + end + end + if mb then + report_injections(" markbase: bound %a",mb) + end + if mm then + local m=marks[mm] + if mb then + local m=m[mb] + if m then + report_injections(" markmark: bound %a, index %a, dx %p, dy %p",mm,md,m[1],m[2]) + else + report_injections(" markmark: bound %a, missing index",mm) + end + else + m=m[1] + report_injections(" markmark: bound %a, dx %p, dy %p",mm,m and m[1],m and m[2]) + end + end + if cb then + report_injections(" cursbase: bound %a",cb) + end + if cc then + local c=cursives[cc] 
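+      -- a cursive record, as stored by injections.setcursive above, is
+      -- { rlmode, dx, dy, startwidth, nextwidth }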
+ report_injections(" curscurs: bound %a, dir %a, dx %p, dy %p",cc,dir(c[1]),c[2],c[3]) + end + end + end + report_injections("end run") +end +local function show_result(head) + local current=head + local skipping=false + while current do + local id=getid(current) + if id==glyph_code then + report_injections("char: %C, width %p, xoffset %p, yoffset %p", + getchar(current),getfield(current,"width"),getfield(current,"xoffset"),getfield(current,"yoffset")) + skipping=false + elseif id==kern_code then + report_injections("kern: %p",getfield(current,"kern")) + skipping=false + elseif not skipping then + report_injections() + skipping=true + end + current=getnext(current) + end +end +function injections.handler(head,where,keep) + head=tonut(head) + local has_marks,has_cursives,has_kerns=next(marks),next(cursives),next(kerns) + if has_marks or has_cursives then + if trace_injections then + trace(head) + end + local done,ky,rl,valid,cx,wx,mk,nofvalid=false,{},{},{},{},{},{},0 + if has_kerns then + local nf,tm=nil,nil + for n in traverse_id(glyph_code,head) do + if getsubtype(n)<256 then + nofvalid=nofvalid+1 + valid[nofvalid]=n + local f=getfont(n) + if f~=nf then + nf=f + tm=fontdata[nf].resources.marks + end + if tm then + mk[n]=tm[getchar(n)] + end + local k=getattr(n,a_kernpair) + if k then + local kk=kerns[k] + if kk then + local x,y,w,h=kk[2] or 0,kk[3] or 0,kk[4] or 0,kk[5] or 0 + local dy=y-h + if dy~=0 then + ky[n]=dy + end + if w~=0 or x~=0 then + wx[n]=kk + end + rl[n]=kk[1] + end + end + end + end + else + local nf,tm=nil,nil + for n in traverse_id(glyph_code,head) do + if getsubtype(n)<256 then + nofvalid=nofvalid+1 + valid[nofvalid]=n + local f=getfont(n) + if f~=nf then + nf=f + tm=fontdata[nf].resources.marks + end + if tm then + mk[n]=tm[getchar(n)] + end + end + end + end + if nofvalid>0 then + local cx={} + if has_kerns and next(ky) then + for n,k in next,ky do + setfield(n,"yoffset",k) + end + end + if has_cursives then + local p_cursbase,p=nil,nil + local t,d,maxt={},{},0 + for i=1,nofvalid do + local n=valid[i] + if not mk[n] then + local n_cursbase=getattr(n,a_cursbase) + if p_cursbase then + local n_curscurs=getattr(n,a_curscurs) + if p_cursbase==n_curscurs then + local c=cursives[n_curscurs] + if c then + local rlmode,dx,dy,ws,wn=c[1],c[2],c[3],c[4],c[5] + if rlmode>=0 then + dx=dx-ws + else + dx=dx+wn + end + if dx~=0 then + cx[n]=dx + rl[n]=rlmode + end + dy=-dy + maxt=maxt+1 + t[maxt]=p + d[maxt]=dy + else + maxt=0 + end + end + elseif maxt>0 then + local ny=getfield(n,"yoffset") + for i=maxt,1,-1 do + ny=ny+d[i] + local ti=t[i] + setfield(ti,"yoffset",getfield(ti,"yoffset")+ny) + end + maxt=0 + end + if not n_cursbase and maxt>0 then + local ny=getfield(n,"yoffset") + for i=maxt,1,-1 do + ny=ny+d[i] + local ti=t[i] + setfield(ti,"yoffset",ny) + end + maxt=0 + end + p_cursbase,p=n_cursbase,n + end + end + if maxt>0 then + local ny=getfield(n,"yoffset") + for i=maxt,1,-1 do + ny=ny+d[i] + local ti=t[i] + setfield(ti,"yoffset",ny) + end + maxt=0 + end + if not keep then + cursives={} + end + end + if has_marks then + for i=1,nofvalid do + local p=valid[i] + local p_markbase=getattr(p,a_markbase) + if p_markbase then + local mrks=marks[p_markbase] + local nofmarks=#mrks + for n in traverse_id(glyph_code,getnext(p)) do + local n_markmark=getattr(n,a_markmark) + if p_markbase==n_markmark then + local index=getattr(n,a_markdone) or 1 + local d=mrks[index] + if d then + local rlmode=d[3] + local k=wx[p] + local px=getfield(p,"xoffset") + local ox=0 + if k then + local x=k[2] + 
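+              -- k is a pair kern as stored by injections.setpair:
+              -- { rlmode, x, y, w, h, r2lflag, width }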
local w=k[4] + if w then + if rlmode and rlmode>=0 then + ox=px-getfield(p,"width")+d[1]-(w-x) + else + ox=px-d[1]-x + end + else + if rlmode and rlmode>=0 then + ox=px-getfield(p,"width")+d[1] + else + ox=px-d[1]-x + end + end + else + local wp=getfield(p,"width") + local wn=getfield(n,"width") + if rlmode and rlmode>=0 then + ox=px-wp+d[1] + else + ox=px-d[1] + end + if wn~=0 then + insert_node_before(head,n,newkern(-wn/2)) + insert_node_after(head,n,newkern(-wn/2)) + end + end + setfield(n,"xoffset",ox) + local py=getfield(p,"yoffset") + local oy=0 + if mk[p] then + oy=py+d[2] + else + oy=getfield(n,"yoffset")+py+d[2] + end + setfield(n,"yoffset",oy) + if nofmarks==1 then + break + else + nofmarks=nofmarks-1 + end + end + elseif not n_markmark then + break + else + end + end + end + end + if not keep then + marks={} + end + end + if next(wx) then + for n,k in next,wx do + local x=k[2] + local w=k[4] + if w then + local rl=k[1] + local wx=w-x + if rl<0 then + if wx~=0 then + insert_node_before(head,n,newkern(wx)) + end + if x~=0 then + insert_node_after (head,n,newkern(x)) + end + else + if x~=0 then + insert_node_before(head,n,newkern(x)) + end + if wx~=0 then + insert_node_after (head,n,newkern(wx)) + end + end + elseif x~=0 then + insert_node_before(head,n,newkern(x)) + end + end + end + if next(cx) then + for n,k in next,cx do + if k~=0 then + local rln=rl[n] + if rln and rln<0 then + insert_node_before(head,n,newkern(-k)) + else + insert_node_before(head,n,newkern(k)) + end + end + end + end + if not keep then + kerns={} + end + return tonode(head),true + elseif not keep then + kerns,cursives,marks={},{},{} + end + elseif has_kerns then + if trace_injections then + trace(head) + end + for n in traverse_id(glyph_code,head) do + if getsubtype(n)<256 then + local k=getattr(n,a_kernpair) + if k then + local kk=kerns[k] + if kk then + local rl,x,y,w=kk[1],kk[2] or 0,kk[3],kk[4] + if y and y~=0 then + setfield(n,"yoffset",y) + end + if w then + local wx=w-x + if rl<0 then + if wx~=0 then + insert_node_before(head,n,newkern(wx)) + end + if x~=0 then + insert_node_after (head,n,newkern(x)) + end + else + if x~=0 then + insert_node_before(head,n,newkern(x)) + end + if wx~=0 then + insert_node_after(head,n,newkern(wx)) + end + end + else + if x~=0 then + insert_node_before(head,n,newkern(x)) + end + end + end + end + end + end + if not keep then + kerns={} + end + return tonode(head),true + else + end + return tonode(head),false +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-otx']={ + version=1.001, + comment="companion to font-otf.lua (analysing)", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local type=type +if not trackers then trackers={ register=function() end } end +local fonts,nodes,node=fonts,nodes,node +local allocate=utilities.storage.allocate +local otf=fonts.handlers.otf +local analyzers=fonts.analyzers +local initializers=allocate() +local methods=allocate() +analyzers.initializers=initializers +analyzers.methods=methods +analyzers.useunicodemarks=false +local a_state=attributes.private('state') +local nuts=nodes.nuts +local tonut=nuts.tonut +local getfield=nuts.getfield +local getnext=nuts.getnext +local getprev=nuts.getprev +local getid=nuts.getid +local getprop=nuts.getprop +local setprop=nuts.setprop +local getfont=nuts.getfont +local getsubtype=nuts.getsubtype +local 
getchar=nuts.getchar +local traverse_id=nuts.traverse_id +local traverse_node_list=nuts.traverse +local end_of_math=nuts.end_of_math +local nodecodes=nodes.nodecodes +local glyph_code=nodecodes.glyph +local disc_code=nodecodes.disc +local math_code=nodecodes.math +local fontdata=fonts.hashes.identifiers +local categories=characters and characters.categories or {} +local otffeatures=fonts.constructors.newfeatures("otf") +local registerotffeature=otffeatures.register +local s_init=1 local s_rphf=7 +local s_medi=2 local s_half=8 +local s_fina=3 local s_pref=9 +local s_isol=4 local s_blwf=10 +local s_mark=5 local s_pstf=11 +local s_rest=6 +local states={ + init=s_init, + medi=s_medi, + fina=s_fina, + isol=s_isol, + mark=s_mark, + rest=s_rest, + rphf=s_rphf, + half=s_half, + pref=s_pref, + blwf=s_blwf, + pstf=s_pstf, +} +local features={ + init=s_init, + medi=s_medi, + fina=s_fina, + isol=s_isol, + rphf=s_rphf, + half=s_half, + pref=s_pref, + blwf=s_blwf, + pstf=s_pstf, +} +analyzers.states=states +analyzers.features=features +function analyzers.setstate(head,font) + local useunicodemarks=analyzers.useunicodemarks + local tfmdata=fontdata[font] + local descriptions=tfmdata.descriptions + local first,last,current,n,done=nil,nil,head,0,false + current=tonut(current) + while current do + local id=getid(current) + if id==glyph_code and getfont(current)==font then + done=true + local char=getchar(current) + local d=descriptions[char] + if d then + if d.class=="mark" or (useunicodemarks and categories[char]=="mn") then + done=true + setprop(current,a_state,s_mark) + elseif n==0 then + first,last,n=current,current,1 + setprop(current,a_state,s_init) + else + last,n=current,n+1 + setprop(current,a_state,s_medi) + end + else + if first and first==last then + setprop(last,a_state,s_isol) + elseif last then + setprop(last,a_state,s_fina) + end + first,last,n=nil,nil,0 + end + elseif id==disc_code then + setprop(current,a_state,s_medi) + last=current + else + if first and first==last then + setprop(last,a_state,s_isol) + elseif last then + setprop(last,a_state,s_fina) + end + first,last,n=nil,nil,0 + if id==math_code then + current=end_of_math(current) + end + end + current=getnext(current) + end + if first and first==last then + setprop(last,a_state,s_isol) + elseif last then + setprop(last,a_state,s_fina) + end + return head,done +end +local function analyzeinitializer(tfmdata,value) + local script,language=otf.scriptandlanguage(tfmdata) + local action=initializers[script] + if not action then + elseif type(action)=="function" then + return action(tfmdata,value) + else + local action=action[language] + if action then + return action(tfmdata,value) + end + end +end +local function analyzeprocessor(head,font,attr) + local tfmdata=fontdata[font] + local script,language=otf.scriptandlanguage(tfmdata,attr) + local action=methods[script] + if not action then + elseif type(action)=="function" then + return action(head,font,attr) + else + action=action[language] + if action then + return action(head,font,attr) + end + end + return head,false +end +registerotffeature { + name="analyze", + description="analysis of character classes", + default=true, + initializers={ + node=analyzeinitializer, + }, + processors={ + position=1, + node=analyzeprocessor, + } +} +methods.latn=analyzers.setstate +local tatweel=0x0640 +local zwnj=0x200C +local zwj=0x200D +local isolated={ + [0x0600]=true,[0x0601]=true,[0x0602]=true,[0x0603]=true, + [0x0604]=true, + [0x0608]=true,[0x060B]=true,[0x0621]=true,[0x0674]=true, + [0x06DD]=true, 
+ [0x0856]=true,[0x0858]=true,[0x0857]=true, + [0x07FA]=true, + [zwnj]=true, + [0x08AD]=true, +} +local final={ + [0x0622]=true,[0x0623]=true,[0x0624]=true,[0x0625]=true, + [0x0627]=true,[0x0629]=true,[0x062F]=true,[0x0630]=true, + [0x0631]=true,[0x0632]=true,[0x0648]=true,[0x0671]=true, + [0x0672]=true,[0x0673]=true,[0x0675]=true,[0x0676]=true, + [0x0677]=true,[0x0688]=true,[0x0689]=true,[0x068A]=true, + [0x068B]=true,[0x068C]=true,[0x068D]=true,[0x068E]=true, + [0x068F]=true,[0x0690]=true,[0x0691]=true,[0x0692]=true, + [0x0693]=true,[0x0694]=true,[0x0695]=true,[0x0696]=true, + [0x0697]=true,[0x0698]=true,[0x0699]=true,[0x06C0]=true, + [0x06C3]=true,[0x06C4]=true,[0x06C5]=true,[0x06C6]=true, + [0x06C7]=true,[0x06C8]=true,[0x06C9]=true,[0x06CA]=true, + [0x06CB]=true,[0x06CD]=true,[0x06CF]=true,[0x06D2]=true, + [0x06D3]=true,[0x06D5]=true,[0x06EE]=true,[0x06EF]=true, + [0x0759]=true,[0x075A]=true,[0x075B]=true,[0x076B]=true, + [0x076C]=true,[0x0771]=true,[0x0773]=true,[0x0774]=true, + [0x0778]=true,[0x0779]=true, + [0x08AA]=true,[0x08AB]=true,[0x08AC]=true, + [0xFEF5]=true,[0xFEF7]=true,[0xFEF9]=true,[0xFEFB]=true, + [0x0710]=true,[0x0715]=true,[0x0716]=true,[0x0717]=true, + [0x0718]=true,[0x0719]=true,[0x0728]=true,[0x072A]=true, + [0x072C]=true,[0x071E]=true, + [0x072F]=true,[0x074D]=true, + [0x0840]=true,[0x0849]=true,[0x0854]=true,[0x0846]=true, + [0x084F]=true, + [0x08AE]=true,[0x08B1]=true,[0x08B2]=true, +} +local medial={ + [0x0626]=true,[0x0628]=true,[0x062A]=true,[0x062B]=true, + [0x062C]=true,[0x062D]=true,[0x062E]=true,[0x0633]=true, + [0x0634]=true,[0x0635]=true,[0x0636]=true,[0x0637]=true, + [0x0638]=true,[0x0639]=true,[0x063A]=true,[0x063B]=true, + [0x063C]=true,[0x063D]=true,[0x063E]=true,[0x063F]=true, + [0x0641]=true,[0x0642]=true,[0x0643]=true, + [0x0644]=true,[0x0645]=true,[0x0646]=true,[0x0647]=true, + [0x0649]=true,[0x064A]=true,[0x066E]=true,[0x066F]=true, + [0x0678]=true,[0x0679]=true,[0x067A]=true,[0x067B]=true, + [0x067C]=true,[0x067D]=true,[0x067E]=true,[0x067F]=true, + [0x0680]=true,[0x0681]=true,[0x0682]=true,[0x0683]=true, + [0x0684]=true,[0x0685]=true,[0x0686]=true,[0x0687]=true, + [0x069A]=true,[0x069B]=true,[0x069C]=true,[0x069D]=true, + [0x069E]=true,[0x069F]=true,[0x06A0]=true,[0x06A1]=true, + [0x06A2]=true,[0x06A3]=true,[0x06A4]=true,[0x06A5]=true, + [0x06A6]=true,[0x06A7]=true,[0x06A8]=true,[0x06A9]=true, + [0x06AA]=true,[0x06AB]=true,[0x06AC]=true,[0x06AD]=true, + [0x06AE]=true,[0x06AF]=true,[0x06B0]=true,[0x06B1]=true, + [0x06B2]=true,[0x06B3]=true,[0x06B4]=true,[0x06B5]=true, + [0x06B6]=true,[0x06B7]=true,[0x06B8]=true,[0x06B9]=true, + [0x06BA]=true,[0x06BB]=true,[0x06BC]=true,[0x06BD]=true, + [0x06BE]=true,[0x06BF]=true,[0x06C1]=true,[0x06C2]=true, + [0x06CC]=true,[0x06CE]=true,[0x06D0]=true,[0x06D1]=true, + [0x06FA]=true,[0x06FB]=true,[0x06FC]=true,[0x06FF]=true, + [0x0750]=true,[0x0751]=true,[0x0752]=true,[0x0753]=true, + [0x0754]=true,[0x0755]=true,[0x0756]=true,[0x0757]=true, + [0x0758]=true,[0x075C]=true,[0x075D]=true,[0x075E]=true, + [0x075F]=true,[0x0760]=true,[0x0761]=true,[0x0762]=true, + [0x0763]=true,[0x0764]=true,[0x0765]=true,[0x0766]=true, + [0x0767]=true,[0x0768]=true,[0x0769]=true,[0x076A]=true, + [0x076D]=true,[0x076E]=true,[0x076F]=true,[0x0770]=true, + [0x0772]=true,[0x0775]=true,[0x0776]=true,[0x0777]=true, + [0x077A]=true,[0x077B]=true,[0x077C]=true,[0x077D]=true, + [0x077E]=true,[0x077F]=true, + [0x08A0]=true,[0x08A2]=true,[0x08A4]=true,[0x08A5]=true, + [0x08A6]=true,[0x0620]=true,[0x08A8]=true,[0x08A9]=true, + 
[0x08A7]=true,[0x08A3]=true, + [0x0712]=true,[0x0713]=true,[0x0714]=true,[0x071A]=true, + [0x071B]=true,[0x071C]=true,[0x071D]=true,[0x071F]=true, + [0x0720]=true,[0x0721]=true,[0x0722]=true,[0x0723]=true, + [0x0724]=true,[0x0725]=true,[0x0726]=true,[0x0727]=true, + [0x0729]=true,[0x072B]=true,[0x072D]=true,[0x072E]=true, + [0x074E]=true,[0x074F]=true, + [0x0841]=true,[0x0842]=true,[0x0843]=true,[0x0844]=true, + [0x0845]=true,[0x0847]=true,[0x0848]=true,[0x0855]=true, + [0x0851]=true,[0x084E]=true,[0x084D]=true,[0x084A]=true, + [0x084B]=true,[0x084C]=true,[0x0850]=true,[0x0852]=true, + [0x0853]=true, + [0x07D7]=true,[0x07E8]=true,[0x07D9]=true,[0x07EA]=true, + [0x07CA]=true,[0x07DB]=true,[0x07CC]=true,[0x07DD]=true, + [0x07CE]=true,[0x07DF]=true,[0x07D4]=true,[0x07E5]=true, + [0x07E9]=true,[0x07E7]=true,[0x07E3]=true,[0x07E2]=true, + [0x07E0]=true,[0x07E1]=true,[0x07DE]=true,[0x07DC]=true, + [0x07D1]=true,[0x07DA]=true,[0x07D8]=true,[0x07D6]=true, + [0x07D2]=true,[0x07D0]=true,[0x07CF]=true,[0x07CD]=true, + [0x07CB]=true,[0x07D3]=true,[0x07E4]=true,[0x07D5]=true, + [0x07E6]=true, + [tatweel]=true,[zwj]=true, + [0x08A1]=true,[0x08AF]=true,[0x08B0]=true, +} +local arab_warned={} +local function warning(current,what) + local char=getchar(current) + if not arab_warned[char] then + log.report("analyze","arab: character %C has no %a class",char,what) + arab_warned[char]=true + end +end +local function finish(first,last) + if last then + if first==last then + local fc=getchar(first) + if medial[fc] or final[fc] then + setprop(first,a_state,s_isol) + else + warning(first,"isol") + setprop(first,a_state,s_error) + end + else + local lc=getchar(last) + if medial[lc] or final[lc] then + setprop(last,a_state,s_fina) + else + warning(last,"fina") + setprop(last,a_state,s_error) + end + end + first,last=nil,nil + elseif first then + local fc=getchar(first) + if medial[fc] or final[fc] then + setprop(first,a_state,s_isol) + else + warning(first,"isol") + setprop(first,a_state,s_error) + end + first=nil + end + return first,last +end +function methods.arab(head,font,attr) + local useunicodemarks=analyzers.useunicodemarks + local tfmdata=fontdata[font] + local marks=tfmdata.resources.marks + local first,last,current,done=nil,nil,head,false + current=tonut(current) + while current do + local id=getid(current) + if id==glyph_code and getfont(current)==font and getsubtype(current)<256 and not getprop(current,a_state) then + done=true + local char=getchar(current) + if marks[char] or (useunicodemarks and categories[char]=="mn") then + setprop(current,a_state,s_mark) + elseif isolated[char] then + first,last=finish(first,last) + setprop(current,a_state,s_isol) + first,last=nil,nil + elseif not first then + if medial[char] then + setprop(current,a_state,s_init) + first,last=first or current,current + elseif final[char] then + setprop(current,a_state,s_isol) + first,last=nil,nil + else + first,last=finish(first,last) + end + elseif medial[char] then + first,last=first or current,current + setprop(current,a_state,s_medi) + elseif final[char] then + if getprop(last,a_state)~=s_init then + setprop(last,a_state,s_medi) + end + setprop(current,a_state,s_fina) + first,last=nil,nil + elseif char>=0x0600 and char<=0x06FF then + setprop(current,a_state,s_rest) + first,last=finish(first,last) + else + first,last=finish(first,last) + end + else + if first or last then + first,last=finish(first,last) + end + if id==math_code then + current=end_of_math(current) + end + end + current=getnext(current) + end + if first or last 
then + finish(first,last) + end + return head,done +end +methods.syrc=methods.arab +methods.mand=methods.arab +methods.nko=methods.arab +directives.register("otf.analyze.useunicodemarks",function(v) + analyzers.useunicodemarks=v +end) + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-otn']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files", +} +local concat,insert,remove=table.concat,table.insert,table.remove +local gmatch,gsub,find,match,lower,strip=string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip +local type,next,tonumber,tostring=type,next,tonumber,tostring +local lpegmatch=lpeg.match +local random=math.random +local formatters=string.formatters +local logs,trackers,nodes,attributes=logs,trackers,nodes,attributes +local registertracker=trackers.register +local fonts=fonts +local otf=fonts.handlers.otf +local trace_lookups=false registertracker("otf.lookups",function(v) trace_lookups=v end) +local trace_singles=false registertracker("otf.singles",function(v) trace_singles=v end) +local trace_multiples=false registertracker("otf.multiples",function(v) trace_multiples=v end) +local trace_alternatives=false registertracker("otf.alternatives",function(v) trace_alternatives=v end) +local trace_ligatures=false registertracker("otf.ligatures",function(v) trace_ligatures=v end) +local trace_contexts=false registertracker("otf.contexts",function(v) trace_contexts=v end) +local trace_marks=false registertracker("otf.marks",function(v) trace_marks=v end) +local trace_kerns=false registertracker("otf.kerns",function(v) trace_kerns=v end) +local trace_cursive=false registertracker("otf.cursive",function(v) trace_cursive=v end) +local trace_preparing=false registertracker("otf.preparing",function(v) trace_preparing=v end) +local trace_bugs=false registertracker("otf.bugs",function(v) trace_bugs=v end) +local trace_details=false registertracker("otf.details",function(v) trace_details=v end) +local trace_applied=false registertracker("otf.applied",function(v) trace_applied=v end) +local trace_steps=false registertracker("otf.steps",function(v) trace_steps=v end) +local trace_skips=false registertracker("otf.skips",function(v) trace_skips=v end) +local trace_directions=false registertracker("otf.directions",function(v) trace_directions=v end) +local report_direct=logs.reporter("fonts","otf direct") +local report_subchain=logs.reporter("fonts","otf subchain") +local report_chain=logs.reporter("fonts","otf chain") +local report_process=logs.reporter("fonts","otf process") +local report_prepare=logs.reporter("fonts","otf prepare") +local report_warning=logs.reporter("fonts","otf warning") +registertracker("otf.verbose_chain",function(v) otf.setcontextchain(v and "verbose") end) +registertracker("otf.normal_chain",function(v) otf.setcontextchain(v and "normal") end) +registertracker("otf.replacements","otf.singles,otf.multiples,otf.alternatives,otf.ligatures") +registertracker("otf.positions","otf.marks,otf.kerns,otf.cursive") +registertracker("otf.actions","otf.replacements,otf.positions") +registertracker("otf.injections","nodes.injections") +registertracker("*otf.sample","otf.steps,otf.actions,otf.analyzing") +local nuts=nodes.nuts +local tonode=nuts.tonode +local tonut=nuts.tonut +local getfield=nuts.getfield +local setfield=nuts.setfield 
+local getnext=nuts.getnext +local getprev=nuts.getprev +local getid=nuts.getid +local getattr=nuts.getattr +local setattr=nuts.setattr +local getprop=nuts.getprop +local setprop=nuts.setprop +local getfont=nuts.getfont +local getsubtype=nuts.getsubtype +local getchar=nuts.getchar +local insert_node_after=nuts.insert_after +local delete_node=nuts.delete +local copy_node=nuts.copy +local find_node_tail=nuts.tail +local flush_node_list=nuts.flush_list +local end_of_math=nuts.end_of_math +local setmetatableindex=table.setmetatableindex +local zwnj=0x200C +local zwj=0x200D +local wildcard="*" +local default="dflt" +local nodecodes=nodes.nodecodes +local whatcodes=nodes.whatcodes +local glyphcodes=nodes.glyphcodes +local disccodes=nodes.disccodes +local glyph_code=nodecodes.glyph +local glue_code=nodecodes.glue +local disc_code=nodecodes.disc +local whatsit_code=nodecodes.whatsit +local math_code=nodecodes.math +local dir_code=whatcodes.dir +local localpar_code=whatcodes.localpar +local discretionary_code=disccodes.discretionary +local ligature_code=glyphcodes.ligature +local privateattribute=attributes.private +local a_state=privateattribute('state') +local a_cursbase=privateattribute('cursbase') +local injections=nodes.injections +local setmark=injections.setmark +local setcursive=injections.setcursive +local setkern=injections.setkern +local setpair=injections.setpair +local resetinjection=injections.reset +local setligaindex=injections.setligaindex +local getligaindex=injections.getligaindex +local cursonce=true +local fonthashes=fonts.hashes +local fontdata=fonthashes.identifiers +local otffeatures=fonts.constructors.newfeatures("otf") +local registerotffeature=otffeatures.register +local onetimemessage=fonts.loggers.onetimemessage or function() end +otf.defaultnodealternate="none" +local tfmdata=false +local characters=false +local descriptions=false +local resources=false +local marks=false +local currentfont=false +local lookuptable=false +local anchorlookups=false +local lookuptypes=false +local lookuptags=false +local handlers={} +local rlmode=0 +local featurevalue=false +local checkstep=(nodes and nodes.tracers and nodes.tracers.steppers.check) or function() end +local registerstep=(nodes and nodes.tracers and nodes.tracers.steppers.register) or function() end +local registermessage=(nodes and nodes.tracers and nodes.tracers.steppers.message) or function() end +local function logprocess(...) + if trace_steps then + registermessage(...) + end + report_direct(...) +end +local function logwarning(...) + report_direct(...) 
+end +local f_unicode=formatters["%U"] +local f_uniname=formatters["%U (%s)"] +local f_unilist=formatters["% t (% t)"] +local function gref(n) + if type(n)=="number" then + local description=descriptions[n] + local name=description and description.name + if name then + return f_uniname(n,name) + else + return f_unicode(n) + end + elseif n then + local num,nam={},{} + for i=1,#n do + local ni=n[i] + if tonumber(ni) then + local di=descriptions[ni] + num[i]=f_unicode(ni) + nam[i]=di and di.name or "-" + end + end + return f_unilist(num,nam) + else + return "" + end +end +local function cref(kind,chainname,chainlookupname,lookupname,index) + if index then + return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookuptags[lookupname],index) + elseif lookupname then + return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookuptags[lookupname]) + elseif chainlookupname then + return formatters["feature %a, chain %a, sub %a"](kind,lookuptags[chainname],lookuptags[chainlookupname]) + elseif chainname then + return formatters["feature %a, chain %a"](kind,lookuptags[chainname]) + else + return formatters["feature %a"](kind) + end +end +local function pref(kind,lookupname) + return formatters["feature %a, lookup %a"](kind,lookuptags[lookupname]) +end +local function copy_glyph(g) + local components=getfield(g,"components") + if components then + setfield(g,"components",nil) + local n=copy_node(g) + setfield(g,"components",components) + return n + else + return copy_node(g) + end +end +local function markstoligature(kind,lookupname,head,start,stop,char) + if start==stop and getchar(start)==char then + return head,start + else + local prev=getprev(start) + local next=getnext(stop) + setfield(start,"prev",nil) + setfield(stop,"next",nil) + local base=copy_glyph(start) + if head==start then + head=base + end + resetinjection(base) + setfield(base,"char",char) + setfield(base,"subtype",ligature_code) + setfield(base,"components",start) + if prev then + setfield(prev,"next",base) + end + if next then + setfield(next,"prev",base) + end + setfield(base,"next",next) + setfield(base,"prev",prev) + return head,base + end +end +local function getcomponentindex(start) + if getid(start)~=glyph_code then + return 0 + elseif getsubtype(start)==ligature_code then + local i=0 + local components=getfield(start,"components") + while components do + i=i+getcomponentindex(components) + components=getnext(components) + end + return i + elseif not marks[getchar(start)] then + return 1 + else + return 0 + end +end +local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound) + if start==stop and getchar(start)==char then + resetinjection(start) + setfield(start,"char",char) + return head,start + end + local prev=getprev(start) + local next=getnext(stop) + setfield(start,"prev",nil) + setfield(stop,"next",nil) + local base=copy_glyph(start) + if start==head then + head=base + end + resetinjection(base) + setfield(base,"char",char) + setfield(base,"subtype",ligature_code) + setfield(base,"components",start) + if prev then + setfield(prev,"next",base) + end + if next then + setfield(next,"prev",base) + end + setfield(base,"next",next) + setfield(base,"prev",prev) + if not discfound then + local deletemarks=markflag~="mark" + local components=start + local baseindex=0 + local componentindex=0 + local head=base + local current=base + while start do + local char=getchar(start) + if not marks[char] then + 
baseindex=baseindex+componentindex + componentindex=getcomponentindex(start) + elseif not deletemarks then + setligaindex(start,baseindex+getligaindex(start,componentindex)) + if trace_marks then + logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),getligaindex(start)) + end + head,current=insert_node_after(head,current,copy_node(start)) + elseif trace_marks then + logwarning("%s: delete mark %s",pref(kind,lookupname),gref(char)) + end + start=getnext(start) + end + local start=getnext(current) + while start and getid(start)==glyph_code do + local char=getchar(start) + if marks[char] then + setligaindex(start,baseindex+getligaindex(start,componentindex)) + if trace_marks then + logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),getligaindex(start)) + end + else + break + end + start=getnext(start) + end + end + return head,base +end +function handlers.gsub_single(head,start,kind,lookupname,replacement) + if trace_singles then + logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(getchar(start)),gref(replacement)) + end + resetinjection(start) + setfield(start,"char",replacement) + return head,start,true +end +local function get_alternative_glyph(start,alternatives,value,trace_alternatives) + local n=#alternatives + if value=="random" then + local r=random(1,n) + return alternatives[r],trace_alternatives and formatters["value %a, taking %a"](value,r) + elseif value=="first" then + return alternatives[1],trace_alternatives and formatters["value %a, taking %a"](value,1) + elseif value=="last" then + return alternatives[n],trace_alternatives and formatters["value %a, taking %a"](value,n) + else + value=tonumber(value) + if type(value)~="number" then + return alternatives[1],trace_alternatives and formatters["invalid value %s, taking %a"](value,1) + elseif value>n then + local defaultalt=otf.defaultnodealternate + if defaultalt=="first" then + return alternatives[n],trace_alternatives and formatters["invalid value %s, taking %a"](value,1) + elseif defaultalt=="last" then + return alternatives[1],trace_alternatives and formatters["invalid value %s, taking %a"](value,n) + else + return false,trace_alternatives and formatters["invalid value %a, %s"](value,"out of range") + end + elseif value==0 then + return getchar(start),trace_alternatives and formatters["invalid value %a, %s"](value,"no change") + elseif value<1 then + return alternatives[1],trace_alternatives and formatters["invalid value %a, taking %a"](value,1) + else + return alternatives[value],trace_alternatives and formatters["value %a, taking %a"](value,value) + end + end +end +local function multiple_glyphs(head,start,multiple,ignoremarks) + local nofmultiples=#multiple + if nofmultiples>0 then + resetinjection(start) + setfield(start,"char",multiple[1]) + if nofmultiples>1 then + local sn=getnext(start) + for k=2,nofmultiples do + local n=copy_node(start) + resetinjection(n) + setfield(n,"char",multiple[k]) + setfield(n,"next",sn) + setfield(n,"prev",start) + if sn then + setfield(sn,"prev",n) + end + setfield(start,"next",n) + start=n + end + end + return head,start,true + else + if trace_multiples then + logprocess("no multiple for %s",gref(getchar(start))) + end + return head,start,false + end +end +function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence) + local value=featurevalue==true and tfmdata.shared.features[kind] or featurevalue + local choice,comment=get_alternative_glyph(start,alternative,value,trace_alternatives) + if choice 
then + if trace_alternatives then + logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(getchar(start)),choice,gref(choice),comment) + end + resetinjection(start) + setfield(start,"char",choice) + else + if trace_alternatives then + logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(getchar(start)),comment) + end + end + return head,start,true +end +function handlers.gsub_multiple(head,start,kind,lookupname,multiple,sequence) + if trace_multiples then + logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(getchar(start)),gref(multiple)) + end + return multiple_glyphs(head,start,multiple,sequence.flags[1]) +end +function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence) + local s,stop,discfound=getnext(start),nil,false + local startchar=getchar(start) + if marks[startchar] then + while s do + local id=getid(s) + if id==glyph_code and getfont(s)==currentfont and getsubtype(s)<256 then + local lg=ligature[getchar(s)] + if lg then + stop=s + ligature=lg + s=getnext(s) + else + break + end + else + break + end + end + if stop then + local lig=ligature.ligature + if lig then + if trace_ligatures then + local stopchar=getchar(stop) + head,start=markstoligature(kind,lookupname,head,start,stop,lig) + logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start))) + else + head,start=markstoligature(kind,lookupname,head,start,stop,lig) + end + return head,start,true + else + end + end + else + local skipmark=sequence.flags[1] + while s do + local id=getid(s) + if id==glyph_code and getsubtype(s)<256 then + if getfont(s)==currentfont then + local char=getchar(s) + if skipmark and marks[char] then + s=getnext(s) + else + local lg=ligature[char] + if lg then + stop=s + ligature=lg + s=getnext(s) + else + break + end + end + else + break + end + elseif id==disc_code then + discfound=true + s=getnext(s) + else + break + end + end + local lig=ligature.ligature + if lig then + if stop then + if trace_ligatures then + local stopchar=getchar(stop) + head,start=toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound) + logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start))) + else + head,start=toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound) + end + else + resetinjection(start) + setfield(start,"char",lig) + if trace_ligatures then + logprocess("%s: replacing %s by (no real) ligature %s case 3",pref(kind,lookupname),gref(startchar),gref(lig)) + end + end + return head,start,true + else + end + end + return head,start,false +end +function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence) + local markchar=getchar(start) + if marks[markchar] then + local base=getprev(start) + if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then + local basechar=getchar(base) + if marks[basechar] then + while true do + base=getprev(base) + if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then + basechar=getchar(base) + if not marks[basechar] then + break + end + else + if trace_bugs then + logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar)) + end + return head,start,false + end + end + end + local baseanchors=descriptions[basechar] + if baseanchors then + baseanchors=baseanchors.anchors + end + if baseanchors then + local 
baseanchors=baseanchors['basechar'] + if baseanchors then + local al=anchorlookups[lookupname] + for anchor,ba in next,baseanchors do + if al[anchor] then + local ma=markanchors[anchor] + if ma then + local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) + if trace_marks then + logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)", + pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) + end + return head,start,true + end + end + end + if trace_bugs then + logwarning("%s, no matching anchors for mark %s and base %s",pref(kind,lookupname),gref(markchar),gref(basechar)) + end + end + elseif trace_bugs then + onetimemessage(currentfont,basechar,"no base anchors",report_fonts) + end + elseif trace_bugs then + logwarning("%s: prev node is no char",pref(kind,lookupname)) + end + elseif trace_bugs then + logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar)) + end + return head,start,false +end +function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence) + local markchar=getchar(start) + if marks[markchar] then + local base=getprev(start) + if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then + local basechar=getchar(base) + if marks[basechar] then + while true do + base=getprev(base) + if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then + basechar=getchar(base) + if not marks[basechar] then + break + end + else + if trace_bugs then + logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar)) + end + return head,start,false + end + end + end + local index=getligaindex(start) + local baseanchors=descriptions[basechar] + if baseanchors then + baseanchors=baseanchors.anchors + if baseanchors then + local baseanchors=baseanchors['baselig'] + if baseanchors then + local al=anchorlookups[lookupname] + for anchor,ba in next,baseanchors do + if al[anchor] then + local ma=markanchors[anchor] + if ma then + ba=ba[index] + if ba then + local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) + if trace_marks then + logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)", + pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy) + end + return head,start,true + else + if trace_bugs then + logwarning("%s: no matching anchors for mark %s and baselig %s with index %a",pref(kind,lookupname),gref(markchar),gref(basechar),index) + end + end + end + end + end + if trace_bugs then + logwarning("%s: no matching anchors for mark %s and baselig %s",pref(kind,lookupname),gref(markchar),gref(basechar)) + end + end + end + elseif trace_bugs then + onetimemessage(currentfont,basechar,"no base anchors",report_fonts) + end + elseif trace_bugs then + logwarning("%s: prev node is no char",pref(kind,lookupname)) + end + elseif trace_bugs then + logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar)) + end + return head,start,false +end +function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence) + local markchar=getchar(start) + if marks[markchar] then + local base=getprev(start) + local slc=getligaindex(start) + if slc then + while base do + local blc=getligaindex(base) + if blc and blc~=slc then + base=getprev(base) + else + break + end + end + end + if base and getid(base)==glyph_code and getfont(base)==currentfont and 
getsubtype(base)<256 then + local basechar=getchar(base) + local baseanchors=descriptions[basechar] + if baseanchors then + baseanchors=baseanchors.anchors + if baseanchors then + baseanchors=baseanchors['basemark'] + if baseanchors then + local al=anchorlookups[lookupname] + for anchor,ba in next,baseanchors do + if al[anchor] then + local ma=markanchors[anchor] + if ma then + local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) + if trace_marks then + logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)", + pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) + end + return head,start,true + end + end + end + if trace_bugs then + logwarning("%s: no matching anchors for mark %s and basemark %s",pref(kind,lookupname),gref(markchar),gref(basechar)) + end + end + end + elseif trace_bugs then + onetimemessage(currentfont,basechar,"no base anchors",report_fonts) + end + elseif trace_bugs then + logwarning("%s: prev node is no mark",pref(kind,lookupname)) + end + elseif trace_bugs then + logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar)) + end + return head,start,false +end +function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence) + local alreadydone=cursonce and getprop(start,a_cursbase) + if not alreadydone then + local done=false + local startchar=getchar(start) + if marks[startchar] then + if trace_cursive then + logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar)) + end + else + local nxt=getnext(start) + while not done and nxt and getid(nxt)==glyph_code and getfont(nxt)==currentfont and getsubtype(nxt)<256 do + local nextchar=getchar(nxt) + if marks[nextchar] then + nxt=getnext(nxt) + else + local entryanchors=descriptions[nextchar] + if entryanchors then + entryanchors=entryanchors.anchors + if entryanchors then + entryanchors=entryanchors['centry'] + if entryanchors then + local al=anchorlookups[lookupname] + for anchor,entry in next,entryanchors do + if al[anchor] then + local exit=exitanchors[anchor] + if exit then + local dx,dy,bound=setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar]) + if trace_cursive then + logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode) + end + done=true + break + end + end + end + end + end + elseif trace_bugs then + onetimemessage(currentfont,startchar,"no entry anchors",report_fonts) + end + break + end + end + end + return head,start,done + else + if trace_cursive and trace_details then + logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone) + end + return head,start,false + end +end +function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence) + local startchar=getchar(start) + local dx,dy,w,h=setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar]) + if trace_kerns then + logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h) + end + return head,start,false +end +function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence) + local snext=getnext(start) + if not snext then + return head,start,false + else + local prev,done=start,false + local factor=tfmdata.parameters.factor + local lookuptype=lookuptypes[lookupname] + while snext and 
getid(snext)==glyph_code and getfont(snext)==currentfont and getsubtype(snext)<256 do + local nextchar=getchar(snext) + local krn=kerns[nextchar] + if not krn and marks[nextchar] then + prev=snext + snext=getnext(snext) + else + if not krn then + elseif type(krn)=="table" then + if lookuptype=="pair" then + local a,b=krn[2],krn[3] + if a and #a>0 then + local startchar=getchar(start) + local x,y,w,h=setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar]) + if trace_kerns then + logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h) + end + end + if b and #b>0 then + local startchar=getchar(start) + local x,y,w,h=setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar]) + if trace_kerns then + logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h) + end + end + else + report_process("%s: check this out (old kern stuff)",pref(kind,lookupname)) + end + done=true + elseif krn~=0 then + local k=setkern(snext,factor,rlmode,krn) + if trace_kerns then + logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(getchar(prev)),gref(nextchar)) + end + done=true + end + break + end + end + return head,start,done + end +end +local chainmores={} +local chainprocs={} +local function logprocess(...) + if trace_steps then + registermessage(...) + end + report_subchain(...) +end +local logwarning=report_subchain +local function logprocess(...) + if trace_steps then + registermessage(...) + end + report_chain(...) +end +local logwarning=report_chain +function chainprocs.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname) + logwarning("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname)) + return head,start,false +end +function chainmores.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname,n) + logprocess("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname)) + return head,start,false +end +function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements) + local char=getchar(start) + local replacement=replacements[char] + if replacement then + if trace_singles then + logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement)) + end + resetinjection(start) + setfield(start,"char",replacement) + return head,start,true + else + return head,start,false + end +end +function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex) + local current=start + local subtables=currentlookup.subtables + if #subtables>1 then + logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," ")) + end + while current do + if getid(current)==glyph_code then + local currentchar=getchar(current) + local lookupname=subtables[1] + local replacement=lookuphash[lookupname] + if not replacement then + if trace_bugs then + logwarning("%s: no single hits",cref(kind,chainname,chainlookupname,lookupname,chainindex)) + end + else + replacement=replacement[currentchar] + if not replacement or replacement=="" then + if trace_bugs then + logwarning("%s: no single for %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar)) + end + else + if trace_singles then + logprocess("%s: 
replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement)) + end + resetinjection(current) + setfield(current,"char",replacement) + end + end + return head,start,true + elseif current==stop then + break + else + current=getnext(current) + end + end + return head,start,false +end +chainmores.gsub_single=chainprocs.gsub_single +function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) + local startchar=getchar(start) + local subtables=currentlookup.subtables + local lookupname=subtables[1] + local replacements=lookuphash[lookupname] + if not replacements then + if trace_bugs then + logwarning("%s: no multiple hits",cref(kind,chainname,chainlookupname,lookupname)) + end + else + replacements=replacements[startchar] + if not replacements or replacement=="" then + if trace_bugs then + logwarning("%s: no multiple for %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar)) + end + else + if trace_multiples then + logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements)) + end + return multiple_glyphs(head,start,replacements,currentlookup.flags[1]) + end + end + return head,start,false +end +chainmores.gsub_multiple=chainprocs.gsub_multiple +function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) + local current=start + local subtables=currentlookup.subtables + local value=featurevalue==true and tfmdata.shared.features[kind] or featurevalue + while current do + if getid(current)==glyph_code then + local currentchar=getchar(current) + local lookupname=subtables[1] + local alternatives=lookuphash[lookupname] + if not alternatives then + if trace_bugs then + logwarning("%s: no alternative hit",cref(kind,chainname,chainlookupname,lookupname)) + end + else + alternatives=alternatives[currentchar] + if alternatives then + local choice,comment=get_alternative_glyph(current,alternatives,value,trace_alternatives) + if choice then + if trace_alternatives then + logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(char),choice,gref(choice),comment) + end + resetinjection(start) + setfield(start,"char",choice) + else + if trace_alternatives then + logwarning("%s: no variant %a for %s, %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(char),comment) + end + end + elseif trace_bugs then + logwarning("%s: no alternative for %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar),comment) + end + end + return head,start,true + elseif current==stop then + break + else + current=getnext(current) + end + end + return head,start,false +end +chainmores.gsub_alternate=chainprocs.gsub_alternate +function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex) + local startchar=getchar(start) + local subtables=currentlookup.subtables + local lookupname=subtables[1] + local ligatures=lookuphash[lookupname] + if not ligatures then + if trace_bugs then + logwarning("%s: no ligature hits",cref(kind,chainname,chainlookupname,lookupname,chainindex)) + end + else + ligatures=ligatures[startchar] + if not ligatures then + if trace_bugs then + logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar)) + end + else + local 
s=getnext(start) + local discfound=false + local last=stop + local nofreplacements=0 + local skipmark=currentlookup.flags[1] + while s do + local id=getid(s) + if id==disc_code then + s=getnext(s) + discfound=true + else + local schar=getchar(s) + if skipmark and marks[schar] then + s=getnext(s) + else + local lg=ligatures[schar] + if lg then + ligatures,last,nofreplacements=lg,s,nofreplacements+1 + if s==stop then + break + else + s=getnext(s) + end + else + break + end + end + end + end + local l2=ligatures.ligature + if l2 then + if chainindex then + stop=last + end + if trace_ligatures then + if start==stop then + logprocess("%s: replacing character %s by ligature %s case 3",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2)) + else + logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop)),gref(l2)) + end + end + head,start=toligature(kind,lookupname,head,start,stop,l2,currentlookup.flags[1],discfound) + return head,start,true,nofreplacements + elseif trace_bugs then + if start==stop then + logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar)) + else + logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop))) + end + end + end + end + return head,start,false,0 +end +chainmores.gsub_ligature=chainprocs.gsub_ligature +function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) + local markchar=getchar(start) + if marks[markchar] then + local subtables=currentlookup.subtables + local lookupname=subtables[1] + local markanchors=lookuphash[lookupname] + if markanchors then + markanchors=markanchors[markchar] + end + if markanchors then + local base=getprev(start) + if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then + local basechar=getchar(base) + if marks[basechar] then + while true do + base=getprev(base) + if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then + basechar=getchar(base) + if not marks[basechar] then + break + end + else + if trace_bugs then + logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar)) + end + return head,start,false + end + end + end + local baseanchors=descriptions[basechar].anchors + if baseanchors then + local baseanchors=baseanchors['basechar'] + if baseanchors then + local al=anchorlookups[lookupname] + for anchor,ba in next,baseanchors do + if al[anchor] then + local ma=markanchors[anchor] + if ma then + local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) + if trace_marks then + logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)", + cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) + end + return head,start,true + end + end + end + if trace_bugs then + logwarning("%s, no matching anchors for mark %s and base %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar)) + end + end + end + elseif trace_bugs then + logwarning("%s: prev node is no char",cref(kind,chainname,chainlookupname,lookupname)) + end + elseif trace_bugs then + logwarning("%s: mark %s has no 
anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar)) + end + elseif trace_bugs then + logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar)) + end + return head,start,false +end +function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) + local markchar=getchar(start) + if marks[markchar] then + local subtables=currentlookup.subtables + local lookupname=subtables[1] + local markanchors=lookuphash[lookupname] + if markanchors then + markanchors=markanchors[markchar] + end + if markanchors then + local base=getprev(start) + if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then + local basechar=getchar(base) + if marks[basechar] then + while true do + base=getprev(base) + if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then + basechar=getchar(base) + if not marks[basechar] then + break + end + else + if trace_bugs then + logwarning("%s: no base for mark %s",cref(kind,chainname,chainlookupname,lookupname),markchar) + end + return head,start,false + end + end + end + local index=getligaindex(start) + local baseanchors=descriptions[basechar].anchors + if baseanchors then + local baseanchors=baseanchors['baselig'] + if baseanchors then + local al=anchorlookups[lookupname] + for anchor,ba in next,baseanchors do + if al[anchor] then + local ma=markanchors[anchor] + if ma then + ba=ba[index] + if ba then + local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) + if trace_marks then + logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)", + cref(kind,chainname,chainlookupname,lookupname),anchor,a or bound,gref(markchar),gref(basechar),index,dx,dy) + end + return head,start,true + end + end + end + end + if trace_bugs then + logwarning("%s: no matching anchors for mark %s and baselig %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar)) + end + end + end + elseif trace_bugs then + logwarning("feature %s, lookup %s: prev node is no char",kind,lookupname) + end + elseif trace_bugs then + logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar)) + end + elseif trace_bugs then + logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar)) + end + return head,start,false +end +function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) + local markchar=getchar(start) + if marks[markchar] then + local subtables=currentlookup.subtables + local lookupname=subtables[1] + local markanchors=lookuphash[lookupname] + if markanchors then + markanchors=markanchors[markchar] + end + if markanchors then + local base=getprev(start) + local slc=getligaindex(start) + if slc then + while base do + local blc=getligaindex(base) + if blc and blc~=slc then + base=getprev(base) + else + break + end + end + end + if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then + local basechar=getchar(base) + local baseanchors=descriptions[basechar].anchors + if baseanchors then + baseanchors=baseanchors['basemark'] + if baseanchors then + local al=anchorlookups[lookupname] + for anchor,ba in next,baseanchors do + if al[anchor] then + local ma=markanchors[anchor] + if ma then + local 
dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) + if trace_marks then + logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)", + cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) + end + return head,start,true + end + end + end + if trace_bugs then + logwarning("%s: no matching anchors for mark %s and basemark %s",gref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar)) + end + end + end + elseif trace_bugs then + logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname)) + end + elseif trace_bugs then + logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar)) + end + elseif trace_bugs then + logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar)) + end + return head,start,false +end +function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) + local alreadydone=cursonce and getprop(start,a_cursbase) + if not alreadydone then + local startchar=getchar(start) + local subtables=currentlookup.subtables + local lookupname=subtables[1] + local exitanchors=lookuphash[lookupname] + if exitanchors then + exitanchors=exitanchors[startchar] + end + if exitanchors then + local done=false + if marks[startchar] then + if trace_cursive then + logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar)) + end + else + local nxt=getnext(start) + while not done and nxt and getid(nxt)==glyph_code and getfont(nxt)==currentfont and getsubtype(nxt)<256 do + local nextchar=getchar(nxt) + if marks[nextchar] then + nxt=getnext(nxt) + else + local entryanchors=descriptions[nextchar] + if entryanchors then + entryanchors=entryanchors.anchors + if entryanchors then + entryanchors=entryanchors['centry'] + if entryanchors then + local al=anchorlookups[lookupname] + for anchor,entry in next,entryanchors do + if al[anchor] then + local exit=exitanchors[anchor] + if exit then + local dx,dy,bound=setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar]) + if trace_cursive then + logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode) + end + done=true + break + end + end + end + end + end + elseif trace_bugs then + onetimemessage(currentfont,startchar,"no entry anchors",report_fonts) + end + break + end + end + end + return head,start,done + else + if trace_cursive and trace_details then + logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone) + end + return head,start,false + end + end + return head,start,false +end +function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence) + local startchar=getchar(start) + local subtables=currentlookup.subtables + local lookupname=subtables[1] + local kerns=lookuphash[lookupname] + if kerns then + kerns=kerns[startchar] + if kerns then + local dx,dy,w,h=setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar]) + if trace_kerns then + logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h) + end + end + end + return head,start,false +end 
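+-- Note: chainprocs.* handle a lookup that is triggered from a contextual chain and
+-- therefore resolve their kerning and anchor data through lookuphash (via the current
+-- lookup's first subtable), while chainmores.* are used when a chain fires several
+-- lookups in sequence; most chainmores entries simply reuse the chainprocs code.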
+chainmores.gpos_single=chainprocs.gpos_single +function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence) + local snext=getnext(start) + if snext then + local startchar=getchar(start) + local subtables=currentlookup.subtables + local lookupname=subtables[1] + local kerns=lookuphash[lookupname] + if kerns then + kerns=kerns[startchar] + if kerns then + local lookuptype=lookuptypes[lookupname] + local prev,done=start,false + local factor=tfmdata.parameters.factor + while snext and getid(snext)==glyph_code and getfont(snext)==currentfont and getsubtype(snext)<256 do + local nextchar=getchar(snext) + local krn=kerns[nextchar] + if not krn and marks[nextchar] then + prev=snext + snext=getnext(snext) + else + if not krn then + elseif type(krn)=="table" then + if lookuptype=="pair" then + local a,b=krn[2],krn[3] + if a and #a>0 then + local startchar=getchar(start) + local x,y,w,h=setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar]) + if trace_kerns then + logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h) + end + end + if b and #b>0 then + local startchar=getchar(start) + local x,y,w,h=setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar]) + if trace_kerns then + logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h) + end + end + else + report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname)) + local a,b=krn[2],krn[6] + if a and a~=0 then + local k=setkern(snext,factor,rlmode,a) + if trace_kerns then + logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar)) + end + end + if b and b~=0 then + logwarning("%s: ignoring second kern xoff %s",cref(kind,chainname,chainlookupname),b*factor) + end + end + done=true + elseif krn~=0 then + local k=setkern(snext,factor,rlmode,krn) + if trace_kerns then + logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar)) + end + done=true + end + break + end + end + return head,start,done + end + end + end + return head,start,false +end +chainmores.gpos_pair=chainprocs.gpos_pair +local function show_skip(kind,chainname,char,ck,class) + if ck[9] then + logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a, %a => %a",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10]) + else + logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a",cref(kind,chainname),gref(char),class,ck[1],ck[2]) + end +end +local quit_on_no_replacement=true +directives.register("otf.chain.quitonnoreplacement",function(value) + quit_on_no_replacement=value +end) +local function normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash) + local flags=sequence.flags + local done=false + local skipmark=flags[1] + local skipligature=flags[2] + local skipbase=flags[3] + local someskip=skipmark or skipligature or skipbase + local markclass=sequence.markclass + local skipped=false + for k=1,#contexts do + local match=true + local current=start + local last=start + local ck=contexts[k] + local seq=ck[3] + local s=#seq + if s==1 then + match=getid(current)==glyph_code and getfont(current)==currentfont and getsubtype(current)<256 and 
seq[1][getchar(current)]
+ else
+ local f,l=ck[4],ck[5]
+ if f==1 and f==l then
+ else
+ if f==l then
+ else
+ local n=f+1
+ last=getnext(last)
+ while n<=l do
+ if last then
+ local id=getid(last)
+ if id==glyph_code then
+ if getfont(last)==currentfont and getsubtype(last)<256 then
+ local char=getchar(last)
+ local ccd=descriptions[char]
+ if ccd then
+ local class=ccd.class
+ if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
+ skipped=true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ last=getnext(last)
+ elseif seq[n][char] then
+ if n<l then
+ last=getnext(last)
+ end
+ n=n+1
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ elseif id==disc_code then
+ last=getnext(last)
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ end
+ end
+ end
+ if match and f>1 then
+ local prev=getprev(start)
+ if prev then
+ local n=f-1
+ while n>=1 do
+ if prev then
+ local id=getid(prev)
+ if id==glyph_code then
+ if getfont(prev)==currentfont and getsubtype(prev)<256 then
+ local char=getchar(prev)
+ local ccd=descriptions[char]
+ if ccd then
+ local class=ccd.class
+ if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
+ skipped=true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ elseif seq[n][char] then
+ n=n -1
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ elseif id==disc_code then
+ elseif seq[n][32] then
+ n=n -1
+ else
+ match=false
+ break
+ end
+ prev=getprev(prev)
+ elseif seq[n][32] then
+ n=n -1
+ else
+ match=false
+ break
+ end
+ end
+ elseif f==2 then
+ match=seq[1][32]
+ else
+ for n=f-1,1 do
+ if not seq[n][32] then
+ match=false
+ break
+ end
+ end
+ end
+ end
+ if match and s>l then
+ local current=last and getnext(last)
+ if current then
+ local n=l+1
+ while n<=s do
+ if current then
+ local id=getid(current)
+ if id==glyph_code then
+ if getfont(current)==currentfont and getsubtype(current)<256 then
+ local char=getchar(current)
+ local ccd=descriptions[char]
+ if ccd then
+ local class=ccd.class
+ if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
+ skipped=true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ elseif seq[n][char] then
+ n=n+1
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ elseif id==disc_code then
+ elseif seq[n][32] then
+ n=n+1
+ else
+ match=false
+ break
+ end
+ current=getnext(current)
+ elseif seq[n][32] then
+ n=n+1
+ else
+ match=false
+ break
+ end
+ end
+ elseif s-l==1 then
+ match=seq[s][32]
+ else
+ for n=l+1,s do
+ if not seq[n][32] then
+ match=false
+ break
+ end
+ end
+ end
+ end
+ end
+ if match then
+ if trace_contexts then
+ local rule,lookuptype,f,l=ck[1],ck[2],ck[4],ck[5]
+ local char=getchar(start)
+ if ck[9] then
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a, %a => %a",
+ cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
+ else
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a",
+ cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype)
+ end
+ end
+ local chainlookups=ck[6]
+ if chainlookups then
+ local nofchainlookups=#chainlookups
+ if nofchainlookups==1 then
+ local chainlookupname=chainlookups[1]
+ local chainlookup=lookuptable[chainlookupname]
+ if chainlookup then
+ local cp=chainprocs[chainlookup.type]
+ if cp then
+ local ok
+ head,start,ok=cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
+ if ok then
+
done=true + end + else + logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type) + end + else + logprocess("%s is not yet supported",cref(kind,chainname,chainlookupname)) + end + else + local i=1 + while true do + if skipped then + while true do + local char=getchar(start) + local ccd=descriptions[char] + if ccd then + local class=ccd.class + if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then + start=getnext(start) + else + break + end + else + break + end + end + end + local chainlookupname=chainlookups[i] + local chainlookup=lookuptable[chainlookupname] + if not chainlookup then + i=i+1 + else + local cp=chainmores[chainlookup.type] + if not cp then + logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type) + i=i+1 + else + local ok,n + head,start,ok,n=cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence) + if ok then + done=true + i=i+(n or 1) + else + i=i+1 + end + end + end + if i>nofchainlookups then + break + elseif start then + start=getnext(start) + else + end + end + end + else + local replacements=ck[7] + if replacements then + head,start,done=chainprocs.reversesub(head,start,last,kind,chainname,ck,lookuphash,replacements) + else + done=quit_on_no_replacement + if trace_contexts then + logprocess("%s: skipping match",cref(kind,chainname)) + end + end + end + end + end + return head,start,done +end +local verbose_handle_contextchain=function(font,...) + logwarning("no verbose handler installed, reverting to 'normal'") + otf.setcontextchain() + return normal_handle_contextchain(...) +end +otf.chainhandlers={ + normal=normal_handle_contextchain, + verbose=verbose_handle_contextchain, +} +function otf.setcontextchain(method) + if not method or method=="normal" or not otf.chainhandlers[method] then + if handlers.contextchain then + logwarning("installing normal contextchain handler") + end + handlers.contextchain=normal_handle_contextchain + else + logwarning("installing contextchain handler %a",method) + local handler=otf.chainhandlers[method] + handlers.contextchain=function(...) + return handler(currentfont,...) + end + end + handlers.gsub_context=handlers.contextchain + handlers.gsub_contextchain=handlers.contextchain + handlers.gsub_reversecontextchain=handlers.contextchain + handlers.gpos_contextchain=handlers.contextchain + handlers.gpos_context=handlers.contextchain +end +otf.setcontextchain() +local missing={} +local function logprocess(...) + if trace_steps then + registermessage(...) + end + report_process(...) 
+end +local logwarning=report_process +local function report_missing_cache(typ,lookup) + local f=missing[currentfont] if not f then f={} missing[currentfont]=f end + local t=f[typ] if not t then t={} f[typ]=t end + if not t[lookup] then + t[lookup]=true + logwarning("missing cache for lookup %a, type %a, font %a, name %a",lookup,typ,currentfont,tfmdata.properties.fullname) + end +end +local resolved={} +local lookuphashes={} +setmetatableindex(lookuphashes,function(t,font) + local lookuphash=fontdata[font].resources.lookuphash + if not lookuphash or not next(lookuphash) then + lookuphash=false + end + t[font]=lookuphash + return lookuphash +end) +local autofeatures=fonts.analyzers.features +local function initialize(sequence,script,language,enabled) + local features=sequence.features + if features then + local order=sequence.order + if order then + for i=1,#order do + local kind=order[i] + local valid=enabled[kind] + if valid then + local scripts=features[kind] + local languages=scripts[script] or scripts[wildcard] + if languages and (languages[language] or languages[wildcard]) then + return { valid,autofeatures[kind] or false,sequence.chain or 0,kind,sequence } + end + end + end + else + end + end + return false +end +function otf.dataset(tfmdata,font) + local shared=tfmdata.shared + local properties=tfmdata.properties + local language=properties.language or "dflt" + local script=properties.script or "dflt" + local enabled=shared.features + local res=resolved[font] + if not res then + res={} + resolved[font]=res + end + local rs=res[script] + if not rs then + rs={} + res[script]=rs + end + local rl=rs[language] + if not rl then + rl={ + } + rs[language]=rl + local sequences=tfmdata.resources.sequences + for s=1,#sequences do + local v=enabled and initialize(sequences[s],script,language,enabled) + if v then + rl[#rl+1]=v + end + end + end + return rl +end +local function featuresprocessor(head,font,attr) + local lookuphash=lookuphashes[font] + if not lookuphash then + return head,false + end + head=tonut(head) + if trace_steps then + checkstep(head) + end + tfmdata=fontdata[font] + descriptions=tfmdata.descriptions + characters=tfmdata.characters + resources=tfmdata.resources + marks=resources.marks + anchorlookups=resources.lookup_to_anchor + lookuptable=resources.lookups + lookuptypes=resources.lookuptypes + lookuptags=resources.lookuptags + currentfont=font + rlmode=0 + local sequences=resources.sequences + local done=false + local datasets=otf.dataset(tfmdata,font,attr) + local dirstack={} + for s=1,#datasets do + local dataset=datasets[s] + featurevalue=dataset[1] + local sequence=dataset[5] + local rlparmode=0 + local topstack=0 + local success=false + local attribute=dataset[2] + local chain=dataset[3] + local typ=sequence.type + local subtables=sequence.subtables + if chain<0 then + local handler=handlers[typ] + local start=find_node_tail(head) + while start do + local id=getid(start) + if id==glyph_code then + if getfont(start)==font and getsubtype(start)<256 then + local a=getattr(start,0) + if a then + a=a==attr + else + a=true + end + if a then + for i=1,#subtables do + local lookupname=subtables[i] + local lookupcache=lookuphash[lookupname] + if lookupcache then + local lookupmatch=lookupcache[getchar(start)] + if lookupmatch then + head,start,success=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) + if success then + break + end + end + else + report_missing_cache(typ,lookupname) + end + end + if start then start=getprev(start) end + else + 
start=getprev(start) + end + else + start=getprev(start) + end + else + start=getprev(start) + end + end + else + local handler=handlers[typ] + local ns=#subtables + local start=head + rlmode=0 + if ns==1 then + local lookupname=subtables[1] + local lookupcache=lookuphash[lookupname] + if not lookupcache then + report_missing_cache(typ,lookupname) + else + local function subrun(start) + local head=start + local done=false + while start do + local id=getid(start) + if id==glyph_code and getfont(start)==font and getsubtype(start)<256 then + local a=getattr(start,0) + if a then + a=(a==attr) and (not attribute or getprop(start,a_state)==attribute) + else + a=not attribute or getprop(start,a_state)==attribute + end + if a then + local lookupmatch=lookupcache[getchar(start)] + if lookupmatch then + local ok + head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) + if ok then + done=true + end + end + if start then start=getnext(start) end + else + start=getnext(start) + end + else + start=getnext(start) + end + end + if done then + success=true + return head + end + end + local function kerndisc(disc) + local prev=getprev(disc) + local next=getnext(disc) + if prev and next then + setfield(prev,"next",next) + local a=getattr(prev,0) + if a then + a=(a==attr) and (not attribute or getprop(prev,a_state)==attribute) + else + a=not attribute or getprop(prev,a_state)==attribute + end + if a then + local lookupmatch=lookupcache[getchar(prev)] + if lookupmatch then + local h,d,ok=handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) + if ok then + done=true + success=true + end + end + end + setfield(prev,"next",disc) + end + return next + end + while start do + local id=getid(start) + if id==glyph_code then + if getfont(start)==font and getsubtype(start)<256 then + local a=getattr(start,0) + if a then + a=(a==attr) and (not attribute or getprop(start,a_state)==attribute) + else + a=not attribute or getprop(start,a_state)==attribute + end + if a then + local lookupmatch=lookupcache[getchar(start)] + if lookupmatch then + local ok + head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) + if ok then + success=true + end + end + if start then start=getnext(start) end + else + start=getnext(start) + end + else + start=getnext(start) + end + elseif id==disc_code then + if getsubtype(start)==discretionary_code then + local pre=getfield(start,"pre") + if pre then + local new=subrun(pre) + if new then setfield(start,"pre",new) end + end + local post=getfield(start,"post") + if post then + local new=subrun(post) + if new then setfield(start,"post",new) end + end + local replace=getfield(start,"replace") + if replace then + local new=subrun(replace) + if new then setfield(start,"replace",new) end + end +elseif typ=="gpos_single" or typ=="gpos_pair" then + kerndisc(start) + end + start=getnext(start) + elseif id==whatsit_code then + local subtype=getsubtype(start) + if subtype==dir_code then + local dir=getfield(start,"dir") + if dir=="+TRT" or dir=="+TLT" then + topstack=topstack+1 + dirstack[topstack]=dir + elseif dir=="-TRT" or dir=="-TLT" then + topstack=topstack-1 + end + local newdir=dirstack[topstack] + if newdir=="+TRT" then + rlmode=-1 + elseif newdir=="+TLT" then + rlmode=1 + else + rlmode=rlparmode + end + if trace_directions then + report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir) + end + elseif subtype==localpar_code then + local 
dir=getfield(start,"dir") + if dir=="TRT" then + rlparmode=-1 + elseif dir=="TLT" then + rlparmode=1 + else + rlparmode=0 + end + rlmode=rlparmode + if trace_directions then + report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode) + end + end + start=getnext(start) + elseif id==math_code then + start=getnext(end_of_math(start)) + else + start=getnext(start) + end + end + end + else + local function subrun(start) + local head=start + local done=false + while start do + local id=getid(start) + if id==glyph_code and getfont(start)==font and getsubtype(start)<256 then + local a=getattr(start,0) + if a then + a=(a==attr) and (not attribute or getprop(start,a_state)==attribute) + else + a=not attribute or getprop(start,a_state)==attribute + end + if a then + for i=1,ns do + local lookupname=subtables[i] + local lookupcache=lookuphash[lookupname] + if lookupcache then + local lookupmatch=lookupcache[getchar(start)] + if lookupmatch then + local ok + head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) + if ok then + done=true + break + elseif not start then + break + end + end + else + report_missing_cache(typ,lookupname) + end + end + if start then start=getnext(start) end + else + start=getnext(start) + end + else + start=getnext(start) + end + end + if done then + success=true + return head + end + end + local function kerndisc(disc) + local prev=getprev(disc) + local next=getnext(disc) + if prev and next then + setfield(prev,"next",next) + local a=getattr(prev,0) + if a then + a=(a==attr) and (not attribute or getprop(prev,a_state)==attribute) + else + a=not attribute or getprop(prev,a_state)==attribute + end + if a then + for i=1,ns do + local lookupname=subtables[i] + local lookupcache=lookuphash[lookupname] + if lookupcache then + local lookupmatch=lookupcache[getchar(prev)] + if lookupmatch then + local h,d,ok=handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) + if ok then + done=true + break + end + end + else + report_missing_cache(typ,lookupname) + end + end + end + setfield(prev,"next",disc) + end + return next + end + while start do + local id=getid(start) + if id==glyph_code then + if getfont(start)==font and getsubtype(start)<256 then + local a=getattr(start,0) + if a then + a=(a==attr) and (not attribute or getprop(start,a_state)==attribute) + else + a=not attribute or getprop(start,a_state)==attribute + end + if a then + for i=1,ns do + local lookupname=subtables[i] + local lookupcache=lookuphash[lookupname] + if lookupcache then + local lookupmatch=lookupcache[getchar(start)] + if lookupmatch then + local ok + head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) + if ok then + success=true + break + elseif not start then + break + end + end + else + report_missing_cache(typ,lookupname) + end + end + if start then start=getnext(start) end + else + start=getnext(start) + end + else + start=getnext(start) + end + elseif id==disc_code then + if getsubtype(start)==discretionary_code then + local pre=getfield(start,"pre") + if pre then + local new=subrun(pre) + if new then setfield(start,"pre",new) end + end + local post=getfield(start,"post") + if post then + local new=subrun(post) + if new then setfield(start,"post",new) end + end + local replace=getfield(start,"replace") + if replace then + local new=subrun(replace) + if new then setfield(start,"replace",new) end + end +elseif typ=="gpos_single" or typ=="gpos_pair" then + kerndisc(start) + end + 
start=getnext(start) + elseif id==whatsit_code then + local subtype=getsubtype(start) + if subtype==dir_code then + local dir=getfield(start,"dir") + if dir=="+TRT" or dir=="+TLT" then + topstack=topstack+1 + dirstack[topstack]=dir + elseif dir=="-TRT" or dir=="-TLT" then + topstack=topstack-1 + end + local newdir=dirstack[topstack] + if newdir=="+TRT" then + rlmode=-1 + elseif newdir=="+TLT" then + rlmode=1 + else + rlmode=rlparmode + end + if trace_directions then + report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir) + end + elseif subtype==localpar_code then + local dir=getfield(start,"dir") + if dir=="TRT" then + rlparmode=-1 + elseif dir=="TLT" then + rlparmode=1 + else + rlparmode=0 + end + rlmode=rlparmode + if trace_directions then + report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode) + end + end + start=getnext(start) + elseif id==math_code then + start=getnext(end_of_math(start)) + else + start=getnext(start) + end + end + end + end + if success then + done=true + end + if trace_steps then + registerstep(head) + end + end + head=tonode(head) + return head,done +end +local function generic(lookupdata,lookupname,unicode,lookuphash) + local target=lookuphash[lookupname] + if target then + target[unicode]=lookupdata + else + lookuphash[lookupname]={ [unicode]=lookupdata } + end +end +local action={ + substitution=generic, + multiple=generic, + alternate=generic, + position=generic, + ligature=function(lookupdata,lookupname,unicode,lookuphash) + local target=lookuphash[lookupname] + if not target then + target={} + lookuphash[lookupname]=target + end + for i=1,#lookupdata do + local li=lookupdata[i] + local tu=target[li] + if not tu then + tu={} + target[li]=tu + end + target=tu + end + target.ligature=unicode + end, + pair=function(lookupdata,lookupname,unicode,lookuphash) + local target=lookuphash[lookupname] + if not target then + target={} + lookuphash[lookupname]=target + end + local others=target[unicode] + local paired=lookupdata[1] + if others then + others[paired]=lookupdata + else + others={ [paired]=lookupdata } + target[unicode]=others + end + end, +} +local function prepare_lookups(tfmdata) + local rawdata=tfmdata.shared.rawdata + local resources=rawdata.resources + local lookuphash=resources.lookuphash + local anchor_to_lookup=resources.anchor_to_lookup + local lookup_to_anchor=resources.lookup_to_anchor + local lookuptypes=resources.lookuptypes + local characters=tfmdata.characters + local descriptions=tfmdata.descriptions + for unicode,character in next,characters do + local description=descriptions[unicode] + if description then + local lookups=description.slookups + if lookups then + for lookupname,lookupdata in next,lookups do + action[lookuptypes[lookupname]](lookupdata,lookupname,unicode,lookuphash) + end + end + local lookups=description.mlookups + if lookups then + for lookupname,lookuplist in next,lookups do + local lookuptype=lookuptypes[lookupname] + for l=1,#lookuplist do + local lookupdata=lookuplist[l] + action[lookuptype](lookupdata,lookupname,unicode,lookuphash) + end + end + end + local list=description.kerns + if list then + for lookup,krn in next,list do + local target=lookuphash[lookup] + if target then + target[unicode]=krn + else + lookuphash[lookup]={ [unicode]=krn } + end + end + end + local list=description.anchors + if list then + for typ,anchors in next,list do + if typ=="mark" or typ=="cexit" then + for name,anchor in 
next,anchors do + local lookups=anchor_to_lookup[name] + if lookups then + for lookup,_ in next,lookups do + local target=lookuphash[lookup] + if target then + target[unicode]=anchors + else + lookuphash[lookup]={ [unicode]=anchors } + end + end + end + end + end + end + end + end + end +end +local function split(replacement,original) + local result={} + for i=1,#replacement do + result[original[i]]=replacement[i] + end + return result +end +local valid={ + coverage={ chainsub=true,chainpos=true,contextsub=true }, + reversecoverage={ reversesub=true }, + glyphs={ chainsub=true,chainpos=true }, +} +local function prepare_contextchains(tfmdata) + local rawdata=tfmdata.shared.rawdata + local resources=rawdata.resources + local lookuphash=resources.lookuphash + local lookuptags=resources.lookuptags + local lookups=rawdata.lookups + if lookups then + for lookupname,lookupdata in next,rawdata.lookups do + local lookuptype=lookupdata.type + if lookuptype then + local rules=lookupdata.rules + if rules then + local format=lookupdata.format + local validformat=valid[format] + if not validformat then + report_prepare("unsupported format %a",format) + elseif not validformat[lookuptype] then + report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookuptags[lookupname]) + else + local contexts=lookuphash[lookupname] + if not contexts then + contexts={} + lookuphash[lookupname]=contexts + end + local t,nt={},0 + for nofrules=1,#rules do + local rule=rules[nofrules] + local current=rule.current + local before=rule.before + local after=rule.after + local replacements=rule.replacements + local sequence={} + local nofsequences=0 + if before then + for n=1,#before do + nofsequences=nofsequences+1 + sequence[nofsequences]=before[n] + end + end + local start=nofsequences+1 + for n=1,#current do + nofsequences=nofsequences+1 + sequence[nofsequences]=current[n] + end + local stop=nofsequences + if after then + for n=1,#after do + nofsequences=nofsequences+1 + sequence[nofsequences]=after[n] + end + end + if sequence[1] then + nt=nt+1 + t[nt]={ nofrules,lookuptype,sequence,start,stop,rule.lookups,replacements } + for unic,_ in next,sequence[start] do + local cu=contexts[unic] + if not cu then + contexts[unic]=t + end + end + end + end + end + else + end + else + report_prepare("missing lookuptype for lookupname %a",lookuptags[lookupname]) + end + end + end +end +local function featuresinitializer(tfmdata,value) + if true then + local rawdata=tfmdata.shared.rawdata + local properties=rawdata.properties + if not properties.initialized then + local starttime=trace_preparing and os.clock() + local resources=rawdata.resources + resources.lookuphash=resources.lookuphash or {} + prepare_contextchains(tfmdata) + prepare_lookups(tfmdata) + properties.initialized=true + if trace_preparing then + report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,tfmdata.properties.fullname) + end + end + end +end +registerotffeature { + name="features", + description="features", + default=true, + initializers={ + position=1, + node=featuresinitializer, + }, + processors={ + node=featuresprocessor, + } +} +otf.handlers=handlers + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-otp']={ + version=1.001, + comment="companion to font-otf.lua (packing)", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} 
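+-- [editorial sketch, not part of the upstream file] packdata below deduplicates
+-- repeated, identical sub-tables (kerns, bounding boxes, anchors, rules): each
+-- table is serialized to a tag, stored once in a shared data.tables array and
+-- referenced by index; unpackdata resolves those indices again when the cached
+-- font data is loaded. A toy version of that idea (toy_* names are hypothetical):
+do
+    local toy_hash, toy_shared = { }, { }
+    local function toy_pack(v)               -- v: a small array-like table
+        local tag = table.concat(v," ")      -- serialize to a comparable tag
+        local index = toy_hash[tag]
+        if not index then
+            index = #toy_shared + 1          -- first occurrence: store the table
+            toy_shared[index] = v
+            toy_hash[tag] = index
+        end
+        return index                         -- duplicates share one stored copy
+    end
+    local a = toy_pack { 0, 10, 0, 0 }
+    local b = toy_pack { 0, 10, 0, 0 }       -- a == b == 1: stored only once
+end
+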
+local next,type=next,type +local sort,concat=table.sort,table.concat +local sortedhash=table.sortedhash +local trace_packing=false trackers.register("otf.packing",function(v) trace_packing=v end) +local trace_loading=false trackers.register("otf.loading",function(v) trace_loading=v end) +local report_otf=logs.reporter("fonts","otf loading") +fonts=fonts or {} +local handlers=fonts.handlers or {} +fonts.handlers=handlers +local otf=handlers.otf or {} +handlers.otf=otf +local enhancers=otf.enhancers or {} +otf.enhancers=enhancers +local glists=otf.glists or { "gsub","gpos" } +otf.glists=glists +local criterium=1 +local threshold=0 +local function tabstr_normal(t) + local s={} + local n=0 + for k,v in next,t do + n=n+1 + if type(v)=="table" then + s[n]=k..">"..tabstr_normal(v) + elseif v==true then + s[n]=k.."+" + elseif v then + s[n]=k.."="..v + else + s[n]=k.."-" + end + end + if n==0 then + return "" + elseif n==1 then + return s[1] + else + sort(s) + return concat(s,",") + end +end +local function tabstr_flat(t) + local s={} + local n=0 + for k,v in next,t do + n=n+1 + s[n]=k.."="..v + end + if n==0 then + return "" + elseif n==1 then + return s[1] + else + sort(s) + return concat(s,",") + end +end +local function tabstr_mixed(t) + local s={} + local n=#t + if n==0 then + return "" + elseif n==1 then + local k=t[1] + if k==true then + return "++" + elseif k==false then + return "--" + else + return tostring(k) + end + else + for i=1,n do + local k=t[i] + if k==true then + s[i]="++" + elseif k==false then + s[i]="--" + else + s[i]=k + end + end + return concat(s,",") + end +end +local function tabstr_boolean(t) + local s={} + local n=0 + for k,v in next,t do + n=n+1 + if v then + s[n]=k.."+" + else + s[n]=k.."-" + end + end + if n==0 then + return "" + elseif n==1 then + return s[1] + else + sort(s) + return concat(s,",") + end +end +local function packdata(data) + if data then + local h,t,c={},{},{} + local hh,tt,cc={},{},{} + local nt,ntt=0,0 + local function pack_normal(v) + local tag=tabstr_normal(v) + local ht=h[tag] + if ht then + c[ht]=c[ht]+1 + return ht + else + nt=nt+1 + t[nt]=v + h[tag]=nt + c[nt]=1 + return nt + end + end + local function pack_flat(v) + local tag=tabstr_flat(v) + local ht=h[tag] + if ht then + c[ht]=c[ht]+1 + return ht + else + nt=nt+1 + t[nt]=v + h[tag]=nt + c[nt]=1 + return nt + end + end + local function pack_boolean(v) + local tag=tabstr_boolean(v) + local ht=h[tag] + if ht then + c[ht]=c[ht]+1 + return ht + else + nt=nt+1 + t[nt]=v + h[tag]=nt + c[nt]=1 + return nt + end + end + local function pack_indexed(v) + local tag=concat(v," ") + local ht=h[tag] + if ht then + c[ht]=c[ht]+1 + return ht + else + nt=nt+1 + t[nt]=v + h[tag]=nt + c[nt]=1 + return nt + end + end + local function pack_mixed(v) + local tag=tabstr_mixed(v) + local ht=h[tag] + if ht then + c[ht]=c[ht]+1 + return ht + else + nt=nt+1 + t[nt]=v + h[tag]=nt + c[nt]=1 + return nt + end + end + local function pack_final(v) + if c[v]<=criterium then + return t[v] + else + local hv=hh[v] + if hv then + return hv + else + ntt=ntt+1 + tt[ntt]=t[v] + hh[v]=ntt + cc[ntt]=c[v] + return ntt + end + end + end + local function success(stage,pass) + if nt==0 then + if trace_loading or trace_packing then + report_otf("pack quality: nothing to pack") + end + return false + elseif nt>=threshold then + local one,two,rest=0,0,0 + if pass==1 then + for k,v in next,c do + if v==1 then + one=one+1 + elseif v==2 then + two=two+1 + else + rest=rest+1 + end + end + else + for k,v in next,cc do + if v>20 then + 
rest=rest+1 + elseif v>10 then + two=two+1 + else + one=one+1 + end + end + data.tables=tt + end + if trace_loading or trace_packing then + report_otf("pack quality: stage %s, pass %s, %s packed, 1-10:%s, 11-20:%s, rest:%s (criterium: %s)",stage,pass,one+two+rest,one,two,rest,criterium) + end + return true + else + if trace_loading or trace_packing then + report_otf("pack quality: stage %s, pass %s, %s packed, aborting pack (threshold: %s)",stage,pass,nt,threshold) + end + return false + end + end + local function packers(pass) + if pass==1 then + return pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed + else + return pack_final,pack_final,pack_final,pack_final,pack_final + end + end + local resources=data.resources + local lookuptypes=resources.lookuptypes + for pass=1,2 do + if trace_packing then + report_otf("start packing: stage 1, pass %s",pass) + end + local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass) + for unicode,description in next,data.descriptions do + local boundingbox=description.boundingbox + if boundingbox then + description.boundingbox=pack_indexed(boundingbox) + end + local slookups=description.slookups + if slookups then + for tag,slookup in next,slookups do + local what=lookuptypes[tag] + if what=="pair" then + local t=slookup[2] if t then slookup[2]=pack_indexed(t) end + local t=slookup[3] if t then slookup[3]=pack_indexed(t) end + elseif what~="substitution" then + slookups[tag]=pack_indexed(slookup) + end + end + end + local mlookups=description.mlookups + if mlookups then + for tag,mlookup in next,mlookups do + local what=lookuptypes[tag] + if what=="pair" then + for i=1,#mlookup do + local lookup=mlookup[i] + local t=lookup[2] if t then lookup[2]=pack_indexed(t) end + local t=lookup[3] if t then lookup[3]=pack_indexed(t) end + end + elseif what~="substitution" then + for i=1,#mlookup do + mlookup[i]=pack_indexed(mlookup[i]) + end + end + end + end + local kerns=description.kerns + if kerns then + for tag,kern in next,kerns do + kerns[tag]=pack_flat(kern) + end + end + local math=description.math + if math then + local kerns=math.kerns + if kerns then + for tag,kern in next,kerns do + kerns[tag]=pack_normal(kern) + end + end + end + local anchors=description.anchors + if anchors then + for what,anchor in next,anchors do + if what=="baselig" then + for _,a in next,anchor do + for k=1,#a do + a[k]=pack_indexed(a[k]) + end + end + else + for k,v in next,anchor do + anchor[k]=pack_indexed(v) + end + end + end + end + local altuni=description.altuni + if altuni then + for i=1,#altuni do + altuni[i]=pack_flat(altuni[i]) + end + end + end + local lookups=data.lookups + if lookups then + for _,lookup in next,lookups do + local rules=lookup.rules + if rules then + for i=1,#rules do + local rule=rules[i] + local r=rule.before if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end + local r=rule.after if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end + local r=rule.current if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end + local r=rule.replacements if r then rule.replacements=pack_flat (r) end + local r=rule.lookups if r then rule.lookups=pack_indexed(r) end + end + end + end + end + local anchor_to_lookup=resources.anchor_to_lookup + if anchor_to_lookup then + for anchor,lookup in next,anchor_to_lookup do + anchor_to_lookup[anchor]=pack_normal(lookup) + end + end + local lookup_to_anchor=resources.lookup_to_anchor + if lookup_to_anchor then + for lookup,anchor in next,lookup_to_anchor do + 
lookup_to_anchor[lookup]=pack_normal(anchor) + end + end + local sequences=resources.sequences + if sequences then + for feature,sequence in next,sequences do + local flags=sequence.flags + if flags then + sequence.flags=pack_normal(flags) + end + local subtables=sequence.subtables + if subtables then + sequence.subtables=pack_normal(subtables) + end + local features=sequence.features + if features then + for script,feature in next,features do + features[script]=pack_normal(feature) + end + end + local order=sequence.order + if order then + sequence.order=pack_indexed(order) + end + local markclass=sequence.markclass + if markclass then + sequence.markclass=pack_boolean(markclass) + end + end + end + local lookups=resources.lookups + if lookups then + for name,lookup in next,lookups do + local flags=lookup.flags + if flags then + lookup.flags=pack_normal(flags) + end + local subtables=lookup.subtables + if subtables then + lookup.subtables=pack_normal(subtables) + end + end + end + local features=resources.features + if features then + for _,what in next,glists do + local list=features[what] + if list then + for feature,spec in next,list do + list[feature]=pack_normal(spec) + end + end + end + end + if not success(1,pass) then + return + end + end + if nt>0 then + for pass=1,2 do + if trace_packing then + report_otf("start packing: stage 2, pass %s",pass) + end + local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass) + for unicode,description in next,data.descriptions do + local kerns=description.kerns + if kerns then + description.kerns=pack_normal(kerns) + end + local math=description.math + if math then + local kerns=math.kerns + if kerns then + math.kerns=pack_normal(kerns) + end + end + local anchors=description.anchors + if anchors then + description.anchors=pack_normal(anchors) + end + local mlookups=description.mlookups + if mlookups then + for tag,mlookup in next,mlookups do + mlookups[tag]=pack_normal(mlookup) + end + end + local altuni=description.altuni + if altuni then + description.altuni=pack_normal(altuni) + end + end + local lookups=data.lookups + if lookups then + for _,lookup in next,lookups do + local rules=lookup.rules + if rules then + for i=1,#rules do + local rule=rules[i] + local r=rule.before if r then rule.before=pack_normal(r) end + local r=rule.after if r then rule.after=pack_normal(r) end + local r=rule.current if r then rule.current=pack_normal(r) end + end + end + end + end + local sequences=resources.sequences + if sequences then + for feature,sequence in next,sequences do + sequence.features=pack_normal(sequence.features) + end + end + if not success(2,pass) then + end + end + for pass=1,2 do + local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass) + for unicode,description in next,data.descriptions do + local slookups=description.slookups + if slookups then + description.slookups=pack_normal(slookups) + end + local mlookups=description.mlookups + if mlookups then + description.mlookups=pack_normal(mlookups) + end + end + end + end + end +end +local unpacked_mt={ + __index=function(t,k) + t[k]=false + return k + end +} +local function unpackdata(data) + if data then + local tables=data.tables + if tables then + local resources=data.resources + local lookuptypes=resources.lookuptypes + local unpacked={} + setmetatable(unpacked,unpacked_mt) + for unicode,description in next,data.descriptions do + local tv=tables[description.boundingbox] + if tv then + description.boundingbox=tv + end + local 
slookups=description.slookups + if slookups then + local tv=tables[slookups] + if tv then + description.slookups=tv + slookups=unpacked[tv] + end + if slookups then + for tag,lookup in next,slookups do + local what=lookuptypes[tag] + if what=="pair" then + local tv=tables[lookup[2]] + if tv then + lookup[2]=tv + end + local tv=tables[lookup[3]] + if tv then + lookup[3]=tv + end + elseif what~="substitution" then + local tv=tables[lookup] + if tv then + slookups[tag]=tv + end + end + end + end + end + local mlookups=description.mlookups + if mlookups then + local tv=tables[mlookups] + if tv then + description.mlookups=tv + mlookups=unpacked[tv] + end + if mlookups then + for tag,list in next,mlookups do + local tv=tables[list] + if tv then + mlookups[tag]=tv + list=unpacked[tv] + end + if list then + local what=lookuptypes[tag] + if what=="pair" then + for i=1,#list do + local lookup=list[i] + local tv=tables[lookup[2]] + if tv then + lookup[2]=tv + end + local tv=tables[lookup[3]] + if tv then + lookup[3]=tv + end + end + elseif what~="substitution" then + for i=1,#list do + local tv=tables[list[i]] + if tv then + list[i]=tv + end + end + end + end + end + end + end + local kerns=description.kerns + if kerns then + local tm=tables[kerns] + if tm then + description.kerns=tm + kerns=unpacked[tm] + end + if kerns then + for k,kern in next,kerns do + local tv=tables[kern] + if tv then + kerns[k]=tv + end + end + end + end + local math=description.math + if math then + local kerns=math.kerns + if kerns then + local tm=tables[kerns] + if tm then + math.kerns=tm + kerns=unpacked[tm] + end + if kerns then + for k,kern in next,kerns do + local tv=tables[kern] + if tv then + kerns[k]=tv + end + end + end + end + end + local anchors=description.anchors + if anchors then + local ta=tables[anchors] + if ta then + description.anchors=ta + anchors=unpacked[ta] + end + if anchors then + for tag,anchor in next,anchors do + if tag=="baselig" then + for _,list in next,anchor do + for i=1,#list do + local tv=tables[list[i]] + if tv then + list[i]=tv + end + end + end + else + for a,data in next,anchor do + local tv=tables[data] + if tv then + anchor[a]=tv + end + end + end + end + end + end + local altuni=description.altuni + if altuni then + local altuni=tables[altuni] + if altuni then + description.altuni=altuni + for i=1,#altuni do + local tv=tables[altuni[i]] + if tv then + altuni[i]=tv + end + end + end + end + end + local lookups=data.lookups + if lookups then + for _,lookup in next,lookups do + local rules=lookup.rules + if rules then + for i=1,#rules do + local rule=rules[i] + local before=rule.before + if before then + local tv=tables[before] + if tv then + rule.before=tv + before=unpacked[tv] + end + if before then + for i=1,#before do + local tv=tables[before[i]] + if tv then + before[i]=tv + end + end + end + end + local after=rule.after + if after then + local tv=tables[after] + if tv then + rule.after=tv + after=unpacked[tv] + end + if after then + for i=1,#after do + local tv=tables[after[i]] + if tv then + after[i]=tv + end + end + end + end + local current=rule.current + if current then + local tv=tables[current] + if tv then + rule.current=tv + current=unpacked[tv] + end + if current then + for i=1,#current do + local tv=tables[current[i]] + if tv then + current[i]=tv + end + end + end + end + local replacements=rule.replacements + if replacements then + local tv=tables[replacements] + if tv then + rule.replacements=tv + end + end + local lookups=rule.lookups + if lookups then + local 
tv=tables[lookups] + if tv then + rule.lookups=tv + end + end + end + end + end + end + local anchor_to_lookup=resources.anchor_to_lookup + if anchor_to_lookup then + for anchor,lookup in next,anchor_to_lookup do + local tv=tables[lookup] + if tv then + anchor_to_lookup[anchor]=tv + end + end + end + local lookup_to_anchor=resources.lookup_to_anchor + if lookup_to_anchor then + for lookup,anchor in next,lookup_to_anchor do + local tv=tables[anchor] + if tv then + lookup_to_anchor[lookup]=tv + end + end + end + local ls=resources.sequences + if ls then + for _,feature in next,ls do + local flags=feature.flags + if flags then + local tv=tables[flags] + if tv then + feature.flags=tv + end + end + local subtables=feature.subtables + if subtables then + local tv=tables[subtables] + if tv then + feature.subtables=tv + end + end + local features=feature.features + if features then + local tv=tables[features] + if tv then + feature.features=tv + features=unpacked[tv] + end + if features then + for script,data in next,features do + local tv=tables[data] + if tv then + features[script]=tv + end + end + end + end + local order=feature.order + if order then + local tv=tables[order] + if tv then + feature.order=tv + end + end + local markclass=feature.markclass + if markclass then + local tv=tables[markclass] + if tv then + feature.markclass=tv + end + end + end + end + local lookups=resources.lookups + if lookups then + for _,lookup in next,lookups do + local flags=lookup.flags + if flags then + local tv=tables[flags] + if tv then + lookup.flags=tv + end + end + local subtables=lookup.subtables + if subtables then + local tv=tables[subtables] + if tv then + lookup.subtables=tv + end + end + end + end + local features=resources.features + if features then + for _,what in next,glists do + local feature=features[what] + if feature then + for tag,spec in next,feature do + local tv=tables[spec] + if tv then + feature[tag]=tv + end + end + end + end + end + data.tables=nil + end + end +end +if otf.enhancers.register then + otf.enhancers.register("pack",packdata) + otf.enhancers.register("unpack",unpackdata) +end +otf.enhancers.unpack=unpackdata +otf.enhancers.pack=packdata + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['luatex-fonts-lua']={ + version=1.001, + comment="companion to luatex-*.tex", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end +local fonts=fonts +fonts.formats.lua="lua" +function fonts.readers.lua(specification) + local fullname=specification.filename or "" + if fullname=="" then + local forced=specification.forced or "" + if forced~="" then + fullname=specification.name.."."..forced + else + fullname=specification.name + end + end + local fullname=resolvers.findfile(fullname) or "" + if fullname~="" then + local loader=loadfile(fullname) + loader=loader and loader() + return loader and loader(specification) + end +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-def']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local 
format,gmatch,match,find,lower,gsub=string.format,string.gmatch,string.match,string.find,string.lower,string.gsub +local tostring,next=tostring,next +local lpegmatch=lpeg.match +local suffixonly,removesuffix=file.suffix,file.removesuffix +local allocate=utilities.storage.allocate +local trace_defining=false trackers .register("fonts.defining",function(v) trace_defining=v end) +local directive_embedall=false directives.register("fonts.embedall",function(v) directive_embedall=v end) +trackers.register("fonts.loading","fonts.defining","otf.loading","afm.loading","tfm.loading") +trackers.register("fonts.all","fonts.*","otf.*","afm.*","tfm.*") +local report_defining=logs.reporter("fonts","defining") +local fonts=fonts +local fontdata=fonts.hashes.identifiers +local readers=fonts.readers +local definers=fonts.definers +local specifiers=fonts.specifiers +local constructors=fonts.constructors +local fontgoodies=fonts.goodies +readers.sequence=allocate { 'otf','ttf','afm','tfm','lua' } +local variants=allocate() +specifiers.variants=variants +definers.methods=definers.methods or {} +local internalized=allocate() +local lastdefined=nil +local loadedfonts=constructors.loadedfonts +local designsizes=constructors.designsizes +local resolvefile=fontgoodies and fontgoodies.filenames and fontgoodies.filenames.resolve or function(s) return s end +local splitter,splitspecifiers=nil,"" +local P,C,S,Cc=lpeg.P,lpeg.C,lpeg.S,lpeg.Cc +local left=P("(") +local right=P(")") +local colon=P(":") +local space=P(" ") +definers.defaultlookup="file" +local prefixpattern=P(false) +local function addspecifier(symbol) + splitspecifiers=splitspecifiers..symbol + local method=S(splitspecifiers) + local lookup=C(prefixpattern)*colon + local sub=left*C(P(1-left-right-method)^1)*right + local specification=C(method)*C(P(1)^1) + local name=C((1-sub-specification)^1) + splitter=P((lookup+Cc(""))*name*(sub+Cc(""))*(specification+Cc(""))) +end +local function addlookup(str,default) + prefixpattern=prefixpattern+P(str) +end +definers.addlookup=addlookup +addlookup("file") +addlookup("name") +addlookup("spec") +local function getspecification(str) + return lpegmatch(splitter,str or "") +end +definers.getspecification=getspecification +function definers.registersplit(symbol,action,verbosename) + addspecifier(symbol) + variants[symbol]=action + if verbosename then + variants[verbosename]=action + end +end +local function makespecification(specification,lookup,name,sub,method,detail,size) + size=size or 655360 + if not lookup or lookup=="" then + lookup=definers.defaultlookup + end + if trace_defining then + report_defining("specification %a, lookup %a, name %a, sub %a, method %a, detail %a", + specification,lookup,name,sub,method,detail) + end + local t={ + lookup=lookup, + specification=specification, + size=size, + name=name, + sub=sub, + method=method, + detail=detail, + resolved="", + forced="", + features={}, + } + return t +end +definers.makespecification=makespecification +function definers.analyze(specification,size) + local lookup,name,sub,method,detail=getspecification(specification or "") + return makespecification(specification,lookup,name,sub,method,detail,size) +end +definers.resolvers=definers.resolvers or {} +local resolvers=definers.resolvers +function resolvers.file(specification) + local name=resolvefile(specification.name) + local suffix=lower(suffixonly(name)) + if fonts.formats[suffix] then + specification.forced=suffix + specification.forcedname=name + specification.name=removesuffix(name) + else + 
specification.name=name + end +end +function resolvers.name(specification) + local resolve=fonts.names.resolve + if resolve then + local resolved,sub=resolve(specification.name,specification.sub,specification) + if resolved then + specification.resolved=resolved + specification.sub=sub + local suffix=lower(suffixonly(resolved)) + if fonts.formats[suffix] then + specification.forced=suffix + specification.forcedname=resolved + specification.name=removesuffix(resolved) + else + specification.name=resolved + end + end + else + resolvers.file(specification) + end +end +function resolvers.spec(specification) + local resolvespec=fonts.names.resolvespec + if resolvespec then + local resolved,sub=resolvespec(specification.name,specification.sub,specification) + if resolved then + specification.resolved=resolved + specification.sub=sub + specification.forced=lower(suffixonly(resolved)) + specification.forcedname=resolved + specification.name=removesuffix(resolved) + end + else + resolvers.name(specification) + end +end +function definers.resolve(specification) + if not specification.resolved or specification.resolved=="" then + local r=resolvers[specification.lookup] + if r then + r(specification) + end + end + if specification.forced=="" then + specification.forced=nil + specification.forcedname=nil + end + specification.hash=lower(specification.name..' @ '..constructors.hashfeatures(specification)) + if specification.sub and specification.sub~="" then + specification.hash=specification.sub..' @ '..specification.hash + end + return specification +end +function definers.applypostprocessors(tfmdata) + local postprocessors=tfmdata.postprocessors + if postprocessors then + local properties=tfmdata.properties + for i=1,#postprocessors do + local extrahash=postprocessors[i](tfmdata) + if type(extrahash)=="string" and extrahash~="" then + extrahash=gsub(lower(extrahash),"[^a-z]","-") + properties.fullname=format("%s-%s",properties.fullname,extrahash) + end + end + end + return tfmdata +end +local function checkembedding(tfmdata) + local properties=tfmdata.properties + local embedding + if directive_embedall then + embedding="full" + elseif properties and properties.filename and constructors.dontembed[properties.filename] then + embedding="no" + else + embedding="subset" + end + if properties then + properties.embedding=embedding + else + tfmdata.properties={ embedding=embedding } + end + tfmdata.embedding=embedding +end +function definers.loadfont(specification) + local hash=constructors.hashinstance(specification) + local tfmdata=loadedfonts[hash] + if not tfmdata then + local forced=specification.forced or "" + if forced~="" then + local reader=readers[lower(forced)] + tfmdata=reader and reader(specification) + if not tfmdata then + report_defining("forced type %a of %a not found",forced,specification.name) + end + else + local sequence=readers.sequence + for s=1,#sequence do + local reader=sequence[s] + if readers[reader] then + if trace_defining then + report_defining("trying (reader sequence driven) type %a for %a with file %a",reader,specification.name,specification.filename) + end + tfmdata=readers[reader](specification) + if tfmdata then + break + else + specification.filename=nil + end + end + end + end + if tfmdata then + tfmdata=definers.applypostprocessors(tfmdata) + checkembedding(tfmdata) + loadedfonts[hash]=tfmdata + designsizes[specification.hash]=tfmdata.parameters.designsize + end + end + if not tfmdata then + report_defining("font with asked name %a is not found using lookup 
%a",specification.name,specification.lookup) + end + return tfmdata +end +function constructors.checkvirtualids() +end +function constructors.readanddefine(name,size) + local specification=definers.analyze(name,size) + local method=specification.method + if method and variants[method] then + specification=variants[method](specification) + end + specification=definers.resolve(specification) + local hash=constructors.hashinstance(specification) + local id=definers.registered(hash) + if not id then + local tfmdata=definers.loadfont(specification) + if tfmdata then + tfmdata.properties.hash=hash + constructors.checkvirtualids(tfmdata) + id=font.define(tfmdata) + definers.register(tfmdata,id) + else + id=0 + end + end + return fontdata[id],id +end +function definers.current() + return lastdefined +end +function definers.registered(hash) + local id=internalized[hash] + return id,id and fontdata[id] +end +function definers.register(tfmdata,id) + if tfmdata and id then + local hash=tfmdata.properties.hash + if not hash then + report_defining("registering font, id %a, name %a, invalid hash",id,tfmdata.properties.filename or "?") + elseif not internalized[hash] then + internalized[hash]=id + if trace_defining then + report_defining("registering font, id %s, hash %a",id,hash) + end + fontdata[id]=tfmdata + end + end +end +function definers.read(specification,size,id) + statistics.starttiming(fonts) + if type(specification)=="string" then + specification=definers.analyze(specification,size) + end + local method=specification.method + if method and variants[method] then + specification=variants[method](specification) + end + specification=definers.resolve(specification) + local hash=constructors.hashinstance(specification) + local tfmdata=definers.registered(hash) + if tfmdata then + if trace_defining then + report_defining("already hashed: %s",hash) + end + else + tfmdata=definers.loadfont(specification) + if tfmdata then + if trace_defining then + report_defining("loaded and hashed: %s",hash) + end + tfmdata.properties.hash=hash + if id then + definers.register(tfmdata,id) + end + else + if trace_defining then + report_defining("not loaded and hashed: %s",hash) + end + end + end + lastdefined=tfmdata or id + if not tfmdata then + report_defining("unknown font %a, loading aborted",specification.name) + elseif trace_defining and type(tfmdata)=="table" then + local properties=tfmdata.properties or {} + local parameters=tfmdata.parameters or {} + report_defining("using %a font with id %a, name %a, size %a, bytes %a, encoding %a, fullname %a, filename %a", + properties.format or "unknown",id,properties.name,parameters.size,properties.encodingbytes, + properties.encodingname,properties.fullname,file.basename(properties.filename)) + end + statistics.stoptiming(fonts) + return tfmdata +end +function font.getfont(id) + return fontdata[id] +end +callbacks.register('define_font',definers.read,"definition of fonts (tfmdata preparation)") + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['luatex-font-def']={ + version=1.001, + comment="companion to luatex-*.tex", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end +local fonts=fonts +fonts.constructors.namemode="specification" +function fonts.definers.getspecification(str) + return 
"",str,"",":",str +end +local list={} +local function issome () list.lookup='name' end +local function isfile () list.lookup='file' end +local function isname () list.lookup='name' end +local function thename(s) list.name=s end +local function issub (v) list.sub=v end +local function iscrap (s) list.crap=string.lower(s) end +local function iskey (k,v) list[k]=v end +local function istrue (s) list[s]=true end +local function isfalse(s) list[s]=false end +local P,S,R,C=lpeg.P,lpeg.S,lpeg.R,lpeg.C +local spaces=P(" ")^0 +local namespec=(1-S("/:("))^0 +local crapspec=spaces*P("/")*(((1-P(":"))^0)/iscrap)*spaces +local filename_1=P("file:")/isfile*(namespec/thename) +local filename_2=P("[")*P(true)/isname*(((1-P("]"))^0)/thename)*P("]") +local fontname_1=P("name:")/isname*(namespec/thename) +local fontname_2=P(true)/issome*(namespec/thename) +local sometext=(R("az","AZ","09")+S("+-."))^1 +local truevalue=P("+")*spaces*(sometext/istrue) +local falsevalue=P("-")*spaces*(sometext/isfalse) +local keyvalue=(C(sometext)*spaces*P("=")*spaces*C(sometext))/iskey +local somevalue=sometext/istrue +local subvalue=P("(")*(C(P(1-S("()"))^1)/issub)*P(")") +local option=spaces*(keyvalue+falsevalue+truevalue+somevalue)*spaces +local options=P(":")*spaces*(P(";")^0*option)^0 +local pattern=(filename_1+filename_2+fontname_1+fontname_2)*subvalue^0*crapspec^0*options^0 +local function colonized(specification) + list={} + lpeg.match(pattern,specification.specification) + list.crap=nil + if list.name then + specification.name=list.name + list.name=nil + end + if list.lookup then + specification.lookup=list.lookup + list.lookup=nil + end + if list.sub then + specification.sub=list.sub + list.sub=nil + end + specification.features.normal=fonts.handlers.otf.features.normalize(list) + return specification +end +fonts.definers.registersplit(":",colonized,"cryptic") +fonts.definers.registersplit("",colonized,"more cryptic") +function fonts.definers.applypostprocessors(tfmdata) + local postprocessors=tfmdata.postprocessors + if postprocessors then + for i=1,#postprocessors do + local extrahash=postprocessors[i](tfmdata) + if type(extrahash)=="string" and extrahash~="" then + extrahash=string.gsub(lower(extrahash),"[^a-z]","-") + tfmdata.properties.fullname=format("%s-%s",tfmdata.properties.fullname,extrahash) + end + end + end + return tfmdata +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['luatex-fonts-ext']={ + version=1.001, + comment="companion to luatex-*.tex", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end +local fonts=fonts +local otffeatures=fonts.constructors.newfeatures("otf") +local function initializeitlc(tfmdata,value) + if value then + local parameters=tfmdata.parameters + local italicangle=parameters.italicangle + if italicangle and italicangle~=0 then + local properties=tfmdata.properties + local factor=tonumber(value) or 1 + properties.hasitalics=true + properties.autoitalicamount=factor*(parameters.uwidth or 40)/2 + end + end +end +otffeatures.register { + name="itlc", + description="italic correction", + initializers={ + base=initializeitlc, + node=initializeitlc, + } +} +local function initializeslant(tfmdata,value) + value=tonumber(value) + if not value then + value=0 + elseif value>1 then + value=1 + elseif value<-1 then 
+ value=-1 + end + tfmdata.parameters.slantfactor=value +end +otffeatures.register { + name="slant", + description="slant glyphs", + initializers={ + base=initializeslant, + node=initializeslant, + } +} +local function initializeextend(tfmdata,value) + value=tonumber(value) + if not value then + value=0 + elseif value>10 then + value=10 + elseif value<-10 then + value=-10 + end + tfmdata.parameters.extendfactor=value +end +otffeatures.register { + name="extend", + description="scale glyphs horizontally", + initializers={ + base=initializeextend, + node=initializeextend, + } +} +fonts.protrusions=fonts.protrusions or {} +fonts.protrusions.setups=fonts.protrusions.setups or {} +local setups=fonts.protrusions.setups +local function initializeprotrusion(tfmdata,value) + if value then + local setup=setups[value] + if setup then + local factor,left,right=setup.factor or 1,setup.left or 1,setup.right or 1 + local emwidth=tfmdata.parameters.quad + tfmdata.parameters.protrusion={ + auto=true, + } + for i,chr in next,tfmdata.characters do + local v,pl,pr=setup[i],nil,nil + if v then + pl,pr=v[1],v[2] + end + if pl and pl~=0 then chr.left_protruding=left*pl*factor end + if pr and pr~=0 then chr.right_protruding=right*pr*factor end + end + end + end +end +otffeatures.register { + name="protrusion", + description="shift characters into the left and or right margin", + initializers={ + base=initializeprotrusion, + node=initializeprotrusion, + } +} +fonts.expansions=fonts.expansions or {} +fonts.expansions.setups=fonts.expansions.setups or {} +local setups=fonts.expansions.setups +local function initializeexpansion(tfmdata,value) + if value then + local setup=setups[value] + if setup then + local factor=setup.factor or 1 + tfmdata.parameters.expansion={ + stretch=10*(setup.stretch or 0), + shrink=10*(setup.shrink or 0), + step=10*(setup.step or 0), + auto=true, + } + for i,chr in next,tfmdata.characters do + local v=setup[i] + if v and v~=0 then + chr.expansion_factor=v*factor + else + chr.expansion_factor=factor + end + end + end + end +end +otffeatures.register { + name="expansion", + description="apply hz optimization", + initializers={ + base=initializeexpansion, + node=initializeexpansion, + } +} +function fonts.loggers.onetimemessage() end +local byte=string.byte +fonts.expansions.setups['default']={ + stretch=2,shrink=2,step=.5,factor=1, + [byte('A')]=0.5,[byte('B')]=0.7,[byte('C')]=0.7,[byte('D')]=0.5,[byte('E')]=0.7, + [byte('F')]=0.7,[byte('G')]=0.5,[byte('H')]=0.7,[byte('K')]=0.7,[byte('M')]=0.7, + [byte('N')]=0.7,[byte('O')]=0.5,[byte('P')]=0.7,[byte('Q')]=0.5,[byte('R')]=0.7, + [byte('S')]=0.7,[byte('U')]=0.7,[byte('W')]=0.7,[byte('Z')]=0.7, + [byte('a')]=0.7,[byte('b')]=0.7,[byte('c')]=0.7,[byte('d')]=0.7,[byte('e')]=0.7, + [byte('g')]=0.7,[byte('h')]=0.7,[byte('k')]=0.7,[byte('m')]=0.7,[byte('n')]=0.7, + [byte('o')]=0.7,[byte('p')]=0.7,[byte('q')]=0.7,[byte('s')]=0.7,[byte('u')]=0.7, + [byte('w')]=0.7,[byte('z')]=0.7, + [byte('2')]=0.7,[byte('3')]=0.7,[byte('6')]=0.7,[byte('8')]=0.7,[byte('9')]=0.7, +} +fonts.protrusions.setups['default']={ + factor=1,left=1,right=1, + [0x002C]={ 0,1 }, + [0x002E]={ 0,1 }, + [0x003A]={ 0,1 }, + [0x003B]={ 0,1 }, + [0x002D]={ 0,1 }, + [0x2013]={ 0,0.50 }, + [0x2014]={ 0,0.33 }, + [0x3001]={ 0,1 }, + [0x3002]={ 0,1 }, + [0x060C]={ 0,1 }, + [0x061B]={ 0,1 }, + [0x06D4]={ 0,1 }, +} +fonts.handlers.otf.features.normalize=function(t) + if t.rand then + t.rand="random" + end + return t +end +function fonts.helpers.nametoslot(name) + local t=type(name) + if 
t=="string" then + local tfmdata=fonts.hashes.identifiers[currentfont()] + local shared=tfmdata and tfmdata.shared + local fntdata=shared and shared.rawdata + return fntdata and fntdata.resources.unicodes[name] + elseif t=="number" then + return n + end +end +fonts.encodings=fonts.encodings or {} +local reencodings={} +fonts.encodings.reencodings=reencodings +local function specialreencode(tfmdata,value) + local encoding=value and reencodings[value] + if encoding then + local temp={} + local char=tfmdata.characters + for k,v in next,encoding do + temp[k]=char[v] + end + for k,v in next,temp do + char[k]=temp[k] + end + return string.format("reencoded:%s",value) + end +end +local function reencode(tfmdata,value) + tfmdata.postprocessors=tfmdata.postprocessors or {} + table.insert(tfmdata.postprocessors, + function(tfmdata) + return specialreencode(tfmdata,value) + end + ) +end +otffeatures.register { + name="reencode", + description="reencode characters", + manipulators={ + base=reencode, + node=reencode, + } +} + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['luatex-fonts-cbk']={ + version=1.001, + comment="companion to luatex-*.tex", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end +local fonts=fonts +local nodes=nodes +local traverse_id=node.traverse_id +local glyph_code=nodes.nodecodes.glyph +local ligaturing=node.ligaturing +local kerning=node.kerning +function node.ligaturing() texio.write_nl("warning: node.ligaturing is already applied") end +function node.kerning () texio.write_nl("warning: node.kerning is already applied") end +function nodes.handlers.characters(head) + local fontdata=fonts.hashes.identifiers + if fontdata then + local usedfonts,basefonts,prevfont,basefont={},{},nil,nil + for n in traverse_id(glyph_code,head) do + local font=n.font + if font~=prevfont then + if basefont then + basefont[2]=n.prev + end + prevfont=font + local used=usedfonts[font] + if not used then + local tfmdata=fontdata[font] + if tfmdata then + local shared=tfmdata.shared + if shared then + local processors=shared.processes + if processors and #processors>0 then + usedfonts[font]=processors + else + basefont={ n,nil } + basefonts[#basefonts+1]=basefont + end + end + end + end + end + end + if next(usedfonts) then + for font,processors in next,usedfonts do + for i=1,#processors do + head=processors[i](head,font,0) or head + end + end + end + if #basefonts>0 then + for i=1,#basefonts do + local range=basefonts[i] + local start,stop=range[1],range[2] + if stop then + ligaturing(start,stop) + kerning(start,stop) + else + ligaturing(start) + kerning(start) + end + end + end + return head,true + else + return head,false + end +end +function nodes.simple_font_handler(head) + head=nodes.handlers.characters(head) + nodes.injections.handler(head) + nodes.handlers.protectglyphs(head) + return head +end + +end -- closure diff --git a/src/fontloader/luaotfload-fonts-cbk.lua b/src/fontloader/luaotfload-fonts-cbk.lua new file mode 100644 index 0000000..9db94f6 --- /dev/null +++ b/src/fontloader/luaotfload-fonts-cbk.lua @@ -0,0 +1,68 @@ +if not modules then modules = { } end modules ['luatex-fonts-cbk'] = { + version = 1.001, + comment = "companion to luatex-*.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA 
ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end + +local fonts = fonts +local nodes = nodes + +-- Fonts: (might move to node-gef.lua) + +local traverse_id = node.traverse_id +local glyph_code = nodes.nodecodes.glyph + +function nodes.handlers.characters(head) + local fontdata = fonts.hashes.identifiers + if fontdata then + local usedfonts, done, prevfont = { }, false, nil + for n in traverse_id(glyph_code,head) do + local font = n.font + if font ~= prevfont then + prevfont = font + local used = usedfonts[font] + if not used then + local tfmdata = fontdata[font] -- + if tfmdata then + local shared = tfmdata.shared -- we need to check shared, only when same features + if shared then + local processors = shared.processes + if processors and #processors > 0 then + usedfonts[font] = processors + done = true + end + end + end + end + end + end + if done then + for font, processors in next, usedfonts do + for i=1,#processors do + local h, d = processors[i](head,font,0) + head, done = h or head, done or d + end + end + end + return head, true + else + return head, false + end +end + +function nodes.simple_font_handler(head) +-- lang.hyphenate(head) + head = nodes.handlers.characters(head) + nodes.injections.handler(head) + nodes.handlers.protectglyphs(head) + head = node.ligaturing(head) + head = node.kerning(head) + return head +end diff --git a/src/fontloader/luaotfload-fonts-def.lua b/src/fontloader/luaotfload-fonts-def.lua new file mode 100644 index 0000000..0c2f0db --- /dev/null +++ b/src/fontloader/luaotfload-fonts-def.lua @@ -0,0 +1,97 @@ +if not modules then modules = { } end modules ['luatex-font-def'] = { + version = 1.001, + comment = "companion to luatex-*.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end + +local fonts = fonts + +-- A bit of tuning for definitions. + +fonts.constructors.namemode = "specification" -- somehow latex needs this (changed name!) => will change into an overload + +-- tricky: we sort of bypass the parser and directly feed all into +-- the sub parser + +function fonts.definers.getspecification(str) + return "", str, "", ":", str +end + +-- the generic name parser (different from context!) + +local list = { } + +local function issome () list.lookup = 'name' end -- xetex mode prefers name (not in context!) 
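+
+-- [editorial sketch, not part of the upstream file] the parser assembled below
+-- splits a request such as "name:Latin Modern Roman:+liga;mode=node" into a
+-- lookup, a name, an optional sub font and a set of feature switches. A small
+-- standalone lpeg fragment in the same spirit, covering only the option part
+-- (all toy names here are illustrative, not loader API):
+do
+    local P, R, C, Cc, Cg, Cf, Ct = lpeg.P, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cg, lpeg.Cf, lpeg.Ct
+    local word    = C(R("az","AZ","09")^1)
+    local enable  = Cg(P("+") * word * Cc(true))    -- "+liga"     -> liga = true
+    local disable = Cg(P("-") * word * Cc(false))   -- "-tlig"     -> tlig = false
+    local assign  = Cg(word * P("=") * word)        -- "mode=node" -> mode = "node"
+    local option  = assign + enable + disable
+    local options = Cf(Ct("") * option * (P(";") * option)^0, rawset)
+    local demo    = lpeg.match(options,"mode=node;+liga;-tlig")
+    -- demo is now { mode = "node", liga = true, tlig = false }
+end
+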
+local function isfile () list.lookup = 'file' end +local function isname () list.lookup = 'name' end +local function thename(s) list.name = s end +local function issub (v) list.sub = v end +local function iscrap (s) list.crap = string.lower(s) end +local function iskey (k,v) list[k] = v end +local function istrue (s) list[s] = true end +local function isfalse(s) list[s] = false end + +local P, S, R, C = lpeg.P, lpeg.S, lpeg.R, lpeg.C + +local spaces = P(" ")^0 +local namespec = (1-S("/:("))^0 -- was: (1-S("/: ("))^0 +local crapspec = spaces * P("/") * (((1-P(":"))^0)/iscrap) * spaces +local filename_1 = P("file:")/isfile * (namespec/thename) +local filename_2 = P("[") * P(true)/isname * (((1-P("]"))^0)/thename) * P("]") +local fontname_1 = P("name:")/isname * (namespec/thename) +local fontname_2 = P(true)/issome * (namespec/thename) +local sometext = (R("az","AZ","09") + S("+-."))^1 +local truevalue = P("+") * spaces * (sometext/istrue) +local falsevalue = P("-") * spaces * (sometext/isfalse) +local keyvalue = (C(sometext) * spaces * P("=") * spaces * C(sometext))/iskey +local somevalue = sometext/istrue +local subvalue = P("(") * (C(P(1-S("()"))^1)/issub) * P(")") -- for Kim +local option = spaces * (keyvalue + falsevalue + truevalue + somevalue) * spaces +local options = P(":") * spaces * (P(";")^0 * option)^0 + +local pattern = (filename_1 + filename_2 + fontname_1 + fontname_2) * subvalue^0 * crapspec^0 * options^0 + +local function colonized(specification) -- xetex mode + list = { } + lpeg.match(pattern,specification.specification) + list.crap = nil -- style not supported, maybe some day + if list.name then + specification.name = list.name + list.name = nil + end + if list.lookup then + specification.lookup = list.lookup + list.lookup = nil + end + if list.sub then + specification.sub = list.sub + list.sub = nil + end + specification.features.normal = fonts.handlers.otf.features.normalize(list) + return specification +end + +fonts.definers.registersplit(":",colonized,"cryptic") +fonts.definers.registersplit("", colonized,"more cryptic") -- catches \font\text=[names] + +function fonts.definers.applypostprocessors(tfmdata) + local postprocessors = tfmdata.postprocessors + if postprocessors then + for i=1,#postprocessors do + local extrahash = postprocessors[i](tfmdata) -- after scaling etc + if type(extrahash) == "string" and extrahash ~= "" then + -- e.g. 
a reencoding needs this + extrahash = string.gsub(lower(extrahash),"[^a-z]","-") + tfmdata.properties.fullname = format("%s-%s",tfmdata.properties.fullname,extrahash) + end + end + end + return tfmdata +end diff --git a/src/fontloader/luaotfload-fonts-enc.lua b/src/fontloader/luaotfload-fonts-enc.lua new file mode 100644 index 0000000..e20c3a0 --- /dev/null +++ b/src/fontloader/luaotfload-fonts-enc.lua @@ -0,0 +1,28 @@ +if not modules then modules = { } end modules ['luatex-font-enc'] = { + version = 1.001, + comment = "companion to luatex-*.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end + +local fonts = fonts +fonts.encodings = { } +fonts.encodings.agl = { } + +setmetatable(fonts.encodings.agl, { __index = function(t,k) + if k == "unicodes" then + texio.write(" ") + local unicodes = dofile(resolvers.findfile("font-age.lua")) + fonts.encodings.agl = { unicodes = unicodes } + return unicodes + else + return nil + end +end }) + diff --git a/src/fontloader/luaotfload-fonts-ext.lua b/src/fontloader/luaotfload-fonts-ext.lua new file mode 100644 index 0000000..b60d045 --- /dev/null +++ b/src/fontloader/luaotfload-fonts-ext.lua @@ -0,0 +1,272 @@ +if not modules then modules = { } end modules ['luatex-fonts-ext'] = { + version = 1.001, + comment = "companion to luatex-*.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end + +local fonts = fonts +local otffeatures = fonts.constructors.newfeatures("otf") + +-- A few generic extensions. 
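+
+-- [editorial sketch, not part of the upstream file] the extensions in this file
+-- are wired up the same way: a small function receives (tfmdata, value), adjusts
+-- parameters or properties, and is registered for both base and node mode via
+-- otffeatures.register (as an initializer, or as a manipulator in the reencode
+-- case). A hypothetical feature showing the shape of that pattern ("demoscale"
+-- is an assumed name; it is kept local and deliberately not registered):
+do
+    local function initializedemoscale(tfmdata,value)
+        local factor = tonumber(value)
+        if factor and factor > 0 then
+            tfmdata.parameters.extendfactor = factor -- reuse an existing parameter
+        end
+    end
+    -- registering it would look like:
+    --
+    -- otffeatures.register {
+    --     name         = "demoscale",
+    --     description  = "illustrative horizontal scaling",
+    --     initializers = { base = initializedemoscale, node = initializedemoscale },
+    -- }
+end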
+ +local function initializeitlc(tfmdata,value) + if value then + -- the magic 40 and it formula come from Dohyun Kim but we might need another guess + local parameters = tfmdata.parameters + local italicangle = parameters.italicangle + if italicangle and italicangle ~= 0 then + local properties = tfmdata.properties + local factor = tonumber(value) or 1 + properties.hasitalics = true + properties.autoitalicamount = factor * (parameters.uwidth or 40)/2 + end + end +end + +otffeatures.register { + name = "itlc", + description = "italic correction", + initializers = { + base = initializeitlc, + node = initializeitlc, + } +} + +-- slant and extend + +local function initializeslant(tfmdata,value) + value = tonumber(value) + if not value then + value = 0 + elseif value > 1 then + value = 1 + elseif value < -1 then + value = -1 + end + tfmdata.parameters.slantfactor = value +end + +otffeatures.register { + name = "slant", + description = "slant glyphs", + initializers = { + base = initializeslant, + node = initializeslant, + } +} + +local function initializeextend(tfmdata,value) + value = tonumber(value) + if not value then + value = 0 + elseif value > 10 then + value = 10 + elseif value < -10 then + value = -10 + end + tfmdata.parameters.extendfactor = value +end + +otffeatures.register { + name = "extend", + description = "scale glyphs horizontally", + initializers = { + base = initializeextend, + node = initializeextend, + } +} + +-- expansion and protrusion + +fonts.protrusions = fonts.protrusions or { } +fonts.protrusions.setups = fonts.protrusions.setups or { } + +local setups = fonts.protrusions.setups + +local function initializeprotrusion(tfmdata,value) + if value then + local setup = setups[value] + if setup then + local factor, left, right = setup.factor or 1, setup.left or 1, setup.right or 1 + local emwidth = tfmdata.parameters.quad + tfmdata.parameters.protrusion = { + auto = true, + } + for i, chr in next, tfmdata.characters do + local v, pl, pr = setup[i], nil, nil + if v then + pl, pr = v[1], v[2] + end + if pl and pl ~= 0 then chr.left_protruding = left *pl*factor end + if pr and pr ~= 0 then chr.right_protruding = right*pr*factor end + end + end + end +end + +otffeatures.register { + name = "protrusion", + description = "shift characters into the left and or right margin", + initializers = { + base = initializeprotrusion, + node = initializeprotrusion, + } +} + +fonts.expansions = fonts.expansions or { } +fonts.expansions.setups = fonts.expansions.setups or { } + +local setups = fonts.expansions.setups + +local function initializeexpansion(tfmdata,value) + if value then + local setup = setups[value] + if setup then + local factor = setup.factor or 1 + tfmdata.parameters.expansion = { + stretch = 10 * (setup.stretch or 0), + shrink = 10 * (setup.shrink or 0), + step = 10 * (setup.step or 0), + auto = true, + } + for i, chr in next, tfmdata.characters do + local v = setup[i] + if v and v ~= 0 then + chr.expansion_factor = v*factor + else -- can be option + chr.expansion_factor = factor + end + end + end + end +end + +otffeatures.register { + name = "expansion", + description = "apply hz optimization", + initializers = { + base = initializeexpansion, + node = initializeexpansion, + } +} + +-- left over + +function fonts.loggers.onetimemessage() end + +-- example vectors + +local byte = string.byte + +fonts.expansions.setups['default'] = { + + stretch = 2, shrink = 2, step = .5, factor = 1, + + [byte('A')] = 0.5, [byte('B')] = 0.7, [byte('C')] = 0.7, [byte('D')] = 0.5, [byte('E')] 
= 0.7, + [byte('F')] = 0.7, [byte('G')] = 0.5, [byte('H')] = 0.7, [byte('K')] = 0.7, [byte('M')] = 0.7, + [byte('N')] = 0.7, [byte('O')] = 0.5, [byte('P')] = 0.7, [byte('Q')] = 0.5, [byte('R')] = 0.7, + [byte('S')] = 0.7, [byte('U')] = 0.7, [byte('W')] = 0.7, [byte('Z')] = 0.7, + [byte('a')] = 0.7, [byte('b')] = 0.7, [byte('c')] = 0.7, [byte('d')] = 0.7, [byte('e')] = 0.7, + [byte('g')] = 0.7, [byte('h')] = 0.7, [byte('k')] = 0.7, [byte('m')] = 0.7, [byte('n')] = 0.7, + [byte('o')] = 0.7, [byte('p')] = 0.7, [byte('q')] = 0.7, [byte('s')] = 0.7, [byte('u')] = 0.7, + [byte('w')] = 0.7, [byte('z')] = 0.7, + [byte('2')] = 0.7, [byte('3')] = 0.7, [byte('6')] = 0.7, [byte('8')] = 0.7, [byte('9')] = 0.7, +} + +fonts.protrusions.setups['default'] = { + + factor = 1, left = 1, right = 1, + + [0x002C] = { 0, 1 }, -- comma + [0x002E] = { 0, 1 }, -- period + [0x003A] = { 0, 1 }, -- colon + [0x003B] = { 0, 1 }, -- semicolon + [0x002D] = { 0, 1 }, -- hyphen + [0x2013] = { 0, 0.50 }, -- endash + [0x2014] = { 0, 0.33 }, -- emdash + [0x3001] = { 0, 1 }, -- ideographic comma 、 + [0x3002] = { 0, 1 }, -- ideographic full stop 。 + [0x060C] = { 0, 1 }, -- arabic comma ، + [0x061B] = { 0, 1 }, -- arabic semicolon ؛ + [0x06D4] = { 0, 1 }, -- arabic full stop ۔ + +} + +-- normalizer + +fonts.handlers.otf.features.normalize = function(t) + if t.rand then + t.rand = "random" + end + return t +end + +-- bonus + +function fonts.helpers.nametoslot(name) + local t = type(name) + if t == "string" then + local tfmdata = fonts.hashes.identifiers[currentfont()] + local shared = tfmdata and tfmdata.shared + local fntdata = shared and shared.rawdata + return fntdata and fntdata.resources.unicodes[name] + elseif t == "number" then + return n + end +end + +-- \font\test=file:somefont:reencode=mymessup +-- +-- fonts.encodings.reencodings.mymessup = { +-- [109] = 110, -- m +-- [110] = 109, -- n +-- } + +fonts.encodings = fonts.encodings or { } +local reencodings = { } +fonts.encodings.reencodings = reencodings + +local function specialreencode(tfmdata,value) + -- we forget about kerns as we assume symbols and we + -- could issue a message if ther are kerns but it's + -- a hack anyway so we odn't care too much here + local encoding = value and reencodings[value] + if encoding then + local temp = { } + local char = tfmdata.characters + for k, v in next, encoding do + temp[k] = char[v] + end + for k, v in next, temp do + char[k] = temp[k] + end + -- if we use the font otherwise luatex gets confused so + -- we return an additional hash component for fullname + return string.format("reencoded:%s",value) + end +end + +local function reencode(tfmdata,value) + tfmdata.postprocessors = tfmdata.postprocessors or { } + table.insert(tfmdata.postprocessors, + function(tfmdata) + return specialreencode(tfmdata,value) + end + ) +end + +otffeatures.register { + name = "reencode", + description = "reencode characters", + manipulators = { + base = reencode, + node = reencode, + } +} diff --git a/src/fontloader/luaotfload-fonts-inj.lua b/src/fontloader/luaotfload-fonts-inj.lua new file mode 100644 index 0000000..ae48150 --- /dev/null +++ b/src/fontloader/luaotfload-fonts-inj.lua @@ -0,0 +1,526 @@ +if not modules then modules = { } end modules ['node-inj'] = { + version = 1.001, + comment = "companion to node-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files", +} + +-- This is very experimental (this will change when we have luatex > .50 
and +-- a few pending thingies are available. Also, Idris needs to make a few more +-- test fonts. Btw, future versions of luatex will have extended glyph properties +-- that can be of help. Some optimizations can go away when we have faster machines. + +-- todo: make a special one for context + +local next = next +local utfchar = utf.char + +local trace_injections = false trackers.register("nodes.injections", function(v) trace_injections = v end) + +local report_injections = logs.reporter("nodes","injections") + +local attributes, nodes, node = attributes, nodes, node + +fonts = fonts +local fontdata = fonts.hashes.identifiers + +nodes.injections = nodes.injections or { } +local injections = nodes.injections + +local nodecodes = nodes.nodecodes +local glyph_code = nodecodes.glyph +local kern_code = nodecodes.kern +local nodepool = nodes.pool +local newkern = nodepool.kern + +local traverse_id = node.traverse_id +local insert_node_before = node.insert_before +local insert_node_after = node.insert_after + +local a_kernpair = attributes.private('kernpair') +local a_ligacomp = attributes.private('ligacomp') +local a_markbase = attributes.private('markbase') +local a_markmark = attributes.private('markmark') +local a_markdone = attributes.private('markdone') +local a_cursbase = attributes.private('cursbase') +local a_curscurs = attributes.private('curscurs') +local a_cursdone = attributes.private('cursdone') + +-- This injector has been tested by Idris Samawi Hamid (several arabic fonts as well as +-- the rather demanding Husayni font), Khaled Hosny (latin and arabic) and Kaj Eigner +-- (arabic, hebrew and thai) and myself (whatever font I come across). I'm pretty sure +-- that this code is not 100% okay but examples are needed to figure things out. + +function injections.installnewkern(nk) + newkern = nk or newkern +end + +local cursives = { } +local marks = { } +local kerns = { } + +-- Currently we do gpos/kern in a bit inofficial way but when we have the extra fields in +-- glyphnodes to manipulate ht/dp/wd explicitly I will provide an alternative; also, we +-- can share tables. + +-- For the moment we pass the r2l key ... volt/arabtype tests .. idris: this needs +-- checking with husayni (volt and fontforge). 
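+
+-- [editorial note, not part of the upstream file] the setters below do not move
+-- anything themselves: they only record displacements in the side tables above
+-- (kerns, marks, cursives) and tag the glyph nodes with a private attribute that
+-- points at the record; injections.handler later in this file then turns those
+-- records into kern nodes and x/y offsets. Read off the code that follows, the
+-- recorded layouts are roughly:
+--
+--   kerns[bound]    = { rlmode, dx }                               -- setkern
+--   kerns[bound]    = { rlmode, x, y, w, h, r2lflag, width }       -- setpair
+--   cursives[bound] = { rlmode, dx, dy, ws, wn }                   -- setcursive
+--   marks[bound]    = { [index] = { dx, dy, rlmode, baseismark } } -- setmark
+--
+do
+    -- a toy version of the record-then-tag step (toy_* names are hypothetical):
+    local toy_kerns = { }
+    local function toy_setkern(glyph,dx)
+        local bound = #toy_kerns + 1
+        toy_kerns[bound] = dx       -- record the displacement now ...
+        glyph[a_kernpair] = bound   -- ... and let a later pass apply it
+    end
+end
+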
+ +function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext) + local dx, dy = factor*(exit[1]-entry[1]), factor*(exit[2]-entry[2]) + local ws, wn = tfmstart.width, tfmnext.width + local bound = #cursives + 1 + start[a_cursbase] = bound + nxt[a_curscurs] = bound + cursives[bound] = { rlmode, dx, dy, ws, wn } + return dx, dy, bound +end + +function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr) + local x, y, w, h = factor*spec[1], factor*spec[2], factor*spec[3], factor*spec[4] + -- dy = y - h + if x ~= 0 or w ~= 0 or y ~= 0 or h ~= 0 then + local bound = current[a_kernpair] + if bound then + local kb = kerns[bound] + -- inefficient but singles have less, but weird anyway, needs checking + kb[2], kb[3], kb[4], kb[5] = (kb[2] or 0) + x, (kb[3] or 0) + y, (kb[4] or 0)+ w, (kb[5] or 0) + h + else + bound = #kerns + 1 + current[a_kernpair] = bound + kerns[bound] = { rlmode, x, y, w, h, r2lflag, tfmchr.width } + end + return x, y, w, h, bound + end + return x, y, w, h -- no bound +end + +function injections.setkern(current,factor,rlmode,x,tfmchr) + local dx = factor*x + if dx ~= 0 then + local bound = #kerns + 1 + current[a_kernpair] = bound + kerns[bound] = { rlmode, dx } + return dx, bound + else + return 0, 0 + end +end + +function injections.setmark(start,base,factor,rlmode,ba,ma,index,baseismark) -- ba=baseanchor, ma=markanchor + local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2]) -- the index argument is no longer used but when this + local bound = base[a_markbase] -- fails again we should pass it + local index = 1 + if bound then + local mb = marks[bound] + if mb then + -- if not index then index = #mb + 1 end + index = #mb + 1 + mb[index] = { dx, dy, rlmode } + start[a_markmark] = bound + start[a_markdone] = index + return dx, dy, bound + else + report_injections("possible problem, %U is base mark without data (id %a)",base.char,bound) + end + end +-- index = index or 1 + index = index or 1 + bound = #marks + 1 + base[a_markbase] = bound + start[a_markmark] = bound + start[a_markdone] = index + marks[bound] = { [index] = { dx, dy, rlmode, baseismark } } + return dx, dy, bound +end + +local function dir(n) + return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset" +end + +local function trace(head) + report_injections("begin run") + for n in traverse_id(glyph_code,head) do + if n.subtype < 256 then + local kp = n[a_kernpair] + local mb = n[a_markbase] + local mm = n[a_markmark] + local md = n[a_markdone] + local cb = n[a_cursbase] + local cc = n[a_curscurs] + local char = n.char + report_injections("font %s, char %U, glyph %c",n.font,char,char) + if kp then + local k = kerns[kp] + if k[3] then + report_injections(" pairkern: dir %a, x %p, y %p, w %p, h %p",dir(k[1]),k[2],k[3],k[4],k[5]) + else + report_injections(" kern: dir %a, dx %p",dir(k[1]),k[2]) + end + end + if mb then + report_injections(" markbase: bound %a",mb) + end + if mm then + local m = marks[mm] + if mb then + local m = m[mb] + if m then + report_injections(" markmark: bound %a, index %a, dx %p, dy %p",mm,md,m[1],m[2]) + else + report_injections(" markmark: bound %a, missing index",mm) + end + else + m = m[1] + report_injections(" markmark: bound %a, dx %p, dy %p",mm,m and m[1],m and m[2]) + end + end + if cb then + report_injections(" cursbase: bound %a",cb) + end + if cc then + local c = cursives[cc] + report_injections(" curscurs: bound %a, dir %a, dx %p, dy %p",cc,dir(c[1]),c[2],c[3]) + end + end + end + report_injections("end run") +end + +-- todo: reuse 
tables (i.e. no collection), but will be extra fields anyway +-- todo: check for attribute + +-- We can have a fast test on a font being processed, so we can check faster for marks etc +-- but I'll make a context variant anyway. + +local function show_result(head) + local current = head + local skipping = false + while current do + local id = current.id + if id == glyph_code then + report_injections("char: %C, width %p, xoffset %p, yoffset %p",current.char,current.width,current.xoffset,current.yoffset) + skipping = false + elseif id == kern_code then + report_injections("kern: %p",current.kern) + skipping = false + elseif not skipping then + report_injections() + skipping = true + end + current = current.next + end +end + +function injections.handler(head,where,keep) + local has_marks, has_cursives, has_kerns = next(marks), next(cursives), next(kerns) + if has_marks or has_cursives then + if trace_injections then + trace(head) + end + -- in the future variant we will not copy items but refs to tables + local done, ky, rl, valid, cx, wx, mk, nofvalid = false, { }, { }, { }, { }, { }, { }, 0 + if has_kerns then -- move outside loop + local nf, tm = nil, nil + for n in traverse_id(glyph_code,head) do -- only needed for relevant fonts + if n.subtype < 256 then + nofvalid = nofvalid + 1 + valid[nofvalid] = n + if n.font ~= nf then + nf = n.font + tm = fontdata[nf].resources.marks + end + if tm then + mk[n] = tm[n.char] + end + local k = n[a_kernpair] + if k then + local kk = kerns[k] + if kk then + local x, y, w, h = kk[2] or 0, kk[3] or 0, kk[4] or 0, kk[5] or 0 + local dy = y - h + if dy ~= 0 then + ky[n] = dy + end + if w ~= 0 or x ~= 0 then + wx[n] = kk + end + rl[n] = kk[1] -- could move in test + end + end + end + end + else + local nf, tm = nil, nil + for n in traverse_id(glyph_code,head) do + if n.subtype < 256 then + nofvalid = nofvalid + 1 + valid[nofvalid] = n + if n.font ~= nf then + nf = n.font + tm = fontdata[nf].resources.marks + end + if tm then + mk[n] = tm[n.char] + end + end + end + end + if nofvalid > 0 then + -- we can assume done == true because we have cursives and marks + local cx = { } + if has_kerns and next(ky) then + for n, k in next, ky do + n.yoffset = k + end + end + -- todo: reuse t and use maxt + if has_cursives then + local p_cursbase, p = nil, nil + -- since we need valid[n+1] we can also use a "while true do" + local t, d, maxt = { }, { }, 0 + for i=1,nofvalid do -- valid == glyphs + local n = valid[i] + if not mk[n] then + local n_cursbase = n[a_cursbase] + if p_cursbase then + local n_curscurs = n[a_curscurs] + if p_cursbase == n_curscurs then + local c = cursives[n_curscurs] + if c then + local rlmode, dx, dy, ws, wn = c[1], c[2], c[3], c[4], c[5] + if rlmode >= 0 then + dx = dx - ws + else + dx = dx + wn + end + if dx ~= 0 then + cx[n] = dx + rl[n] = rlmode + end + -- if rlmode and rlmode < 0 then + dy = -dy + -- end + maxt = maxt + 1 + t[maxt] = p + d[maxt] = dy + else + maxt = 0 + end + end + elseif maxt > 0 then + local ny = n.yoffset + for i=maxt,1,-1 do + ny = ny + d[i] + local ti = t[i] + ti.yoffset = ti.yoffset + ny + end + maxt = 0 + end + if not n_cursbase and maxt > 0 then + local ny = n.yoffset + for i=maxt,1,-1 do + ny = ny + d[i] + local ti = t[i] + ti.yoffset = ny + end + maxt = 0 + end + p_cursbase, p = n_cursbase, n + end + end + if maxt > 0 then + local ny = n.yoffset + for i=maxt,1,-1 do + ny = ny + d[i] + local ti = t[i] + ti.yoffset = ny + end + maxt = 0 + end + if not keep then + cursives = { } + end + end + if has_marks then + for 
i=1,nofvalid do + local p = valid[i] + local p_markbase = p[a_markbase] + if p_markbase then + local mrks = marks[p_markbase] + local nofmarks = #mrks + for n in traverse_id(glyph_code,p.next) do + local n_markmark = n[a_markmark] + if p_markbase == n_markmark then + local index = n[a_markdone] or 1 + local d = mrks[index] + if d then + local rlmode = d[3] + -- + local k = wx[p] + if k then + local x = k[2] + local w = k[4] + if w then + if rlmode and rlmode >= 0 then + -- kern(x) glyph(p) kern(w-x) mark(n) + n.xoffset = p.xoffset - p.width + d[1] - (w-x) + else + -- kern(w-x) glyph(p) kern(x) mark(n) + n.xoffset = p.xoffset - d[1] - x + end + else + if rlmode and rlmode >= 0 then + -- okay for husayni + n.xoffset = p.xoffset - p.width + d[1] + else + -- needs checking: is x ok here? + n.xoffset = p.xoffset - d[1] - x + end + end + else + if rlmode and rlmode >= 0 then + n.xoffset = p.xoffset - p.width + d[1] + else + n.xoffset = p.xoffset - d[1] + end + local w = n.width + if w ~= 0 then + insert_node_before(head,n,newkern(-w/2)) + insert_node_after(head,n,newkern(-w/2)) + end + end + -- -- + if mk[p] then + n.yoffset = p.yoffset + d[2] + else + n.yoffset = n.yoffset + p.yoffset + d[2] + end + -- + if nofmarks == 1 then + break + else + nofmarks = nofmarks - 1 + end + end + else + -- KE: there can be sequences in ligatures + end + end + end + end + if not keep then + marks = { } + end + end + -- todo : combine + if next(wx) then + for n, k in next, wx do + -- only w can be nil (kernclasses), can be sped up when w == nil + local x = k[2] + local w = k[4] + if w then + local rl = k[1] -- r2l = k[6] + local wx = w - x + if rl < 0 then -- KE: don't use r2l here + if wx ~= 0 then + insert_node_before(head,n,newkern(wx)) -- type 0/2 + end + if x ~= 0 then + insert_node_after (head,n,newkern(x)) -- type 0/2 + end + else + if x ~= 0 then + insert_node_before(head,n,newkern(x)) -- type 0/2 + end + if wx ~= 0 then + insert_node_after (head,n,newkern(wx)) -- type 0/2 + end + end + elseif x ~= 0 then + -- this needs checking for rl < 0 but it is unlikely that a r2l script + -- uses kernclasses between glyphs so we're probably safe (KE has a + -- problematic font where marks interfere with rl < 0 in the previous + -- case) + insert_node_before(head,n,newkern(x)) -- a real font kern, type 0 + end + end + end + if next(cx) then + for n, k in next, cx do + if k ~= 0 then + local rln = rl[n] + if rln and rln < 0 then + insert_node_before(head,n,newkern(-k)) -- type 0/2 + else + insert_node_before(head,n,newkern(k)) -- type 0/2 + end + end + end + end + if not keep then + kerns = { } + end + -- if trace_injections then + -- show_result(head) + -- end + return head, true + elseif not keep then + kerns, cursives, marks = { }, { }, { } + end + elseif has_kerns then + if trace_injections then + trace(head) + end + for n in traverse_id(glyph_code,head) do + if n.subtype < 256 then + local k = n[a_kernpair] + if k then + local kk = kerns[k] + if kk then + local rl, x, y, w = kk[1], kk[2] or 0, kk[3], kk[4] + if y and y ~= 0 then + n.yoffset = y -- todo: h ? + end + if w then + -- copied from above + -- local r2l = kk[6] + local wx = w - x + if rl < 0 then -- KE: don't use r2l here + if wx ~= 0 then + insert_node_before(head,n,newkern(wx)) + end + if x ~= 0 then + insert_node_after (head,n,newkern(x)) + end + else + if x ~= 0 then + insert_node_before(head,n,newkern(x)) + end + if wx ~= 0 then + insert_node_after(head,n,newkern(wx)) + end + end + else + -- simple (e.g. 
kernclass kerns) + if x ~= 0 then + insert_node_before(head,n,newkern(x)) + end + end + end + end + end + end + if not keep then + kerns = { } + end + -- if trace_injections then + -- show_result(head) + -- end + return head, true + else + -- no tracing needed + end + return head, false +end diff --git a/src/fontloader/luaotfload-fonts-lua.lua b/src/fontloader/luaotfload-fonts-lua.lua new file mode 100644 index 0000000..ec3fe38 --- /dev/null +++ b/src/fontloader/luaotfload-fonts-lua.lua @@ -0,0 +1,33 @@ +if not modules then modules = { } end modules ['luatex-fonts-lua'] = { + version = 1.001, + comment = "companion to luatex-*.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end + +local fonts = fonts +fonts.formats.lua = "lua" + +function fonts.readers.lua(specification) + local fullname = specification.filename or "" + if fullname == "" then + local forced = specification.forced or "" + if forced ~= "" then + fullname = specification.name .. "." .. forced + else + fullname = specification.name + end + end + local fullname = resolvers.findfile(fullname) or "" + if fullname ~= "" then + local loader = loadfile(fullname) + loader = loader and loader() + return loader and loader(specification) + end +end diff --git a/src/fontloader/luaotfload-fonts-otn.lua b/src/fontloader/luaotfload-fonts-otn.lua new file mode 100644 index 0000000..c57be5f --- /dev/null +++ b/src/fontloader/luaotfload-fonts-otn.lua @@ -0,0 +1,2848 @@ +if not modules then modules = { } end modules ['font-otn'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files", +} + +-- preprocessors = { "nodes" } + +-- this is still somewhat preliminary and it will get better in due time; +-- much functionality could only be implemented thanks to the husayni font +-- of Idris Samawi Hamid to who we dedicate this module. + +-- in retrospect it always looks easy but believe it or not, it took a lot +-- of work to get proper open type support done: buggy fonts, fuzzy specs, +-- special made testfonts, many skype sessions between taco, idris and me, +-- torture tests etc etc ... unfortunately the code does not show how much +-- time it took ... + +-- todo: +-- +-- kerning is probably not yet ok for latin around dics nodes (interesting challenge) +-- extension infrastructure (for usage out of context) +-- sorting features according to vendors/renderers +-- alternative loop quitters +-- check cursive and r2l +-- find out where ignore-mark-classes went +-- default features (per language, script) +-- handle positions (we need example fonts) +-- handle gpos_single (we might want an extra width field in glyph nodes because adding kerns might interfere) +-- mark (to mark) code is still not what it should be (too messy but we need some more extreem husayni tests) +-- remove some optimizations (when I have a faster machine) +-- +-- maybe redo the lot some way (more context specific) + +--[[ldx-- +

This module is a bit more split up than I'd like but since we also want to test +with plain it has to be so. This module is part of ConTeXt +and discussion about improvements and functionality mostly happens on the +ConTeXt mailing list.

+ +

The specification of OpenType is kind of vague. Apart from the lack of a proper +free specification there's also the problem that Microsoft and Adobe +may have their own interpretation of how and in what order to apply features. +In general the Microsoft website has more detailed specifications and is a +better reference. There is also some information in the FontForge help files.

+ +

Because so much is possible, fonts might contain bugs and/or be made to +work with certain renderers. These may evolve over time, which may have the side +effect that fonts suddenly behave differently.

+ +

After a lot of experiments (mostly by Taco, me and Idris) we're now at yet another +implementation. Of course all errors are mine and of course the code can be +improved. There are quite some optimizations going on here and processing speed +is currently acceptable. Not all functions are implemented yet, often because I +lack the fonts for testing. Many scripts are not yet supported either, but I will +look into them as soon as users ask for it.

+ +

Because there are different interpretations possible, I will extend the code +with more (configurable) variants. I can also add hooks for users so that they can +write their own extensions.

+ +

Glyphs are indexed not by unicode but in their own way. This is because there is no +relationship with unicode at all, apart from the fact that a font might cover certain +ranges of characters. One character can have multiple shapes. However, at the TeX +end we use unicode, and all extra glyphs are mapped into a private +space. This is needed because we need to access them and TeX has to include +them in the output eventually.
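+(As a purely illustrative example of that private mapping: a glyph that has an
+internal index but no unicode of its own simply gets a slot in a private use
+area, say something like 0xF0000 plus a running number; the concrete offset is
+an implementation detail of the loader and the number here is made up.)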

+ +

The raw table as it comes from FontForge gets reorganized to fit our needs. +In ConTeXt that table is packed (similar tables are shared) and cached on disk +so that successive runs can use the optimized table (after loading the table is +unpacked). The flattening code used later is a prelude to an even more compact table +format (and as such it keeps evolving).
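+The packing is essentially deduplication: identical subtables are stored once in
+a pool and referenced by index. A minimal sketch of the idea only (not the actual
+packer, which also deals with nested tables and with unpacking after loading, and
+which assumes some stable serializer such as table.serialize):
+
+  local pool, index = { }, { }
+  local function share(t)
+      local key = table.serialize(t) -- assuming a stable serializer
+      local n = index[key]
+      if not n then
+          n = #pool + 1
+          pool[n] = t
+          index[key] = n
+      end
+      return n -- this number is stored instead of the table itself
+  end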

+ +

This module is sparsely documented because it is a moving target. The table format +of the reader changes and we experiment a lot with different methods for supporting +features.

+ +

As with the code, we may decide to store more information in the +OTF table.

+ +

Incrementing the version number will force a re-cache. We jump the number by one +when there's a fix in the library or code that +results in different tables.

+--ldx]]-- + +-- action handler chainproc chainmore comment +-- +-- gsub_single ok ok ok +-- gsub_multiple ok ok not implemented yet +-- gsub_alternate ok ok not implemented yet +-- gsub_ligature ok ok ok +-- gsub_context ok -- +-- gsub_contextchain ok -- +-- gsub_reversecontextchain ok -- +-- chainsub -- ok +-- reversesub -- ok +-- gpos_mark2base ok ok +-- gpos_mark2ligature ok ok +-- gpos_mark2mark ok ok +-- gpos_cursive ok untested +-- gpos_single ok ok +-- gpos_pair ok ok +-- gpos_context ok -- +-- gpos_contextchain ok -- +-- +-- todo: contextpos and contextsub and class stuff +-- +-- actions: +-- +-- handler : actions triggered by lookup +-- chainproc : actions triggered by contextual lookup +-- chainmore : multiple substitutions triggered by contextual lookup (e.g. fij -> f + ij) +-- +-- remark: the 'not implemented yet' variants will be done when we have fonts that use them +-- remark: we need to check what to do with discretionaries + +-- We used to have independent hashes for lookups but as the tags are unique +-- we now use only one hash. If needed we can have multiple again but in that +-- case I will probably prefix (i.e. rename) the lookups in the cached font file. + +-- Todo: make plugin feature that operates on char/glyphnode arrays + +local concat, insert, remove = table.concat, table.insert, table.remove +local gmatch, gsub, find, match, lower, strip = string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip +local type, next, tonumber, tostring = type, next, tonumber, tostring +local lpegmatch = lpeg.match +local random = math.random +local formatters = string.formatters + +local logs, trackers, nodes, attributes = logs, trackers, nodes, attributes + +local registertracker = trackers.register + +local fonts = fonts +local otf = fonts.handlers.otf + +local trace_lookups = false registertracker("otf.lookups", function(v) trace_lookups = v end) +local trace_singles = false registertracker("otf.singles", function(v) trace_singles = v end) +local trace_multiples = false registertracker("otf.multiples", function(v) trace_multiples = v end) +local trace_alternatives = false registertracker("otf.alternatives", function(v) trace_alternatives = v end) +local trace_ligatures = false registertracker("otf.ligatures", function(v) trace_ligatures = v end) +local trace_contexts = false registertracker("otf.contexts", function(v) trace_contexts = v end) +local trace_marks = false registertracker("otf.marks", function(v) trace_marks = v end) +local trace_kerns = false registertracker("otf.kerns", function(v) trace_kerns = v end) +local trace_cursive = false registertracker("otf.cursive", function(v) trace_cursive = v end) +local trace_preparing = false registertracker("otf.preparing", function(v) trace_preparing = v end) +local trace_bugs = false registertracker("otf.bugs", function(v) trace_bugs = v end) +local trace_details = false registertracker("otf.details", function(v) trace_details = v end) +local trace_applied = false registertracker("otf.applied", function(v) trace_applied = v end) +local trace_steps = false registertracker("otf.steps", function(v) trace_steps = v end) +local trace_skips = false registertracker("otf.skips", function(v) trace_skips = v end) +local trace_directions = false registertracker("otf.directions", function(v) trace_directions = v end) + +local report_direct = logs.reporter("fonts","otf direct") +local report_subchain = logs.reporter("fonts","otf subchain") +local report_chain = logs.reporter("fonts","otf chain") +local 
report_process = logs.reporter("fonts","otf process") +local report_prepare = logs.reporter("fonts","otf prepare") +local report_warning = logs.reporter("fonts","otf warning") + +registertracker("otf.verbose_chain", function(v) otf.setcontextchain(v and "verbose") end) +registertracker("otf.normal_chain", function(v) otf.setcontextchain(v and "normal") end) + +registertracker("otf.replacements", "otf.singles,otf.multiples,otf.alternatives,otf.ligatures") +registertracker("otf.positions","otf.marks,otf.kerns,otf.cursive") +registertracker("otf.actions","otf.replacements,otf.positions") +registertracker("otf.injections","nodes.injections") + +registertracker("*otf.sample","otf.steps,otf.actions,otf.analyzing") + +local insert_node_after = node.insert_after +local delete_node = nodes.delete +local copy_node = node.copy +local find_node_tail = node.tail or node.slide +local flush_node_list = node.flush_list +local end_of_math = node.end_of_math + +local setmetatableindex = table.setmetatableindex + +local zwnj = 0x200C +local zwj = 0x200D +local wildcard = "*" +local default = "dflt" + +local nodecodes = nodes.nodecodes +local whatcodes = nodes.whatcodes +local glyphcodes = nodes.glyphcodes +local disccodes = nodes.disccodes + +local glyph_code = nodecodes.glyph +local glue_code = nodecodes.glue +local disc_code = nodecodes.disc +local whatsit_code = nodecodes.whatsit +local math_code = nodecodes.math + +local dir_code = whatcodes.dir +local localpar_code = whatcodes.localpar + +local discretionary_code = disccodes.discretionary + +local ligature_code = glyphcodes.ligature + +local privateattribute = attributes.private + +-- Something is messed up: we have two mark / ligature indices, one at the injection +-- end and one here ... this is bases in KE's patches but there is something fishy +-- there as I'm pretty sure that for husayni we need some connection (as it's much +-- more complex than an average font) but I need proper examples of all cases, not +-- of only some. 
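+-- A concrete illustration of those two indices (a description of the data flow
+-- in this file, nothing new): toligature below tags every mark that is kept
+-- with a_ligacomp, i.e. the ligature component it belongs to; gpos_mark2ligature
+-- then uses that value to pick the matching entry in the 'baselig' anchor table;
+-- the injection code on its side stores its own per-mark index in a_markdone
+-- when setmark records the offsets.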
+ +local a_state = privateattribute('state') +local a_markbase = privateattribute('markbase') +local a_markmark = privateattribute('markmark') +local a_markdone = privateattribute('markdone') -- assigned at the injection end +local a_cursbase = privateattribute('cursbase') +local a_curscurs = privateattribute('curscurs') +local a_cursdone = privateattribute('cursdone') +local a_kernpair = privateattribute('kernpair') +local a_ligacomp = privateattribute('ligacomp') -- assigned here (ideally it should be combined) + +local injections = nodes.injections +local setmark = injections.setmark +local setcursive = injections.setcursive +local setkern = injections.setkern +local setpair = injections.setpair + +local markonce = true +local cursonce = true +local kernonce = true + +local fonthashes = fonts.hashes +local fontdata = fonthashes.identifiers + +local otffeatures = fonts.constructors.newfeatures("otf") +local registerotffeature = otffeatures.register + +local onetimemessage = fonts.loggers.onetimemessage or function() end + +otf.defaultnodealternate = "none" -- first last + +-- we share some vars here, after all, we have no nested lookups and less code + +local tfmdata = false +local characters = false +local descriptions = false +local resources = false +local marks = false +local currentfont = false +local lookuptable = false +local anchorlookups = false +local lookuptypes = false +local handlers = { } +local rlmode = 0 +local featurevalue = false + +-- head is always a whatsit so we can safely assume that head is not changed + +-- we use this for special testing and documentation + +local checkstep = (nodes and nodes.tracers and nodes.tracers.steppers.check) or function() end +local registerstep = (nodes and nodes.tracers and nodes.tracers.steppers.register) or function() end +local registermessage = (nodes and nodes.tracers and nodes.tracers.steppers.message) or function() end + +local function logprocess(...) + if trace_steps then + registermessage(...) + end + report_direct(...) +end + +local function logwarning(...) + report_direct(...) +end + +local f_unicode = formatters["%U"] +local f_uniname = formatters["%U (%s)"] +local f_unilist = formatters["% t (% t)"] + +local function gref(n) -- currently the same as in font-otb + if type(n) == "number" then + local description = descriptions[n] + local name = description and description.name + if name then + return f_uniname(n,name) + else + return f_unicode(n) + end + elseif n then + local num, nam = { }, { } + for i=1,#n do + local ni = n[i] + if tonumber(ni) then -- later we will start at 2 + local di = descriptions[ni] + num[i] = f_unicode(ni) + nam[i] = di and di.name or "-" + end + end + return f_unilist(num,nam) + else + return "" + end +end + +local function cref(kind,chainname,chainlookupname,lookupname,index) -- not in the mood to alias f_ + if index then + return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookupname,index) + elseif lookupname then + return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookupname) + elseif chainlookupname then + return formatters["feature %a, chain %a, sub %a"](kind,chainname,chainlookupname) + elseif chainname then + return formatters["feature %a, chain %a"](kind,chainname) + else + return formatters["feature %a"](kind) + end +end + +local function pref(kind,lookupname) + return formatters["feature %a, lookup %a"](kind,lookupname) +end + +-- We can assume that languages that use marks are not hyphenated. 
We can also assume +-- that at most one discretionary is present. + +-- We do need components in funny kerning mode but maybe I can better reconstruct then +-- as we do have the font components info available; removing components makes the +-- previous code much simpler. Also, later on copying and freeing becomes easier. +-- However, for arabic we need to keep them around for the sake of mark placement +-- and indices. + +local function copy_glyph(g) -- next and prev are untouched ! + local components = g.components + if components then + g.components = nil + local n = copy_node(g) + g.components = components + return n + else + return copy_node(g) + end +end + +-- start is a mark and we need to keep that one + +local function markstoligature(kind,lookupname,head,start,stop,char) + if start == stop and start.char == char then + return head, start + else + local prev = start.prev + local next = stop.next + start.prev = nil + stop.next = nil + local base = copy_glyph(start) + if head == start then + head = base + end + base.char = char + base.subtype = ligature_code + base.components = start + if prev then + prev.next = base + end + if next then + next.prev = base + end + base.next = next + base.prev = prev + return head, base + end +end + +-- The next code is somewhat complicated by the fact that some fonts can have ligatures made +-- from ligatures that themselves have marks. This was identified by Kai in for instance +-- arabtype: KAF LAM SHADDA ALEF FATHA (0x0643 0x0644 0x0651 0x0627 0x064E). This becomes +-- KAF LAM-ALEF with a SHADDA on the first and a FATHA op de second component. In a next +-- iteration this becomes a KAF-LAM-ALEF with a SHADDA on the second and a FATHA on the +-- third component. + +local function getcomponentindex(start) + if start.id ~= glyph_code then + return 0 + elseif start.subtype == ligature_code then + local i = 0 + local components = start.components + while components do + i = i + getcomponentindex(components) + components = components.next + end + return i + elseif not marks[start.char] then + return 1 + else + return 0 + end +end + +-- eventually we will do positioning in an other way (needs addional w/h/d fields) + +local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound) -- brr head + if start == stop and start.char == char then + start.char = char + return head, start + end + local prev = start.prev + local next = stop.next + start.prev = nil + stop.next = nil + local base = copy_glyph(start) + if start == head then + head = base + end + base.char = char + base.subtype = ligature_code + base.components = start -- start can have components + if prev then + prev.next = base + end + if next then + next.prev = base + end + base.next = next + base.prev = prev + if not discfound then + local deletemarks = markflag ~= "mark" + local components = start + local baseindex = 0 + local componentindex = 0 + local head = base + local current = base + -- first we loop over the glyphs in start .. 
stop + while start do + local char = start.char + if not marks[char] then + baseindex = baseindex + componentindex + componentindex = getcomponentindex(start) + elseif not deletemarks then -- quite fishy + start[a_ligacomp] = baseindex + (start[a_ligacomp] or componentindex) + if trace_marks then + logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp]) + end + head, current = insert_node_after(head,current,copy_node(start)) -- unlikely that mark has components + elseif trace_marks then + logwarning("%s: delete mark %s",pref(kind,lookupname),gref(char)) + end + start = start.next + end + -- we can have one accent as part of a lookup and another following + -- local start = components -- was wrong (component scanning was introduced when more complex ligs in devanagari was added) + local start = current.next + while start and start.id == glyph_code do + local char = start.char + if marks[char] then + start[a_ligacomp] = baseindex + (start[a_ligacomp] or componentindex) + if trace_marks then + logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp]) + end + else + break + end + start = start.next + end + end + return head, base +end + +function handlers.gsub_single(head,start,kind,lookupname,replacement) + if trace_singles then + logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(start.char),gref(replacement)) + end + start.char = replacement + return head, start, true +end + +local function get_alternative_glyph(start,alternatives,value,trace_alternatives) + local n = #alternatives + if value == "random" then + local r = random(1,n) + return alternatives[r], trace_alternatives and formatters["value %a, taking %a"](value,r) + elseif value == "first" then + return alternatives[1], trace_alternatives and formatters["value %a, taking %a"](value,1) + elseif value == "last" then + return alternatives[n], trace_alternatives and formatters["value %a, taking %a"](value,n) + else + value = tonumber(value) + if type(value) ~= "number" then + return alternatives[1], trace_alternatives and formatters["invalid value %s, taking %a"](value,1) + elseif value > n then + local defaultalt = otf.defaultnodealternate + if defaultalt == "first" then + return alternatives[n], trace_alternatives and formatters["invalid value %s, taking %a"](value,1) + elseif defaultalt == "last" then + return alternatives[1], trace_alternatives and formatters["invalid value %s, taking %a"](value,n) + else + return false, trace_alternatives and formatters["invalid value %a, %s"](value,"out of range") + end + elseif value == 0 then + return start.char, trace_alternatives and formatters["invalid value %a, %s"](value,"no change") + elseif value < 1 then + return alternatives[1], trace_alternatives and formatters["invalid value %a, taking %a"](value,1) + else + return alternatives[value], trace_alternatives and formatters["value %a, taking %a"](value,value) + end + end +end + +local function multiple_glyphs(head,start,multiple,ignoremarks) + local nofmultiples = #multiple + if nofmultiples > 0 then + start.char = multiple[1] + if nofmultiples > 1 then + local sn = start.next + for k=2,nofmultiples do -- todo: use insert_node +-- untested: +-- +-- while ignoremarks and marks[sn.char] then +-- local sn = sn.next +-- end + local n = copy_node(start) -- ignore components + n.char = multiple[k] + n.next = sn + n.prev = start + if sn then + sn.prev = n + end + start.next = n + start = n + end + end + return head, start, true + else + if 
trace_multiples then + logprocess("no multiple for %s",gref(start.char)) + end + return head, start, false + end +end + +function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence) + local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue + local choice, comment = get_alternative_glyph(start,alternative,value,trace_alternatives) + if choice then + if trace_alternatives then + logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(start.char),choice,gref(choice),comment) + end + start.char = choice + else + if trace_alternatives then + logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(start.char),comment) + end + end + return head, start, true +end + +function handlers.gsub_multiple(head,start,kind,lookupname,multiple,sequence) + if trace_multiples then + logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(start.char),gref(multiple)) + end + return multiple_glyphs(head,start,multiple,sequence.flags[1]) +end + +function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence) + local s, stop, discfound = start.next, nil, false + local startchar = start.char + if marks[startchar] then + while s do + local id = s.id + if id == glyph_code and s.font == currentfont and s.subtype<256 then + local lg = ligature[s.char] + if lg then + stop = s + ligature = lg + s = s.next + else + break + end + else + break + end + end + if stop then + local lig = ligature.ligature + if lig then + if trace_ligatures then + local stopchar = stop.char + head, start = markstoligature(kind,lookupname,head,start,stop,lig) + logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char)) + else + head, start = markstoligature(kind,lookupname,head,start,stop,lig) + end + return head, start, true + else + -- ok, goto next lookup + end + end + else + local skipmark = sequence.flags[1] + while s do + local id = s.id + if id == glyph_code and s.subtype<256 then + if s.font == currentfont then + local char = s.char + if skipmark and marks[char] then + s = s.next + else + local lg = ligature[char] + if lg then + stop = s + ligature = lg + s = s.next + else + break + end + end + else + break + end + elseif id == disc_code then + discfound = true + s = s.next + else + break + end + end + local lig = ligature.ligature + if lig then + if stop then + if trace_ligatures then + local stopchar = stop.char + head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound) + logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char)) + else + head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound) + end + return head, start, true + else + -- weird but happens (in some arabic font) + start.char = lig + if trace_ligatures then + logprocess("%s: replacing %s by (no real) ligature %s case 3",pref(kind,lookupname),gref(startchar),gref(lig)) + end + return head, start, true + end + else + -- weird but happens + end + end + return head, start, false +end + +--[[ldx-- +

We get hits on a mark, but we're not sure if it has to be applied, so +we need to explicitly test for basechar, baselig and basemark entries.
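+For example, when a mark hits and the preceding glyph turns out to be a plain base
+character we use its 'basechar' anchors, when it is a ligature we use 'baselig'
+together with the component index stored in a_ligacomp, and the mark-to-mark case
+is handled separately via the 'basemark' anchors.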

+--ldx]]-- + +function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence) + local markchar = start.char + if marks[markchar] then + local base = start.prev -- [glyph] [start=mark] + if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then + local basechar = base.char + if marks[basechar] then + while true do + base = base.prev + if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then + basechar = base.char + if not marks[basechar] then + break + end + else + if trace_bugs then + logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar)) + end + return head, start, false + end + end + end + local baseanchors = descriptions[basechar] + if baseanchors then + baseanchors = baseanchors.anchors + end + if baseanchors then + local baseanchors = baseanchors['basechar'] + if baseanchors then + local al = anchorlookups[lookupname] + for anchor,ba in next, baseanchors do + if al[anchor] then + local ma = markanchors[anchor] + if ma then + local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) + if trace_marks then + logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)", + pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) + end + return head, start, true + end + end + end + if trace_bugs then + logwarning("%s, no matching anchors for mark %s and base %s",pref(kind,lookupname),gref(markchar),gref(basechar)) + end + end + elseif trace_bugs then + -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar)) + onetimemessage(currentfont,basechar,"no base anchors",report_fonts) + end + elseif trace_bugs then + logwarning("%s: prev node is no char",pref(kind,lookupname)) + end + elseif trace_bugs then + logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar)) + end + return head, start, false +end + +function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence) + -- check chainpos variant + local markchar = start.char + if marks[markchar] then + local base = start.prev -- [glyph] [optional marks] [start=mark] + if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then + local basechar = base.char + if marks[basechar] then + while true do + base = base.prev + if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then + basechar = base.char + if not marks[basechar] then + break + end + else + if trace_bugs then + logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar)) + end + return head, start, false + end + end + end + local index = start[a_ligacomp] + local baseanchors = descriptions[basechar] + if baseanchors then + baseanchors = baseanchors.anchors + if baseanchors then + local baseanchors = baseanchors['baselig'] + if baseanchors then + local al = anchorlookups[lookupname] + for anchor, ba in next, baseanchors do + if al[anchor] then + local ma = markanchors[anchor] + if ma then + ba = ba[index] + if ba then + local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) -- index + if trace_marks then + logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)", + pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy) + end + return head, start, true + else + if trace_bugs then + logwarning("%s: no matching anchors for mark %s and baselig %s with index 
%a",pref(kind,lookupname),gref(markchar),gref(basechar),index) + end + end + end + end + end + if trace_bugs then + logwarning("%s: no matching anchors for mark %s and baselig %s",pref(kind,lookupname),gref(markchar),gref(basechar)) + end + end + end + elseif trace_bugs then + -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar)) + onetimemessage(currentfont,basechar,"no base anchors",report_fonts) + end + elseif trace_bugs then + logwarning("%s: prev node is no char",pref(kind,lookupname)) + end + elseif trace_bugs then + logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar)) + end + return head, start, false +end + +function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence) + local markchar = start.char + if marks[markchar] then + local base = start.prev -- [glyph] [basemark] [start=mark] + local slc = start[a_ligacomp] + if slc then -- a rather messy loop ... needs checking with husayni + while base do + local blc = base[a_ligacomp] + if blc and blc ~= slc then + base = base.prev + else + break + end + end + end + if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then -- subtype test can go + local basechar = base.char + local baseanchors = descriptions[basechar] + if baseanchors then + baseanchors = baseanchors.anchors + if baseanchors then + baseanchors = baseanchors['basemark'] + if baseanchors then + local al = anchorlookups[lookupname] + for anchor,ba in next, baseanchors do + if al[anchor] then + local ma = markanchors[anchor] + if ma then + local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true) + if trace_marks then + logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)", + pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) + end + return head, start, true + end + end + end + if trace_bugs then + logwarning("%s: no matching anchors for mark %s and basemark %s",pref(kind,lookupname),gref(markchar),gref(basechar)) + end + end + end + elseif trace_bugs then + -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar)) + onetimemessage(currentfont,basechar,"no base anchors",report_fonts) + end + elseif trace_bugs then + logwarning("%s: prev node is no mark",pref(kind,lookupname)) + end + elseif trace_bugs then + logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar)) + end + return head, start, false +end + +function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence) -- to be checked + local alreadydone = cursonce and start[a_cursbase] + if not alreadydone then + local done = false + local startchar = start.char + if marks[startchar] then + if trace_cursive then + logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar)) + end + else + local nxt = start.next + while not done and nxt and nxt.id == glyph_code and nxt.font == currentfont and nxt.subtype<256 do + local nextchar = nxt.char + if marks[nextchar] then + -- should not happen (maybe warning) + nxt = nxt.next + else + local entryanchors = descriptions[nextchar] + if entryanchors then + entryanchors = entryanchors.anchors + if entryanchors then + entryanchors = entryanchors['centry'] + if entryanchors then + local al = anchorlookups[lookupname] + for anchor, entry in next, entryanchors do + if al[anchor] then + local exit = exitanchors[anchor] + if exit then + local dx, dy, bound = 
setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar]) + if trace_cursive then + logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode) + end + done = true + break + end + end + end + end + end + elseif trace_bugs then + -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar)) + onetimemessage(currentfont,startchar,"no entry anchors",report_fonts) + end + break + end + end + end + return head, start, done + else + if trace_cursive and trace_details then + logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone) + end + return head, start, false + end +end + +function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence) + local startchar = start.char + local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar]) + if trace_kerns then + logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h) + end + return head, start, false +end + +function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence) + -- todo: kerns in disc nodes: pre, post, replace -> loop over disc too + -- todo: kerns in components of ligatures + local snext = start.next + if not snext then + return head, start, false + else + local prev, done = start, false + local factor = tfmdata.parameters.factor + local lookuptype = lookuptypes[lookupname] + while snext and snext.id == glyph_code and snext.font == currentfont and snext.subtype<256 do + local nextchar = snext.char + local krn = kerns[nextchar] + if not krn and marks[nextchar] then + prev = snext + snext = snext.next + else + if not krn then + -- skip + elseif type(krn) == "table" then + if lookuptype == "pair" then -- probably not needed + local a, b = krn[2], krn[3] + if a and #a > 0 then + local startchar = start.char + local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar]) + if trace_kerns then + logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h) + end + end + if b and #b > 0 then + local startchar = start.char + local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar]) + if trace_kerns then + logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h) + end + end + else -- wrong ... position has different entries + report_process("%s: check this out (old kern stuff)",pref(kind,lookupname)) + -- local a, b = krn[2], krn[6] + -- if a and a ~= 0 then + -- local k = setkern(snext,factor,rlmode,a) + -- if trace_kerns then + -- logprocess("%s: inserting first kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar)) + -- end + -- end + -- if b and b ~= 0 then + -- logwarning("%s: ignoring second kern xoff %s",pref(kind,lookupname),b*factor) + -- end + end + done = true + elseif krn ~= 0 then + local k = setkern(snext,factor,rlmode,krn) + if trace_kerns then + logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar)) + end + done = true + end + break + end + end + return head, start, done + end +end + +--[[ldx-- +

I will implement multiple chain replacements once I run into a font that uses +them. It's not that complex to handle.

+--ldx]]-- + +local chainmores = { } +local chainprocs = { } + +local function logprocess(...) + if trace_steps then + registermessage(...) + end + report_subchain(...) +end + +local logwarning = report_subchain + +local function logprocess(...) + if trace_steps then + registermessage(...) + end + report_chain(...) +end + +local logwarning = report_chain + +-- We could share functions but that would lead to extra function calls with many +-- arguments, redundant tests and confusing messages. + +function chainprocs.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname) + logwarning("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname)) + return head, start, false +end + +function chainmores.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname,n) + logprocess("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname)) + return head, start, false +end + +-- The reversesub is a special case, which is why we need to store the replacements +-- in a bit weird way. There is no lookup and the replacement comes from the lookup +-- itself. It is meant mostly for dealing with Urdu. + +function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements) + local char = start.char + local replacement = replacements[char] + if replacement then + if trace_singles then + logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement)) + end + start.char = replacement + return head, start, true + else + return head, start, false + end +end + +--[[ldx-- +

This chain stuff is somewhat tricky since we can have a sequence of actions to be +applied: single, alternate, multiple or ligature, where the ligature can be an invalid +one in the sense that it replaces multiple glyphs by one but not necessarily one that +looks like the combination (i.e. it is the counterpart of multiple then). For +example, the following is valid:

+ +  xxxabcdexxx [single a->A][multiple b->BCD][ligature cde->E] xxxABCDExxx +

Therefore we don't really do the replacement here yet unless we have the +single lookup case. The efficiency of the replacements can be improved by deleting +as little as needed, but that would also make the code even more messy.
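+Spelled out, the intended net effect of the example above is:
+
+  xxxabcdexxx    match; three sub-lookups are attached to the chain
+  xxxAbcdexxx    gsub_single   : a   -> A
+  xxxABCDcdexxx  gsub_multiple : b   -> B C D
+  xxxABCDExxx    gsub_ligature : cde -> E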

+--ldx]]-- + +-- local function delete_till_stop(head,start,stop,ignoremarks) -- keeps start +-- local n = 1 +-- if start == stop then +-- -- done +-- elseif ignoremarks then +-- repeat -- start x x m x x stop => start m +-- local next = start.next +-- if not marks[next.char] then +-- local components = next.components +-- if components then -- probably not needed +-- flush_node_list(components) +-- end +-- head = delete_node(head,next) +-- end +-- n = n + 1 +-- until next == stop +-- else -- start x x x stop => start +-- repeat +-- local next = start.next +-- local components = next.components +-- if components then -- probably not needed +-- flush_node_list(components) +-- end +-- head = delete_node(head,next) +-- n = n + 1 +-- until next == stop +-- end +-- return head, n +-- end + +--[[ldx-- +

Here we replace start by a single variant. First we delete the rest of the +match.

+--ldx]]-- + +function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex) + -- todo: marks ? + local current = start + local subtables = currentlookup.subtables + if #subtables > 1 then + logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," ")) + end + while current do + if current.id == glyph_code then + local currentchar = current.char + local lookupname = subtables[1] -- only 1 + local replacement = lookuphash[lookupname] + if not replacement then + if trace_bugs then + logwarning("%s: no single hits",cref(kind,chainname,chainlookupname,lookupname,chainindex)) + end + else + replacement = replacement[currentchar] + if not replacement or replacement == "" then + if trace_bugs then + logwarning("%s: no single for %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar)) + end + else + if trace_singles then + logprocess("%s: replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement)) + end + current.char = replacement + end + end + return head, start, true + elseif current == stop then + break + else + current = current.next + end + end + return head, start, false +end + +chainmores.gsub_single = chainprocs.gsub_single + +--[[ldx-- +

Here we replace start by a sequence of new glyphs. First we delete the rest of +the match.

+--ldx]]-- + +function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) + -- local head, n = delete_till_stop(head,start,stop) + local startchar = start.char + local subtables = currentlookup.subtables + local lookupname = subtables[1] + local replacements = lookuphash[lookupname] + if not replacements then + if trace_bugs then + logwarning("%s: no multiple hits",cref(kind,chainname,chainlookupname,lookupname)) + end + else + replacements = replacements[startchar] + if not replacements or replacement == "" then + if trace_bugs then + logwarning("%s: no multiple for %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar)) + end + else + if trace_multiples then + logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements)) + end + return multiple_glyphs(head,start,replacements,currentlookup.flags[1]) + end + end + return head, start, false +end + +chainmores.gsub_multiple = chainprocs.gsub_multiple + +--[[ldx-- +

Here we replace start by a new glyph. First we delete the rest of the match.

+--ldx]]-- + +-- char_1 mark_1 -> char_x mark_1 (ignore marks) +-- char_1 mark_1 -> char_x + +-- to be checked: do we always have just one glyph? +-- we can also have alternates for marks +-- marks come last anyway +-- are there cases where we need to delete the mark + +function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) + local current = start + local subtables = currentlookup.subtables + local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue + while current do + if current.id == glyph_code then -- is this check needed? + local currentchar = current.char + local lookupname = subtables[1] + local alternatives = lookuphash[lookupname] + if not alternatives then + if trace_bugs then + logwarning("%s: no alternative hit",cref(kind,chainname,chainlookupname,lookupname)) + end + else + alternatives = alternatives[currentchar] + if alternatives then + local choice, comment = get_alternative_glyph(current,alternatives,value,trace_alternatives) + if choice then + if trace_alternatives then + logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(char),choice,gref(choice),comment) + end + start.char = choice + else + if trace_alternatives then + logwarning("%s: no variant %a for %s, %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(char),comment) + end + end + elseif trace_bugs then + logwarning("%s: no alternative for %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar),comment) + end + end + return head, start, true + elseif current == stop then + break + else + current = current.next + end + end + return head, start, false +end + +chainmores.gsub_alternate = chainprocs.gsub_alternate + +--[[ldx-- +

When we replace ligatures we use a helper that handles the marks. I might change +this function (move code inline and handle the marks by a separate function). We +assume rather stupid ligatures (no complex disc nodes).

+--ldx]]-- + +function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex) + local startchar = start.char + local subtables = currentlookup.subtables + local lookupname = subtables[1] + local ligatures = lookuphash[lookupname] + if not ligatures then + if trace_bugs then + logwarning("%s: no ligature hits",cref(kind,chainname,chainlookupname,lookupname,chainindex)) + end + else + ligatures = ligatures[startchar] + if not ligatures then + if trace_bugs then + logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar)) + end + else + local s = start.next + local discfound = false + local last = stop + local nofreplacements = 0 + local skipmark = currentlookup.flags[1] + while s do + local id = s.id + if id == disc_code then + s = s.next + discfound = true + else + local schar = s.char + if skipmark and marks[schar] then -- marks + s = s.next + else + local lg = ligatures[schar] + if lg then + ligatures, last, nofreplacements = lg, s, nofreplacements + 1 + if s == stop then + break + else + s = s.next + end + else + break + end + end + end + end + local l2 = ligatures.ligature + if l2 then + if chainindex then + stop = last + end + if trace_ligatures then + if start == stop then + logprocess("%s: replacing character %s by ligature %s case 3",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2)) + else + logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char),gref(l2)) + end + end + head, start = toligature(kind,lookupname,head,start,stop,l2,currentlookup.flags[1],discfound) + return head, start, true, nofreplacements + elseif trace_bugs then + if start == stop then + logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar)) + else + logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char)) + end + end + end + end + return head, start, false, 0 +end + +chainmores.gsub_ligature = chainprocs.gsub_ligature + +function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) + local markchar = start.char + if marks[markchar] then + local subtables = currentlookup.subtables + local lookupname = subtables[1] + local markanchors = lookuphash[lookupname] + if markanchors then + markanchors = markanchors[markchar] + end + if markanchors then + local base = start.prev -- [glyph] [start=mark] + if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then + local basechar = base.char + if marks[basechar] then + while true do + base = base.prev + if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then + basechar = base.char + if not marks[basechar] then + break + end + else + if trace_bugs then + logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar)) + end + return head, start, false + end + end + end + local baseanchors = descriptions[basechar].anchors + if baseanchors then + local baseanchors = baseanchors['basechar'] + if baseanchors then + local al = anchorlookups[lookupname] + for anchor,ba in next, baseanchors do + if al[anchor] then + local ma = markanchors[anchor] + if ma then + local dx, dy, bound = 
setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) + if trace_marks then + logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)", + cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) + end + return head, start, true + end + end + end + if trace_bugs then + logwarning("%s, no matching anchors for mark %s and base %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar)) + end + end + end + elseif trace_bugs then + logwarning("%s: prev node is no char",cref(kind,chainname,chainlookupname,lookupname)) + end + elseif trace_bugs then + logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar)) + end + elseif trace_bugs then + logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar)) + end + return head, start, false +end + +function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) + local markchar = start.char + if marks[markchar] then + local subtables = currentlookup.subtables + local lookupname = subtables[1] + local markanchors = lookuphash[lookupname] + if markanchors then + markanchors = markanchors[markchar] + end + if markanchors then + local base = start.prev -- [glyph] [optional marks] [start=mark] + if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then + local basechar = base.char + if marks[basechar] then + while true do + base = base.prev + if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then + basechar = base.char + if not marks[basechar] then + break + end + else + if trace_bugs then + logwarning("%s: no base for mark %s",cref(kind,chainname,chainlookupname,lookupname),markchar) + end + return head, start, false + end + end + end + -- todo: like marks a ligatures hash + local index = start[a_ligacomp] + local baseanchors = descriptions[basechar].anchors + if baseanchors then + local baseanchors = baseanchors['baselig'] + if baseanchors then + local al = anchorlookups[lookupname] + for anchor,ba in next, baseanchors do + if al[anchor] then + local ma = markanchors[anchor] + if ma then + ba = ba[index] + if ba then + local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) -- index + if trace_marks then + logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)", + cref(kind,chainname,chainlookupname,lookupname),anchor,a or bound,gref(markchar),gref(basechar),index,dx,dy) + end + return head, start, true + end + end + end + end + if trace_bugs then + logwarning("%s: no matching anchors for mark %s and baselig %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar)) + end + end + end + elseif trace_bugs then + logwarning("feature %s, lookup %s: prev node is no char",kind,lookupname) + end + elseif trace_bugs then + logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar)) + end + elseif trace_bugs then + logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar)) + end + return head, start, false +end + +function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) + local markchar = start.char + if marks[markchar] then + -- local alreadydone = markonce and start[a_markmark] + -- if not alreadydone then + -- local markanchors = 
descriptions[markchar].anchors markanchors = markanchors and markanchors.mark + local subtables = currentlookup.subtables + local lookupname = subtables[1] + local markanchors = lookuphash[lookupname] + if markanchors then + markanchors = markanchors[markchar] + end + if markanchors then + local base = start.prev -- [glyph] [basemark] [start=mark] + local slc = start[a_ligacomp] + if slc then -- a rather messy loop ... needs checking with husayni + while base do + local blc = base[a_ligacomp] + if blc and blc ~= slc then + base = base.prev + else + break + end + end + end + if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then -- subtype test can go + local basechar = base.char + local baseanchors = descriptions[basechar].anchors + if baseanchors then + baseanchors = baseanchors['basemark'] + if baseanchors then + local al = anchorlookups[lookupname] + for anchor,ba in next, baseanchors do + if al[anchor] then + local ma = markanchors[anchor] + if ma then + local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true) + if trace_marks then + logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)", + cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) + end + return head, start, true + end + end + end + if trace_bugs then + logwarning("%s: no matching anchors for mark %s and basemark %s",gref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar)) + end + end + end + elseif trace_bugs then + logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname)) + end + elseif trace_bugs then + logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar)) + end + -- elseif trace_marks and trace_details then + -- logprocess("%s, mark %s is already bound (n=%s), ignoring mark2mark",pref(kind,lookupname),gref(markchar),alreadydone) + -- end + elseif trace_bugs then + logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar)) + end + return head, start, false +end + +function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) + local alreadydone = cursonce and start[a_cursbase] + if not alreadydone then + local startchar = start.char + local subtables = currentlookup.subtables + local lookupname = subtables[1] + local exitanchors = lookuphash[lookupname] + if exitanchors then + exitanchors = exitanchors[startchar] + end + if exitanchors then + local done = false + if marks[startchar] then + if trace_cursive then + logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar)) + end + else + local nxt = start.next + while not done and nxt and nxt.id == glyph_code and nxt.font == currentfont and nxt.subtype<256 do + local nextchar = nxt.char + if marks[nextchar] then + -- should not happen (maybe warning) + nxt = nxt.next + else + local entryanchors = descriptions[nextchar] + if entryanchors then + entryanchors = entryanchors.anchors + if entryanchors then + entryanchors = entryanchors['centry'] + if entryanchors then + local al = anchorlookups[lookupname] + for anchor, entry in next, entryanchors do + if al[anchor] then + local exit = exitanchors[anchor] + if exit then + local dx, dy, bound = setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar]) + if trace_cursive then + logprocess("%s: moving %s to %s cursive (%p,%p) using 
anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode) + end + done = true + break + end + end + end + end + end + elseif trace_bugs then + -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar)) + onetimemessage(currentfont,startchar,"no entry anchors",report_fonts) + end + break + end + end + end + return head, start, done + else + if trace_cursive and trace_details then + logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone) + end + return head, start, false + end + end + return head, start, false +end + +function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence) + -- untested .. needs checking for the new model + local startchar = start.char + local subtables = currentlookup.subtables + local lookupname = subtables[1] + local kerns = lookuphash[lookupname] + if kerns then + kerns = kerns[startchar] -- needed ? + if kerns then + local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar]) + if trace_kerns then + logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h) + end + end + end + return head, start, false +end + +chainmores.gpos_single = chainprocs.gpos_single -- okay? + +-- when machines become faster i will make a shared function + +function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence) + local snext = start.next + if snext then + local startchar = start.char + local subtables = currentlookup.subtables + local lookupname = subtables[1] + local kerns = lookuphash[lookupname] + if kerns then + kerns = kerns[startchar] + if kerns then + local lookuptype = lookuptypes[lookupname] + local prev, done = start, false + local factor = tfmdata.parameters.factor + while snext and snext.id == glyph_code and snext.font == currentfont and snext.subtype<256 do + local nextchar = snext.char + local krn = kerns[nextchar] + if not krn and marks[nextchar] then + prev = snext + snext = snext.next + else + if not krn then + -- skip + elseif type(krn) == "table" then + if lookuptype == "pair" then + local a, b = krn[2], krn[3] + if a and #a > 0 then + local startchar = start.char + local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar]) + if trace_kerns then + logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h) + end + end + if b and #b > 0 then + local startchar = start.char + local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar]) + if trace_kerns then + logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h) + end + end + else + report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname)) + local a, b = krn[2], krn[6] + if a and a ~= 0 then + local k = setkern(snext,factor,rlmode,a) + if trace_kerns then + logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar)) + end + end + if b and b ~= 0 then + logwarning("%s: ignoring second kern xoff %s",cref(kind,chainname,chainlookupname),b*factor) + end + 
end + done = true + elseif krn ~= 0 then + local k = setkern(snext,factor,rlmode,krn) + if trace_kerns then + logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar)) + end + done = true + end + break + end + end + return head, start, done + end + end + end + return head, start, false +end + +chainmores.gpos_pair = chainprocs.gpos_pair -- okay? + +-- what pointer to return, spec says stop +-- to be discussed ... is bidi changer a space? +-- elseif char == zwnj and sequence[n][32] then -- brrr + +-- somehow l or f is global +-- we don't need to pass the currentcontext, saves a bit +-- make a slow variant then can be activated but with more tracing + +local function show_skip(kind,chainname,char,ck,class) + if ck[9] then + logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a, %a => %a",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10]) + else + logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a",cref(kind,chainname),gref(char),class,ck[1],ck[2]) + end +end + +local function normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash) + -- local rule, lookuptype, sequence, f, l, lookups = ck[1], ck[2] ,ck[3], ck[4], ck[5], ck[6] + local flags = sequence.flags + local done = false + local skipmark = flags[1] + local skipligature = flags[2] + local skipbase = flags[3] + local someskip = skipmark or skipligature or skipbase -- could be stored in flags for a fast test (hm, flags could be false !) + local markclass = sequence.markclass -- todo, first we need a proper test + local skipped = false + for k=1,#contexts do + local match = true + local current = start + local last = start + local ck = contexts[k] + local seq = ck[3] + local s = #seq + -- f..l = mid string + if s == 1 then + -- never happens + match = current.id == glyph_code and current.font == currentfont and current.subtype<256 and seq[1][current.char] + else + -- maybe we need a better space check (maybe check for glue or category or combination) + -- we cannot optimize for n=2 because there can be disc nodes + local f, l = ck[4], ck[5] + -- current match + if f == 1 and f == l then -- current only + -- already a hit + -- match = true + else -- before/current/after | before/current | current/after + -- no need to test first hit (to be optimized) + if f == l then -- new, else last out of sync (f is > 1) + -- match = true + else + local n = f + 1 + last = last.next + while n <= l do + if last then + local id = last.id + if id == glyph_code then + if last.font == currentfont and last.subtype<256 then + local char = last.char + local ccd = descriptions[char] + if ccd then + local class = ccd.class + if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then + skipped = true + if trace_skips then + show_skip(kind,chainname,char,ck,class) + end + last = last.next + elseif seq[n][char] then + if n < l then + last = last.next + end + n = n + 1 + else + match = false + break + end + else + match = false + break + end + else + match = false + break + end + elseif id == disc_code then + last = last.next + else + match = false + break + end + else + match = false + break + end + end + end + end + -- before + if match and f > 1 then + local prev = start.prev + if prev then + local n = f-1 + while n >= 1 do + if prev then + local id = prev.id + if id == glyph_code then + if prev.font == currentfont and prev.subtype<256 then -- normal char + local 
char = prev.char + local ccd = descriptions[char] + if ccd then + local class = ccd.class + if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then + skipped = true + if trace_skips then + show_skip(kind,chainname,char,ck,class) + end + elseif seq[n][char] then + n = n -1 + else + match = false + break + end + else + match = false + break + end + else + match = false + break + end + elseif id == disc_code then + -- skip 'm + elseif seq[n][32] then + n = n -1 + else + match = false + break + end + prev = prev.prev + elseif seq[n][32] then -- somewhat special, as zapfino can have many preceding spaces + n = n -1 + else + match = false + break + end + end + elseif f == 2 then + match = seq[1][32] + else + for n=f-1,1 do + if not seq[n][32] then + match = false + break + end + end + end + end + -- after + if match and s > l then + local current = last and last.next + if current then + -- removed optimization for s-l == 1, we have to deal with marks anyway + local n = l + 1 + while n <= s do + if current then + local id = current.id + if id == glyph_code then + if current.font == currentfont and current.subtype<256 then -- normal char + local char = current.char + local ccd = descriptions[char] + if ccd then + local class = ccd.class + if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then + skipped = true + if trace_skips then + show_skip(kind,chainname,char,ck,class) + end + elseif seq[n][char] then + n = n + 1 + else + match = false + break + end + else + match = false + break + end + else + match = false + break + end + elseif id == disc_code then + -- skip 'm + elseif seq[n][32] then -- brrr + n = n + 1 + else + match = false + break + end + current = current.next + elseif seq[n][32] then + n = n + 1 + else + match = false + break + end + end + elseif s-l == 1 then + match = seq[s][32] + else + for n=l+1,s do + if not seq[n][32] then + match = false + break + end + end + end + end + end + if match then + -- ck == currentcontext + if trace_contexts then + local rule, lookuptype, f, l = ck[1], ck[2], ck[4], ck[5] + local char = start.char + if ck[9] then + logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a, %a => %a", + cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10]) + else + logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a", + cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype) + end + end + local chainlookups = ck[6] + if chainlookups then + local nofchainlookups = #chainlookups + -- we can speed this up if needed + if nofchainlookups == 1 then + local chainlookupname = chainlookups[1] + local chainlookup = lookuptable[chainlookupname] + if chainlookup then + local cp = chainprocs[chainlookup.type] + if cp then + local ok + head, start, ok = cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence) + if ok then + done = true + end + else + logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type) + end + else -- shouldn't happen + logprocess("%s is not yet supported",cref(kind,chainname,chainlookupname)) + end + else + local i = 1 + repeat + if skipped then + while true do + local char = start.char + local ccd = descriptions[char] + if ccd then + local class = ccd.class + if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and 
not markclass[char]) then + start = start.next + else + break + end + else + break + end + end + end + local chainlookupname = chainlookups[i] + local chainlookup = lookuptable[chainlookupname] + if not chainlookup then + -- okay, n matches, < n replacements + i = i + 1 + else + local cp = chainmores[chainlookup.type] + if not cp then + -- actually an error + logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type) + i = i + 1 + else + local ok, n + head, start, ok, n = cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence) + -- messy since last can be changed ! + if ok then + done = true + -- skip next one(s) if ligature + i = i + (n or 1) + else + i = i + 1 + end + end + end + if start then + start = start.next + else + -- weird + end + until i > nofchainlookups + end + else + local replacements = ck[7] + if replacements then + head, start, done = chainprocs.reversesub(head,start,last,kind,chainname,ck,lookuphash,replacements) -- sequence + else + done = true -- can be meant to be skipped + if trace_contexts then + logprocess("%s: skipping match",cref(kind,chainname)) + end + end + end + end + end + return head, start, done +end + +-- Because we want to keep this elsewhere (an because speed is less an issue) we +-- pass the font id so that the verbose variant can access the relevant helper tables. + +local verbose_handle_contextchain = function(font,...) + logwarning("no verbose handler installed, reverting to 'normal'") + otf.setcontextchain() + return normal_handle_contextchain(...) +end + +otf.chainhandlers = { + normal = normal_handle_contextchain, + verbose = verbose_handle_contextchain, +} + +function otf.setcontextchain(method) + if not method or method == "normal" or not otf.chainhandlers[method] then + if handlers.contextchain then -- no need for a message while making the format + logwarning("installing normal contextchain handler") + end + handlers.contextchain = normal_handle_contextchain + else + logwarning("installing contextchain handler %a",method) + local handler = otf.chainhandlers[method] + handlers.contextchain = function(...) + return handler(currentfont,...) -- hm, get rid of ... + end + end + handlers.gsub_context = handlers.contextchain + handlers.gsub_contextchain = handlers.contextchain + handlers.gsub_reversecontextchain = handlers.contextchain + handlers.gpos_contextchain = handlers.contextchain + handlers.gpos_context = handlers.contextchain +end + +otf.setcontextchain() + +local missing = { } -- we only report once + +local function logprocess(...) + if trace_steps then + registermessage(...) + end + report_process(...) 
+end + +local logwarning = report_process + +local function report_missing_cache(typ,lookup) + local f = missing[currentfont] if not f then f = { } missing[currentfont] = f end + local t = f[typ] if not t then t = { } f[typ] = t end + if not t[lookup] then + t[lookup] = true + logwarning("missing cache for lookup %a, type %a, font %a, name %a",lookup,typ,currentfont,tfmdata.properties.fullname) + end +end + +local resolved = { } -- we only resolve a font,script,language pair once + +-- todo: pass all these 'locals' in a table + +local lookuphashes = { } + +setmetatableindex(lookuphashes, function(t,font) + local lookuphash = fontdata[font].resources.lookuphash + if not lookuphash or not next(lookuphash) then + lookuphash = false + end + t[font] = lookuphash + return lookuphash +end) + +-- fonts.hashes.lookups = lookuphashes + +local autofeatures = fonts.analyzers.features -- was: constants + +local function initialize(sequence,script,language,enabled) + local features = sequence.features + if features then + for kind, scripts in next, features do + local valid = enabled[kind] + if valid then + local languages = scripts[script] or scripts[wildcard] + if languages and (languages[language] or languages[wildcard]) then + return { valid, autofeatures[kind] or false, sequence.chain or 0, kind, sequence } + end + end + end + end + return false +end + +function otf.dataset(tfmdata,font) -- generic variant, overloaded in context + local shared = tfmdata.shared + local properties = tfmdata.properties + local language = properties.language or "dflt" + local script = properties.script or "dflt" + local enabled = shared.features + local res = resolved[font] + if not res then + res = { } + resolved[font] = res + end + local rs = res[script] + if not rs then + rs = { } + res[script] = rs + end + local rl = rs[language] + if not rl then + rl = { + -- indexed but we can also add specific data by key + } + rs[language] = rl + local sequences = tfmdata.resources.sequences +-- setmetatableindex(rl, function(t,k) +-- if type(k) == "number" then +-- local v = enabled and initialize(sequences[k],script,language,enabled) +-- t[k] = v +-- return v +-- end +-- end) +for s=1,#sequences do + local v = enabled and initialize(sequences[s],script,language,enabled) + if v then + rl[#rl+1] = v + end +end + end + return rl +end + +-- elseif id == glue_code then +-- if p[5] then -- chain +-- local pc = pp[32] +-- if pc then +-- start, ok = start, false -- p[1](start,kind,p[2],pc,p[3],p[4]) +-- if ok then +-- done = true +-- end +-- if start then start = start.next end +-- else +-- start = start.next +-- end +-- else +-- start = start.next +-- end + +-- there will be a new direction parser (pre-parsed etc) + +-- less bytecode: 290 -> 254 +-- +-- attr = attr or false +-- +-- local a = getattr(start,0) +-- if (a == attr and (not attribute or getattr(start,a_state) == attribute)) or (not attribute or getattr(start,a_state) == attribute) then +-- -- the action +-- end + +local function featuresprocessor(head,font,attr) + + local lookuphash = lookuphashes[font] -- we can also check sequences here + + if not lookuphash then + return head, false + end + + if trace_steps then + checkstep(head) + end + + tfmdata = fontdata[font] + descriptions = tfmdata.descriptions + characters = tfmdata.characters + resources = tfmdata.resources + + marks = resources.marks + anchorlookups = resources.lookup_to_anchor + lookuptable = resources.lookups + lookuptypes = resources.lookuptypes + + currentfont = font + rlmode = 0 + + local sequences = 
resources.sequences + local done = false + local datasets = otf.dataset(tfmdata,font,attr) + + local dirstack = { } -- could move outside function + + -- We could work on sub start-stop ranges instead but I wonder if there is that + -- much speed gain (experiments showed that it made not much sense) and we need + -- to keep track of directions anyway. Also at some point I want to play with + -- font interactions and then we do need the full sweeps. + + -- Keeping track of the headnode is needed for devanagari (I generalized it a bit + -- so that multiple cases are also covered.) + + for s=1,#datasets do + local dataset = datasets[s] + featurevalue = dataset[1] -- todo: pass to function instead of using a global + + local sequence = dataset[5] -- sequences[s] -- also dataset[5] + local rlparmode = 0 + local topstack = 0 + local success = false + local attribute = dataset[2] + local chain = dataset[3] -- sequence.chain or 0 + local typ = sequence.type + local subtables = sequence.subtables + if chain < 0 then + -- this is a limited case, no special treatments like 'init' etc + local handler = handlers[typ] + -- we need to get rid of this slide! probably no longer needed in latest luatex + local start = find_node_tail(head) -- slow (we can store tail because there's always a skip at the end): todo + while start do + local id = start.id + if id == glyph_code then + if start.font == font and start.subtype<256 then + local a = start[0] + if a then + a = a == attr + else + a = true + end + if a then + for i=1,#subtables do + local lookupname = subtables[i] + local lookupcache = lookuphash[lookupname] + if lookupcache then + local lookupmatch = lookupcache[start.char] + if lookupmatch then + head, start, success = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) + if success then + break + end + end + else + report_missing_cache(typ,lookupname) + end + end + if start then start = start.prev end + else + start = start.prev + end + else + start = start.prev + end + else + start = start.prev + end + end + else + local handler = handlers[typ] + local ns = #subtables + local start = head -- local ? + rlmode = 0 -- to be checked ? 
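Illustrative sketch (not part of the patch): otf.dataset above resolves, once per font/script/language combination, which feature sequences apply, by looking the script up in the feature's script table with a wildcard fallback and then doing the same for the language. The fragment below reproduces that resolution step on plain tables; `feature_applies` and the sample `liga` table are hypothetical, and "*" stands in for the loader's wildcard.

local wildcard = "*"   -- assumed stand-in for the wildcard key used by the loader

local function feature_applies(scripts, script, language)
  local languages = scripts[script] or scripts[wildcard]
  return (languages and (languages[language] or languages[wildcard])) or false
end

-- usage: a liga feature enabled for latn/dflt only
local liga = { latn = { dflt = true } }
print(feature_applies(liga, "latn", "dflt"))   -- true
print(feature_applies(liga, "arab", "dflt"))   -- false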
+ if ns == 1 then -- happens often + local lookupname = subtables[1] + local lookupcache = lookuphash[lookupname] + if not lookupcache then -- also check for empty cache + report_missing_cache(typ,lookupname) + else + + local function subrun(start) + -- mostly for gsub, gpos would demand a more clever approach + local head = start + local done = false + while start do + local id = start.id + if id == glyph_code and start.font == font and start.subtype <256 then + local a = start[0] + if a then + a = (a == attr) and (not attribute or start[a_state] == attribute) + else + a = not attribute or start[a_state] == attribute + end + if a then + local lookupmatch = lookupcache[start.char] + if lookupmatch then + -- sequence kan weg + local ok + head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) + if ok then + done = true + end + end + if start then start = start.next end + else + start = start.next + end + else + start = start.next + end + end + if done then + success = true + return head + end + end + + local function kerndisc(disc) -- we can assume that prev and next are glyphs + local prev = disc.prev + local next = disc.next + if prev and next then + prev.next = next + -- next.prev = prev + local a = prev[0] + if a then + a = (a == attr) and (not attribute or prev[a_state] == attribute) + else + a = not attribute or prev[a_state] == attribute + end + if a then + local lookupmatch = lookupcache[prev.char] + if lookupmatch then + -- sequence kan weg + local h, d, ok = handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) + if ok then + done = true + success = true + end + end + end + prev.next = disc + -- next.prev = disc + end + return next + end + + while start do + local id = start.id + if id == glyph_code then + if start.font == font and start.subtype<256 then + local a = start[0] + if a then + a = (a == attr) and (not attribute or start[a_state] == attribute) + else + a = not attribute or start[a_state] == attribute + end + if a then + local lookupmatch = lookupcache[start.char] + if lookupmatch then + -- sequence kan weg + local ok + head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) + if ok then + success = true + end + end + if start then start = start.next end + else + start = start.next + end + else + start = start.next + end + elseif id == disc_code then + -- mostly for gsub + if start.subtype == discretionary_code then + local pre = start.pre + if pre then + local new = subrun(pre) + if new then start.pre = new end + end + local post = start.post + if post then + local new = subrun(post) + if new then start.post = new end + end + local replace = start.replace + if replace then + local new = subrun(replace) + if new then start.replace = new end + end +elseif typ == "gpos_single" or typ == "gpos_pair" then + kerndisc(start) + end + start = start.next + elseif id == whatsit_code then -- will be function + local subtype = start.subtype + if subtype == dir_code then + local dir = start.dir + if dir == "+TRT" or dir == "+TLT" then + topstack = topstack + 1 + dirstack[topstack] = dir + elseif dir == "-TRT" or dir == "-TLT" then + topstack = topstack - 1 + end + local newdir = dirstack[topstack] + if newdir == "+TRT" then + rlmode = -1 + elseif newdir == "+TLT" then + rlmode = 1 + else + rlmode = rlparmode + end + if trace_directions then + report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir) + end + elseif 
subtype == localpar_code then + local dir = start.dir + if dir == "TRT" then + rlparmode = -1 + elseif dir == "TLT" then + rlparmode = 1 + else + rlparmode = 0 + end + -- one might wonder if the par dir should be looked at, so we might as well drop the next line + rlmode = rlparmode + if trace_directions then + report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode) + end + end + start = start.next + elseif id == math_code then + start = end_of_math(start).next + else + start = start.next + end + end + end + else + + local function subrun(start) + -- mostly for gsub, gpos would demand a more clever approach + local head = start + local done = false + while start do + local id = start.id + if id == glyph_code and start.id == font and start.subtype <256 then + local a = start[0] + if a then + a = (a == attr) and (not attribute or start[a_state] == attribute) + else + a = not attribute or start[a_state] == attribute + end + if a then + for i=1,ns do + local lookupname = subtables[i] + local lookupcache = lookuphash[lookupname] + if lookupcache then + local lookupmatch = lookupcache[start.char] + if lookupmatch then + -- we could move all code inline but that makes things even more unreadable + local ok + head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) + if ok then + done = true + break + elseif not start then + -- don't ask why ... shouldn't happen + break + end + end + else + report_missing_cache(typ,lookupname) + end + end + if start then start = start.next end + else + start = start.next + end + else + start = start.next + end + end + if done then + success = true + return head + end + end + + local function kerndisc(disc) -- we can assume that prev and next are glyphs + local prev = disc.prev + local next = disc.next + if prev and next then + prev.next = next + -- next.prev = prev + local a = prev[0] + if a then + a = (a == attr) and (not attribute or prev[a_state] == attribute) + else + a = not attribute or prev[a_state] == attribute + end + if a then + for i=1,ns do + local lookupname = subtables[i] + local lookupcache = lookuphash[lookupname] + if lookupcache then + local lookupmatch = lookupcache[prev.char] + if lookupmatch then + -- we could move all code inline but that makes things even more unreadable + local h, d, ok = handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) + if ok then + done = true + break + end + end + else + report_missing_cache(typ,lookupname) + end + end + end + prev.next = disc + -- next.prev = disc + end + return next + end + + while start do + local id = start.id + if id == glyph_code then + if start.font == font and start.subtype<256 then + local a = start[0] + if a then + a = (a == attr) and (not attribute or start[a_state] == attribute) + else + a = not attribute or start[a_state] == attribute + end + if a then + for i=1,ns do + local lookupname = subtables[i] + local lookupcache = lookuphash[lookupname] + if lookupcache then + local lookupmatch = lookupcache[start.char] + if lookupmatch then + -- we could move all code inline but that makes things even more unreadable + local ok + head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) + if ok then + success = true + break + elseif not start then + -- don't ask why ... 
shouldn't happen + break + end + end + else + report_missing_cache(typ,lookupname) + end + end + if start then start = start.next end + else + start = start.next + end + else + start = start.next + end + elseif id == disc_code then + -- mostly for gsub + if start.subtype == discretionary_code then + local pre = start.pre + if pre then + local new = subrun(pre) + if new then start.pre = new end + end + local post = start.post + if post then + local new = subrun(post) + if new then start.post = new end + end + local replace = start.replace + if replace then + local new = subrun(replace) + if new then start.replace = new end + end +elseif typ == "gpos_single" or typ == "gpos_pair" then + kerndisc(start) + end + start = start.next + elseif id == whatsit_code then + local subtype = start.subtype + if subtype == dir_code then + local dir = start.dir + if dir == "+TRT" or dir == "+TLT" then + topstack = topstack + 1 + dirstack[topstack] = dir + elseif dir == "-TRT" or dir == "-TLT" then + topstack = topstack - 1 + end + local newdir = dirstack[topstack] + if newdir == "+TRT" then + rlmode = -1 + elseif newdir == "+TLT" then + rlmode = 1 + else + rlmode = rlparmode + end + if trace_directions then + report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir) + end + elseif subtype == localpar_code then + local dir = start.dir + if dir == "TRT" then + rlparmode = -1 + elseif dir == "TLT" then + rlparmode = 1 + else + rlparmode = 0 + end + rlmode = rlparmode + if trace_directions then + report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode) + end + end + start = start.next + elseif id == math_code then + start = end_of_math(start).next + else + start = start.next + end + end + end + end + if success then + done = true + end + if trace_steps then -- ? + registerstep(head) + end + end + return head, done +end + +local function generic(lookupdata,lookupname,unicode,lookuphash) + local target = lookuphash[lookupname] + if target then + target[unicode] = lookupdata + else + lookuphash[lookupname] = { [unicode] = lookupdata } + end +end + +local action = { + + substitution = generic, + multiple = generic, + alternate = generic, + position = generic, + + ligature = function(lookupdata,lookupname,unicode,lookuphash) + local target = lookuphash[lookupname] + if not target then + target = { } + lookuphash[lookupname] = target + end + for i=1,#lookupdata do + local li = lookupdata[i] + local tu = target[li] + if not tu then + tu = { } + target[li] = tu + end + target = tu + end + target.ligature = unicode + end, + + pair = function(lookupdata,lookupname,unicode,lookuphash) + local target = lookuphash[lookupname] + if not target then + target = { } + lookuphash[lookupname] = target + end + local others = target[unicode] + local paired = lookupdata[1] + if others then + others[paired] = lookupdata + else + others = { [paired] = lookupdata } + target[unicode] = others + end + end, + +} + +local function prepare_lookups(tfmdata) + + local rawdata = tfmdata.shared.rawdata + local resources = rawdata.resources + local lookuphash = resources.lookuphash + local anchor_to_lookup = resources.anchor_to_lookup + local lookup_to_anchor = resources.lookup_to_anchor + local lookuptypes = resources.lookuptypes + local characters = tfmdata.characters + local descriptions = tfmdata.descriptions + + -- we cannot free the entries in the descriptions as sometimes we access + -- then directly (for instance anchors) ... 
selectively freeing does save + -- much memory as it's only a reference to a table and the slot in the + -- description hash is not freed anyway + + for unicode, character in next, characters do -- we cannot loop over descriptions ! + + local description = descriptions[unicode] + + if description then + + local lookups = description.slookups + if lookups then + for lookupname, lookupdata in next, lookups do + action[lookuptypes[lookupname]](lookupdata,lookupname,unicode,lookuphash) + end + end + + local lookups = description.mlookups + if lookups then + for lookupname, lookuplist in next, lookups do + local lookuptype = lookuptypes[lookupname] + for l=1,#lookuplist do + local lookupdata = lookuplist[l] + action[lookuptype](lookupdata,lookupname,unicode,lookuphash) + end + end + end + + local list = description.kerns + if list then + for lookup, krn in next, list do -- ref to glyph, saves lookup + local target = lookuphash[lookup] + if target then + target[unicode] = krn + else + lookuphash[lookup] = { [unicode] = krn } + end + end + end + + local list = description.anchors + if list then + for typ, anchors in next, list do -- types + if typ == "mark" or typ == "cexit" then -- or entry? + for name, anchor in next, anchors do + local lookups = anchor_to_lookup[name] + if lookups then + for lookup, _ in next, lookups do + local target = lookuphash[lookup] + if target then + target[unicode] = anchors + else + lookuphash[lookup] = { [unicode] = anchors } + end + end + end + end + end + end + end + + end + + end + +end + +local function split(replacement,original) + local result = { } + for i=1,#replacement do + result[original[i]] = replacement[i] + end + return result +end + +local valid = { + coverage = { chainsub = true, chainpos = true, contextsub = true }, + reversecoverage = { reversesub = true }, + glyphs = { chainsub = true, chainpos = true }, +} + +local function prepare_contextchains(tfmdata) + local rawdata = tfmdata.shared.rawdata + local resources = rawdata.resources + local lookuphash = resources.lookuphash + local lookups = rawdata.lookups + if lookups then + for lookupname, lookupdata in next, rawdata.lookups do + local lookuptype = lookupdata.type + if lookuptype then + local rules = lookupdata.rules + if rules then + local format = lookupdata.format + local validformat = valid[format] + if not validformat then + report_prepare("unsupported format %a",format) + elseif not validformat[lookuptype] then + -- todo: dejavu-serif has one (but i need to see what use it has) + report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookupname) + else + local contexts = lookuphash[lookupname] + if not contexts then + contexts = { } + lookuphash[lookupname] = contexts + end + local t, nt = { }, 0 + for nofrules=1,#rules do + local rule = rules[nofrules] + local current = rule.current + local before = rule.before + local after = rule.after + local replacements = rule.replacements + local sequence = { } + local nofsequences = 0 + -- Eventually we can store start, stop and sequence in the cached file + -- but then less sharing takes place so best not do that without a lot + -- of profiling so let's forget about it. 
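Illustrative sketch (not part of the patch): as the comment above notes, prepare_contextchains flattens each rule's before/current/after coverage lists into a single sequence array and remembers the start/stop indices of the current part, which is what the contextchain handler later matches against. The standalone helper below shows that assembly on plain tables; `flatten_rule` and the sample coverage sets are invented for the illustration.

local function flatten_rule(before, current, after)
  local sequence, n = { }, 0
  for i = 1, #(before or { }) do n = n + 1 sequence[n] = before[i] end
  local start = n + 1                         -- first slot of the current part
  for i = 1, #current do n = n + 1 sequence[n] = current[i] end
  local stop = n                              -- last slot of the current part
  for i = 1, #(after or { }) do n = n + 1 sequence[n] = after[i] end
  return sequence, start, stop
end

-- usage: one glyph of context before and after a two-glyph current part
local seq, b, e = flatten_rule(
  { { [0x41] = true } },
  { { [0x66] = true }, { [0x69] = true } },
  { { [0x42] = true } })
print(#seq, b, e)   -- 4  2  3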
+ if before then + for n=1,#before do + nofsequences = nofsequences + 1 + sequence[nofsequences] = before[n] + end + end + local start = nofsequences + 1 + for n=1,#current do + nofsequences = nofsequences + 1 + sequence[nofsequences] = current[n] + end + local stop = nofsequences + if after then + for n=1,#after do + nofsequences = nofsequences + 1 + sequence[nofsequences] = after[n] + end + end + if sequence[1] then + -- Replacements only happen with reverse lookups as they are single only. We + -- could pack them into current (replacement value instead of true) and then + -- use sequence[start] instead but it's somewhat ugly. + nt = nt + 1 + t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups, replacements } + for unic, _ in next, sequence[start] do + local cu = contexts[unic] + if not cu then + contexts[unic] = t + end + end + end + end + end + else + -- no rules + end + else + report_prepare("missing lookuptype for lookupname %a",lookupname) + end + end + end +end + +-- we can consider lookuphash == false (initialized but empty) vs lookuphash == table + +local function featuresinitializer(tfmdata,value) + if true then -- value then + -- beware we need to use the topmost properties table + local rawdata = tfmdata.shared.rawdata + local properties = rawdata.properties + if not properties.initialized then + local starttime = trace_preparing and os.clock() + local resources = rawdata.resources + resources.lookuphash = resources.lookuphash or { } + prepare_contextchains(tfmdata) + prepare_lookups(tfmdata) + properties.initialized = true + if trace_preparing then + report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,tfmdata.properties.fullname) + end + end + end +end + +registerotffeature { + name = "features", + description = "features", + default = true, + initializers = { + position = 1, + node = featuresinitializer, + }, + processors = { + node = featuresprocessor, + } +} + +-- This can be used for extra handlers, but should be used with care! + +otf.handlers = handlers diff --git a/src/fontloader/luaotfload-fonts-tfm.lua b/src/fontloader/luaotfload-fonts-tfm.lua new file mode 100644 index 0000000..b9bb1bd --- /dev/null +++ b/src/fontloader/luaotfload-fonts-tfm.lua @@ -0,0 +1,38 @@ +if not modules then modules = { } end modules ['luatex-fonts-tfm'] = { + version = 1.001, + comment = "companion to luatex-*.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end + +local fonts = fonts +local tfm = { } +fonts.handlers.tfm = tfm +fonts.formats.tfm = "type1" -- we need to have at least a value here + +function fonts.readers.tfm(specification) + local fullname = specification.filename or "" + if fullname == "" then + local forced = specification.forced or "" + if forced ~= "" then + fullname = specification.name .. "." .. 
forced + else + fullname = specification.name + end + end + local foundname = resolvers.findbinfile(fullname, 'tfm') or "" + if foundname == "" then + foundname = resolvers.findbinfile(fullname, 'ofm') or "" + end + if foundname ~= "" then + specification.filename = foundname + specification.format = "ofm" + return font.read_tfm(specification.filename,specification.size) + end +end diff --git a/src/luaotfload-basics-gen.lua b/src/luaotfload-basics-gen.lua deleted file mode 100644 index c19a49a..0000000 --- a/src/luaotfload-basics-gen.lua +++ /dev/null @@ -1,368 +0,0 @@ -if not modules then modules = { } end modules ['luat-basics-gen'] = { - version = 1.100, - comment = "companion to luatex-*.tex", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end - -local dummyfunction = function() -end - -local dummyreporter = function(c) - return function(...) - (texio.reporter or texio.write_nl)(c .. " : " .. string.formatters(...)) - end -end - -statistics = { - register = dummyfunction, - starttiming = dummyfunction, - stoptiming = dummyfunction, - elapsedtime = nil, -} - -directives = { - register = dummyfunction, - enable = dummyfunction, - disable = dummyfunction, -} - -trackers = { - register = dummyfunction, - enable = dummyfunction, - disable = dummyfunction, -} - -experiments = { - register = dummyfunction, - enable = dummyfunction, - disable = dummyfunction, -} - -storage = { -- probably no longer needed - register = dummyfunction, - shared = { }, -} - -logs = { - new = dummyreporter, - reporter = dummyreporter, - messenger = dummyreporter, - report = dummyfunction, -} - -callbacks = { - register = function(n,f) return callback.register(n,f) end, - -} - -utilities = { - storage = { - allocate = function(t) return t or { } end, - mark = function(t) return t or { } end, - }, -} - -characters = characters or { - data = { } -} - --- we need to cheat a bit here - -texconfig.kpse_init = true - -resolvers = resolvers or { } -- no fancy file helpers used - -local remapper = { - otf = "opentype fonts", - ttf = "truetype fonts", - ttc = "truetype fonts", - dfont = "truetype fonts", -- "truetype dictionary", - cid = "cid maps", - cidmap = "cid maps", - fea = "font feature files", - pfa = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this! - pfb = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this! 
- afm = "afm", -} - -function resolvers.findfile(name,fileformat) - name = string.gsub(name,"\\","/") - if not fileformat or fileformat == "" then - fileformat = file.suffix(name) - if fileformat == "" then - fileformat = "tex" - end - end - fileformat = string.lower(fileformat) - fileformat = remapper[fileformat] or fileformat - local found = kpse.find_file(name,fileformat) - if not found or found == "" then - found = kpse.find_file(name,"other text files") - end - return found -end - --- function resolvers.findbinfile(name,fileformat) --- if not fileformat or fileformat == "" then --- fileformat = file.suffix(name) --- end --- return resolvers.findfile(name,(fileformat and remapper[fileformat]) or fileformat) --- end - -resolvers.findbinfile = resolvers.findfile - -function resolvers.loadbinfile(filename,filetype) - local data = io.loaddata(filename) - return true, data, #data -end - -function resolvers.resolve(s) - return s -end - -function resolvers.unresolve(s) - return s -end - --- Caches ... I will make a real stupid version some day when I'm in the --- mood. After all, the generic code does not need the more advanced --- ConTeXt features. Cached data is not shared between ConTeXt and other --- usage as I don't want any dependency at all. Also, ConTeXt might have --- different needs and tricks added. - ---~ containers.usecache = true - -caches = { } - -local writable = nil -local readables = { } -local usingjit = jit - -if not caches.namespace or caches.namespace == "" or caches.namespace == "context" then - caches.namespace = 'generic' -end - -do - - -- standard context tree setup - - local cachepaths = kpse.expand_var('$TEXMFCACHE') or "" - - -- quite like tex live or so (the weird $TEXMFCACHE test seems to be needed on miktex) - - if cachepaths == "" or cachepaths == "$TEXMFCACHE" then - cachepaths = kpse.expand_var('$TEXMFVAR') or "" - end - - -- this also happened to be used (the weird $TEXMFVAR test seems to be needed on miktex) - - if cachepaths == "" or cachepaths == "$TEXMFVAR" then - cachepaths = kpse.expand_var('$VARTEXMF') or "" - end - - -- and this is a last resort (hm, we could use TEMP or TEMPDIR) - - if cachepaths == "" then - local fallbacks = { "TMPDIR", "TEMPDIR", "TMP", "TEMP", "HOME", "HOMEPATH" } - for i=1,#fallbacks do - cachepaths = os.getenv(fallbacks[i]) or "" - if cachepath ~= "" and lfs.isdir(cachepath) then - break - end - end - end - - if cachepaths == "" then - cachepaths = "." 
- end - - cachepaths = string.split(cachepaths,os.type == "windows" and ";" or ":") - - for i=1,#cachepaths do - local cachepath = cachepaths[i] - if not lfs.isdir(cachepath) then - lfs.mkdirs(cachepath) -- needed for texlive and latex - if lfs.isdir(cachepath) then - texio.write(string.format("(created cache path: %s)",cachepath)) - end - end - if file.is_writable(cachepath) then - writable = file.join(cachepath,"luatex-cache") - lfs.mkdir(writable) - writable = file.join(writable,caches.namespace) - lfs.mkdir(writable) - break - end - end - - for i=1,#cachepaths do - if file.is_readable(cachepaths[i]) then - readables[#readables+1] = file.join(cachepaths[i],"luatex-cache",caches.namespace) - end - end - - if not writable then - texio.write_nl("quiting: fix your writable cache path") - os.exit() - elseif #readables == 0 then - texio.write_nl("quiting: fix your readable cache path") - os.exit() - elseif #readables == 1 and readables[1] == writable then - texio.write(string.format("(using cache: %s)",writable)) - else - texio.write(string.format("(using write cache: %s)",writable)) - texio.write(string.format("(using read cache: %s)",table.concat(readables, " "))) - end - -end - -function caches.getwritablepath(category,subcategory) - local path = file.join(writable,category) - lfs.mkdir(path) - path = file.join(path,subcategory) - lfs.mkdir(path) - return path -end - -function caches.getreadablepaths(category,subcategory) - local t = { } - for i=1,#readables do - t[i] = file.join(readables[i],category,subcategory) - end - return t -end - -local function makefullname(path,name) - if path and path ~= "" then - return file.addsuffix(file.join(path,name),"lua"), file.addsuffix(file.join(path,name),usingjit and "lub" or "luc") - end -end - -function caches.is_writable(path,name) - local fullname = makefullname(path,name) - return fullname and file.is_writable(fullname) -end - -function caches.loaddata(paths,name) - for i=1,#paths do - local data = false - local luaname, lucname = makefullname(paths[i],name) - if lucname and not lfs.isfile(lucname) and type(caches.compile) == "function" then - -- in case we used luatex and luajittex mixed ... lub or luc file - texio.write(string.format("(compiling luc: %s)",lucname)) - data = loadfile(luaname) - if data then - data = data() - end - if data then - caches.compile(data,luaname,lucname) - return data - end - end - if lucname and lfs.isfile(lucname) then -- maybe also check for size - texio.write(string.format("(load luc: %s)",lucname)) - data = loadfile(lucname) - if data then - data = data() - end - if data then - return data - else - texio.write(string.format("(loading failed: %s)",lucname)) - end - end - if luaname and lfs.isfile(luaname) then - texio.write(string.format("(load lua: %s)",luaname)) - data = loadfile(luaname) - if data then - data = data() - end - if data then - return data - end - end - end -end - -function caches.savedata(path,name,data) - local luaname, lucname = makefullname(path,name) - if luaname then - texio.write(string.format("(save: %s)",luaname)) - table.tofile(luaname,data,true) - if lucname and type(caches.compile) == "function" then - os.remove(lucname) -- better be safe - texio.write(string.format("(save: %s)",lucname)) - caches.compile(data,luaname,lucname) - end - end -end - --- According to KH os.execute is not permitted in plain/latex so there is --- no reason to use the normal context way. So the method here is slightly --- different from the one we have in context. 
We also use different suffixes --- as we don't want any clashes (sharing cache files is not that handy as --- context moves on faster.) --- --- Beware: serialization might fail on large files (so maybe we should pcall --- this) in which case one should limit the method to luac and enable support --- for execution. - --- function caches.compile(data,luaname,lucname) --- local d = io.loaddata(luaname) --- if not d or d == "" then --- d = table.serialize(data,true) -- slow --- end --- if d and d ~= "" then --- local f = io.open(lucname,'w') --- if f then --- local s = loadstring(d) --- if s then --- f:write(string.dump(s,true)) --- end --- f:close() --- end --- end --- end - -function caches.compile(data,luaname,lucname) - local d = io.loaddata(luaname) - if not d or d == "" then - d = table.serialize(data,true) -- slow - end - if d and d ~= "" then - local f = io.open(lucname,'wb') - if f then - local s = loadstring(d) - if s then - f:write(string.dump(s,true)) - end - f:close() - end - end -end - --- - -function table.setmetatableindex(t,f) - setmetatable(t,{ __index = f }) -end - --- helper for plain: - -arguments = { } - -if arg then - for i=1,#arg do - local k, v = string.match(arg[i],"^%-%-([^=]+)=?(.-)$") - if k and v then - arguments[k] = v - end - end -end diff --git a/src/luaotfload-basics-nod.lua b/src/luaotfload-basics-nod.lua deleted file mode 100644 index 373dab5..0000000 --- a/src/luaotfload-basics-nod.lua +++ /dev/null @@ -1,178 +0,0 @@ -if not modules then modules = { } end modules ['luatex-fonts-nod'] = { - version = 1.001, - comment = "companion to luatex-fonts.lua", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end - --- Don't depend on code here as it is only needed to complement the --- font handler code. - --- Attributes: - -if tex.attribute[0] ~= 0 then - - texio.write_nl("log","!") - texio.write_nl("log","! Attribute 0 is reserved for ConTeXt's font feature management and has to be") - texio.write_nl("log","! set to zero. Also, some attributes in the range 1-255 are used for special") - texio.write_nl("log","! 
purposes so setting them at the TeX end might break the font handler.") - texio.write_nl("log","!") - - tex.attribute[0] = 0 -- else no features - -end - -attributes = attributes or { } -attributes.unsetvalue = -0x7FFFFFFF - -local numbers, last = { }, 127 - -attributes.private = attributes.private or function(name) - local number = numbers[name] - if not number then - if last < 255 then - last = last + 1 - end - number = last - numbers[name] = number - end - return number -end - --- Nodes: - -nodes = { } -nodes.pool = { } -nodes.handlers = { } - -local nodecodes = { } for k,v in next, node.types () do nodecodes[string.gsub(v,"_","")] = k end -local whatcodes = { } for k,v in next, node.whatsits() do whatcodes[string.gsub(v,"_","")] = k end -local glyphcodes = { [0] = "character", "glyph", "ligature", "ghost", "left", "right" } -local disccodes = { [0] = "discretionary", "explicit", "automatic", "regular", "first", "second" } - -nodes.nodecodes = nodecodes -nodes.whatcodes = whatcodes -nodes.whatsitcodes = whatcodes -nodes.glyphcodes = glyphcodes -nodes.disccodes = disccodes - -local free_node = node.free -local remove_node = node.remove -local new_node = node.new -local traverse_id = node.traverse_id - -nodes.handlers.protectglyphs = node.protect_glyphs -nodes.handlers.unprotectglyphs = node.unprotect_glyphs - -local math_code = nodecodes.math -local end_of_math = node.end_of_math - -function node.end_of_math(n) - if n.id == math_code and n.subtype == 1 then - return n - else - return end_of_math(n) - end -end - -function nodes.remove(head, current, free_too) - local t = current - head, current = remove_node(head,current) - if t then - if free_too then - free_node(t) - t = nil - else - t.next, t.prev = nil, nil - end - end - return head, current, t -end - -function nodes.delete(head,current) - return nodes.remove(head,current,true) -end - -function nodes.pool.kern(k) - local n = new_node("kern",1) - n.kern = k - return n -end - --- experimental - -local getfield = node.getfield or function(n,tag) return n[tag] end -local setfield = node.setfield or function(n,tag,value) n[tag] = value end - -nodes.getfield = getfield -nodes.setfield = setfield - -nodes.getattr = getfield -nodes.setattr = setfield - -if node.getid then nodes.getid = node.getid else function nodes.getid (n) return getfield(n,"id") end end -if node.getsubtype then nodes.getsubtype = node.getsubtype else function nodes.getsubtype(n) return getfield(n,"subtype") end end -if node.getnext then nodes.getnext = node.getnext else function nodes.getnext (n) return getfield(n,"next") end end -if node.getprev then nodes.getprev = node.getprev else function nodes.getprev (n) return getfield(n,"prev") end end -if node.getchar then nodes.getchar = node.getchar else function nodes.getchar (n) return getfield(n,"char") end end -if node.getfont then nodes.getfont = node.getfont else function nodes.getfont (n) return getfield(n,"font") end end -if node.getlist then nodes.getlist = node.getlist else function nodes.getlist (n) return getfield(n,"list") end end - -function nodes.tonut (n) return n end -function nodes.tonode(n) return n end - --- being lazy ... just copy a bunch ... 
not all needed in generic but we assume --- nodes to be kind of private anyway - -nodes.tostring = node.tostring or tostring -nodes.copy = node.copy -nodes.copy_list = node.copy_list -nodes.delete = node.delete -nodes.dimensions = node.dimensions -nodes.end_of_math = node.end_of_math -nodes.flush_list = node.flush_list -nodes.flush_node = node.flush_node -nodes.free = node.free -nodes.insert_after = node.insert_after -nodes.insert_before = node.insert_before -nodes.hpack = node.hpack -nodes.new = node.new -nodes.tail = node.tail -nodes.traverse = node.traverse -nodes.traverse_id = node.traverse_id -nodes.slide = node.slide -nodes.vpack = node.vpack - -nodes.first_glyph = node.first_glyph -nodes.first_character = node.first_character -nodes.has_glyph = node.has_glyph or node.first_glyph - -nodes.current_attr = node.current_attr -nodes.do_ligature_n = node.do_ligature_n -nodes.has_field = node.has_field -nodes.last_node = node.last_node -nodes.usedlist = node.usedlist -nodes.protrusion_skippable = node.protrusion_skippable -nodes.write = node.write - -nodes.has_attribute = node.has_attribute -nodes.set_attribute = node.set_attribute -nodes.unset_attribute = node.unset_attribute - -nodes.protect_glyphs = node.protect_glyphs -nodes.unprotect_glyphs = node.unprotect_glyphs -nodes.kerning = node.kerning -nodes.ligaturing = node.ligaturing -nodes.mlist_to_hlist = node.mlist_to_hlist - --- in generic code, at least for some time, we stay nodes, while in context --- we can go nuts (e.g. experimental); this split permits us us keep code --- used elsewhere stable but at the same time play around in context - -nodes.nuts = nodes diff --git a/src/luaotfload-fontloader.lua b/src/luaotfload-fontloader.lua deleted file mode 100644 index e9c6638..0000000 --- a/src/luaotfload-fontloader.lua +++ /dev/null @@ -1,14628 +0,0 @@ --- merged file : luatex-fonts-merged.lua --- parent file : luatex-fonts.lua --- merge date : 12/06/14 14:20:08 - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['l-lua']={ - version=1.001, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local major,minor=string.match(_VERSION,"^[^%d]+(%d+)%.(%d+).*$") -_MAJORVERSION=tonumber(major) or 5 -_MINORVERSION=tonumber(minor) or 1 -_LUAVERSION=_MAJORVERSION+_MINORVERSION/10 -if not lpeg then - lpeg=require("lpeg") -end -if loadstring then - local loadnormal=load - function load(first,...) - if type(first)=="string" then - return loadstring(first,...) - else - return loadnormal(first,...) - end - end -else - loadstring=load -end -if not ipairs then - local function iterate(a,i) - i=i+1 - local v=a[i] - if v~=nil then - return i,v - end - end - function ipairs(a) - return iterate,a,0 - end -end -if not pairs then - function pairs(t) - return next,t - end -end -if not table.unpack then - table.unpack=_G.unpack -elseif not unpack then - _G.unpack=table.unpack -end -if not package.loaders then - package.loaders=package.searchers -end -local print,select,tostring=print,select,tostring -local inspectors={} -function setinspector(inspector) - inspectors[#inspectors+1]=inspector -end -function inspect(...) - for s=1,select("#",...) do - local value=select(s,...) 
- local done=false - for i=1,#inspectors do - done=inspectors[i](value) - if done then - break - end - end - if not done then - print(tostring(value)) - end - end -end -local dummy=function() end -function optionalrequire(...) - local ok,result=xpcall(require,dummy,...) - if ok then - return result - end -end -if lua then - lua.mask=load([[τεχ = 1]]) and "utf" or "ascii" -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['l-lpeg']={ - version=1.001, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -lpeg=require("lpeg") -if not lpeg.print then function lpeg.print(...) print(lpeg.pcode(...)) end end -local type,next,tostring=type,next,tostring -local byte,char,gmatch,format=string.byte,string.char,string.gmatch,string.format -local floor=math.floor -local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt -local lpegtype,lpegmatch,lpegprint=lpeg.type,lpeg.match,lpeg.print -if setinspector then - setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end) -end -lpeg.patterns=lpeg.patterns or {} -local patterns=lpeg.patterns -local anything=P(1) -local endofstring=P(-1) -local alwaysmatched=P(true) -patterns.anything=anything -patterns.endofstring=endofstring -patterns.beginofstring=alwaysmatched -patterns.alwaysmatched=alwaysmatched -local sign=S('+-') -local zero=P('0') -local digit=R('09') -local octdigit=R("07") -local lowercase=R("az") -local uppercase=R("AZ") -local underscore=P("_") -local hexdigit=digit+lowercase+uppercase -local cr,lf,crlf=P("\r"),P("\n"),P("\r\n") -local newline=P("\r")*(P("\n")+P(true))+P("\n") -local escaped=P("\\")*anything -local squote=P("'") -local dquote=P('"') -local space=P(" ") -local period=P(".") -local comma=P(",") -local utfbom_32_be=P('\000\000\254\255') -local utfbom_32_le=P('\255\254\000\000') -local utfbom_16_be=P('\254\255') -local utfbom_16_le=P('\255\254') -local utfbom_8=P('\239\187\191') -local utfbom=utfbom_32_be+utfbom_32_le+utfbom_16_be+utfbom_16_le+utfbom_8 -local utftype=utfbom_32_be*Cc("utf-32-be")+utfbom_32_le*Cc("utf-32-le")+utfbom_16_be*Cc("utf-16-be")+utfbom_16_le*Cc("utf-16-le")+utfbom_8*Cc("utf-8")+alwaysmatched*Cc("utf-8") -local utfstricttype=utfbom_32_be*Cc("utf-32-be")+utfbom_32_le*Cc("utf-32-le")+utfbom_16_be*Cc("utf-16-be")+utfbom_16_le*Cc("utf-16-le")+utfbom_8*Cc("utf-8") -local utfoffset=utfbom_32_be*Cc(4)+utfbom_32_le*Cc(4)+utfbom_16_be*Cc(2)+utfbom_16_le*Cc(2)+utfbom_8*Cc(3)+Cc(0) -local utf8next=R("\128\191") -patterns.utfbom_32_be=utfbom_32_be -patterns.utfbom_32_le=utfbom_32_le -patterns.utfbom_16_be=utfbom_16_be -patterns.utfbom_16_le=utfbom_16_le -patterns.utfbom_8=utfbom_8 -patterns.utf_16_be_nl=P("\000\r\000\n")+P("\000\r")+P("\000\n") -patterns.utf_16_le_nl=P("\r\000\n\000")+P("\r\000")+P("\n\000") -patterns.utf_32_be_nl=P("\000\000\000\r\000\000\000\n")+P("\000\000\000\r")+P("\000\000\000\n") -patterns.utf_32_le_nl=P("\r\000\000\000\n\000\000\000")+P("\r\000\000\000")+P("\n\000\000\000") -patterns.utf8one=R("\000\127") -patterns.utf8two=R("\194\223")*utf8next -patterns.utf8three=R("\224\239")*utf8next*utf8next -patterns.utf8four=R("\240\244")*utf8next*utf8next*utf8next -patterns.utfbom=utfbom -patterns.utftype=utftype -patterns.utfstricttype=utfstricttype -patterns.utfoffset=utfoffset -local 
utf8char=patterns.utf8one+patterns.utf8two+patterns.utf8three+patterns.utf8four -local validutf8char=utf8char^0*endofstring*Cc(true)+Cc(false) -local utf8character=P(1)*R("\128\191")^0 -patterns.utf8=utf8char -patterns.utf8char=utf8char -patterns.utf8character=utf8character -patterns.validutf8=validutf8char -patterns.validutf8char=validutf8char -local eol=S("\n\r") -local spacer=S(" \t\f\v") -local whitespace=eol+spacer -local nonspacer=1-spacer -local nonwhitespace=1-whitespace -patterns.eol=eol -patterns.spacer=spacer -patterns.whitespace=whitespace -patterns.nonspacer=nonspacer -patterns.nonwhitespace=nonwhitespace -local stripper=spacer^0*C((spacer^0*nonspacer^1)^0) -local fullstripper=whitespace^0*C((whitespace^0*nonwhitespace^1)^0) -local collapser=Cs(spacer^0/""*nonspacer^0*((spacer^0/" "*nonspacer^1)^0)) -local b_collapser=Cs(whitespace^0/""*(nonwhitespace^1+whitespace^1/" ")^0) -local e_collapser=Cs((whitespace^1*P(-1)/""+nonwhitespace^1+whitespace^1/" ")^0) -local m_collapser=Cs((nonwhitespace^1+whitespace^1/" ")^0) -local b_stripper=Cs(spacer^0/""*(nonspacer^1+spacer^1/" ")^0) -local e_stripper=Cs((spacer^1*P(-1)/""+nonspacer^1+spacer^1/" ")^0) -local m_stripper=Cs((nonspacer^1+spacer^1/" ")^0) -patterns.stripper=stripper -patterns.fullstripper=fullstripper -patterns.collapser=collapser -patterns.b_collapser=b_collapser -patterns.m_collapser=m_collapser -patterns.e_collapser=e_collapser -patterns.b_stripper=b_stripper -patterns.m_stripper=m_stripper -patterns.e_stripper=e_stripper -patterns.lowercase=lowercase -patterns.uppercase=uppercase -patterns.letter=patterns.lowercase+patterns.uppercase -patterns.space=space -patterns.tab=P("\t") -patterns.spaceortab=patterns.space+patterns.tab -patterns.newline=newline -patterns.emptyline=newline^1 -patterns.equal=P("=") -patterns.comma=comma -patterns.commaspacer=comma*spacer^0 -patterns.period=period -patterns.colon=P(":") -patterns.semicolon=P(";") -patterns.underscore=underscore -patterns.escaped=escaped -patterns.squote=squote -patterns.dquote=dquote -patterns.nosquote=(escaped+(1-squote))^0 -patterns.nodquote=(escaped+(1-dquote))^0 -patterns.unsingle=(squote/"")*patterns.nosquote*(squote/"") -patterns.undouble=(dquote/"")*patterns.nodquote*(dquote/"") -patterns.unquoted=patterns.undouble+patterns.unsingle -patterns.unspacer=((patterns.spacer^1)/"")^0 -patterns.singlequoted=squote*patterns.nosquote*squote -patterns.doublequoted=dquote*patterns.nodquote*dquote -patterns.quoted=patterns.doublequoted+patterns.singlequoted -patterns.digit=digit -patterns.octdigit=octdigit -patterns.hexdigit=hexdigit -patterns.sign=sign -patterns.cardinal=digit^1 -patterns.integer=sign^-1*digit^1 -patterns.unsigned=digit^0*period*digit^1 -patterns.float=sign^-1*patterns.unsigned -patterns.cunsigned=digit^0*comma*digit^1 -patterns.cpunsigned=digit^0*(period+comma)*digit^1 -patterns.cfloat=sign^-1*patterns.cunsigned -patterns.cpfloat=sign^-1*patterns.cpunsigned -patterns.number=patterns.float+patterns.integer -patterns.cnumber=patterns.cfloat+patterns.integer -patterns.cpnumber=patterns.cpfloat+patterns.integer -patterns.oct=zero*octdigit^1 -patterns.octal=patterns.oct -patterns.HEX=zero*P("X")*(digit+uppercase)^1 -patterns.hex=zero*P("x")*(digit+lowercase)^1 -patterns.hexadecimal=zero*S("xX")*hexdigit^1 -patterns.hexafloat=sign^-1*zero*S("xX")*(hexdigit^0*period*hexdigit^1+hexdigit^1*period*hexdigit^0+hexdigit^1)*(S("pP")*sign^-1*hexdigit^1)^-1 -patterns.decafloat=sign^-1*(digit^0*period*digit^1+digit^1*period*digit^0+digit^1)*S("eE")*sign^-1*digit^1 
-patterns.propername=(uppercase+lowercase+underscore)*(uppercase+lowercase+underscore+digit)^0*endofstring -patterns.somecontent=(anything-newline-space)^1 -patterns.beginline=#(1-newline) -patterns.longtostring=Cs(whitespace^0/""*((patterns.quoted+nonwhitespace^1+whitespace^1/""*(P(-1)+Cc(" ")))^0)) -local function anywhere(pattern) - return P { P(pattern)+1*V(1) } -end -lpeg.anywhere=anywhere -function lpeg.instringchecker(p) - p=anywhere(p) - return function(str) - return lpegmatch(p,str) and true or false - end -end -function lpeg.splitter(pattern,action) - return (((1-P(pattern))^1)/action+1)^0 -end -function lpeg.tsplitter(pattern,action) - return Ct((((1-P(pattern))^1)/action+1)^0) -end -local splitters_s,splitters_m,splitters_t={},{},{} -local function splitat(separator,single) - local splitter=(single and splitters_s[separator]) or splitters_m[separator] - if not splitter then - separator=P(separator) - local other=C((1-separator)^0) - if single then - local any=anything - splitter=other*(separator*C(any^0)+"") - splitters_s[separator]=splitter - else - splitter=other*(separator*other)^0 - splitters_m[separator]=splitter - end - end - return splitter -end -local function tsplitat(separator) - local splitter=splitters_t[separator] - if not splitter then - splitter=Ct(splitat(separator)) - splitters_t[separator]=splitter - end - return splitter -end -lpeg.splitat=splitat -lpeg.tsplitat=tsplitat -function string.splitup(str,separator) - if not separator then - separator="," - end - return lpegmatch(splitters_m[separator] or splitat(separator),str) -end -local cache={} -function lpeg.split(separator,str) - local c=cache[separator] - if not c then - c=tsplitat(separator) - cache[separator]=c - end - return lpegmatch(c,str) -end -function string.split(str,separator) - if separator then - local c=cache[separator] - if not c then - c=tsplitat(separator) - cache[separator]=c - end - return lpegmatch(c,str) - else - return { str } - end -end -local spacing=patterns.spacer^0*newline -local empty=spacing*Cc("") -local nonempty=Cs((1-spacing)^1)*spacing^-1 -local content=(empty+nonempty)^1 -patterns.textline=content -local linesplitter=tsplitat(newline) -patterns.linesplitter=linesplitter -function string.splitlines(str) - return lpegmatch(linesplitter,str) -end -local cache={} -function lpeg.checkedsplit(separator,str) - local c=cache[separator] - if not c then - separator=P(separator) - local other=C((1-separator)^1) - c=Ct(separator^0*other*(separator^1*other)^0) - cache[separator]=c - end - return lpegmatch(c,str) -end -function string.checkedsplit(str,separator) - local c=cache[separator] - if not c then - separator=P(separator) - local other=C((1-separator)^1) - c=Ct(separator^0*other*(separator^1*other)^0) - cache[separator]=c - end - return lpegmatch(c,str) -end -local function f2(s) local c1,c2=byte(s,1,2) return c1*64+c2-12416 end -local function f3(s) local c1,c2,c3=byte(s,1,3) return (c1*64+c2)*64+c3-925824 end -local function f4(s) local c1,c2,c3,c4=byte(s,1,4) return ((c1*64+c2)*64+c3)*64+c4-63447168 end -local utf8byte=patterns.utf8one/byte+patterns.utf8two/f2+patterns.utf8three/f3+patterns.utf8four/f4 -patterns.utf8byte=utf8byte -local cache={} -function lpeg.stripper(str) - if type(str)=="string" then - local s=cache[str] - if not s then - s=Cs(((S(str)^1)/""+1)^0) - cache[str]=s - end - return s - else - return Cs(((str^1)/""+1)^0) - end -end -local cache={} -function lpeg.keeper(str) - if type(str)=="string" then - local s=cache[str] - if not s then - 
s=Cs((((1-S(str))^1)/""+1)^0) - cache[str]=s - end - return s - else - return Cs((((1-str)^1)/""+1)^0) - end -end -function lpeg.frontstripper(str) - return (P(str)+P(true))*Cs(anything^0) -end -function lpeg.endstripper(str) - return Cs((1-P(str)*endofstring)^0) -end -function lpeg.replacer(one,two,makefunction,isutf) - local pattern - local u=isutf and utf8char or 1 - if type(one)=="table" then - local no=#one - local p=P(false) - if no==0 then - for k,v in next,one do - p=p+P(k)/v - end - pattern=Cs((p+u)^0) - elseif no==1 then - local o=one[1] - one,two=P(o[1]),o[2] - pattern=Cs((one/two+u)^0) - else - for i=1,no do - local o=one[i] - p=p+P(o[1])/o[2] - end - pattern=Cs((p+u)^0) - end - else - pattern=Cs((P(one)/(two or "")+u)^0) - end - if makefunction then - return function(str) - return lpegmatch(pattern,str) - end - else - return pattern - end -end -function lpeg.finder(lst,makefunction,isutf) - local pattern - if type(lst)=="table" then - pattern=P(false) - if #lst==0 then - for k,v in next,lst do - pattern=pattern+P(k) - end - else - for i=1,#lst do - pattern=pattern+P(lst[i]) - end - end - else - pattern=P(lst) - end - if isutf then - pattern=((utf8char or 1)-pattern)^0*pattern - else - pattern=(1-pattern)^0*pattern - end - if makefunction then - return function(str) - return lpegmatch(pattern,str) - end - else - return pattern - end -end -local splitters_f,splitters_s={},{} -function lpeg.firstofsplit(separator) - local splitter=splitters_f[separator] - if not splitter then - local pattern=P(separator) - splitter=C((1-pattern)^0) - splitters_f[separator]=splitter - end - return splitter -end -function lpeg.secondofsplit(separator) - local splitter=splitters_s[separator] - if not splitter then - local pattern=P(separator) - splitter=(1-pattern)^0*pattern*C(anything^0) - splitters_s[separator]=splitter - end - return splitter -end -local splitters_s,splitters_p={},{} -function lpeg.beforesuffix(separator) - local splitter=splitters_s[separator] - if not splitter then - local pattern=P(separator) - splitter=C((1-pattern)^0)*pattern*endofstring - splitters_s[separator]=splitter - end - return splitter -end -function lpeg.afterprefix(separator) - local splitter=splitters_p[separator] - if not splitter then - local pattern=P(separator) - splitter=pattern*C(anything^0) - splitters_p[separator]=splitter - end - return splitter -end -function lpeg.balancer(left,right) - left,right=P(left),P(right) - return P { left*((1-left-right)+V(1))^0*right } -end -local nany=utf8char/"" -function lpeg.counter(pattern) - pattern=Cs((P(pattern)/" "+nany)^0) - return function(str) - return #lpegmatch(pattern,str) - end -end -utf=utf or (unicode and unicode.utf8) or {} -local utfcharacters=utf and utf.characters or string.utfcharacters -local utfgmatch=utf and utf.gmatch -local utfchar=utf and utf.char -lpeg.UP=lpeg.P -if utfcharacters then - function lpeg.US(str) - local p=P(false) - for uc in utfcharacters(str) do - p=p+P(uc) - end - return p - end -elseif utfgmatch then - function lpeg.US(str) - local p=P(false) - for uc in utfgmatch(str,".") do - p=p+P(uc) - end - return p - end -else - function lpeg.US(str) - local p=P(false) - local f=function(uc) - p=p+P(uc) - end - lpegmatch((utf8char/f)^0,str) - return p - end -end -local range=utf8byte*utf8byte+Cc(false) -function lpeg.UR(str,more) - local first,last - if type(str)=="number" then - first=str - last=more or first - else - first,last=lpegmatch(range,str) - if not last then - return P(str) - end - end - if first==last then - return P(str) - 
elseif utfchar and (last-first<8) then - local p=P(false) - for i=first,last do - p=p+P(utfchar(i)) - end - return p - else - local f=function(b) - return b>=first and b<=last - end - return utf8byte/f - end -end -function lpeg.is_lpeg(p) - return p and lpegtype(p)=="pattern" -end -function lpeg.oneof(list,...) - if type(list)~="table" then - list={ list,... } - end - local p=P(list[1]) - for l=2,#list do - p=p+P(list[l]) - end - return p -end -local sort=table.sort -local function copyindexed(old) - local new={} - for i=1,#old do - new[i]=old - end - return new -end -local function sortedkeys(tab) - local keys,s={},0 - for key,_ in next,tab do - s=s+1 - keys[s]=key - end - sort(keys) - return keys -end -function lpeg.append(list,pp,delayed,checked) - local p=pp - if #list>0 then - local keys=copyindexed(list) - sort(keys) - for i=#keys,1,-1 do - local k=keys[i] - if p then - p=P(k)+p - else - p=P(k) - end - end - elseif delayed then - local keys=sortedkeys(list) - if p then - for i=1,#keys,1 do - local k=keys[i] - local v=list[k] - p=P(k)/list+p - end - else - for i=1,#keys do - local k=keys[i] - local v=list[k] - if p then - p=P(k)+p - else - p=P(k) - end - end - if p then - p=p/list - end - end - elseif checked then - local keys=sortedkeys(list) - for i=1,#keys do - local k=keys[i] - local v=list[k] - if p then - if k==v then - p=P(k)+p - else - p=P(k)/v+p - end - else - if k==v then - p=P(k) - else - p=P(k)/v - end - end - end - else - local keys=sortedkeys(list) - for i=1,#keys do - local k=keys[i] - local v=list[k] - if p then - p=P(k)/v+p - else - p=P(k)/v - end - end - end - return p -end -local function make(t,hash) - local p=P(false) - local keys=sortedkeys(t) - for i=1,#keys do - local k=keys[i] - local v=t[k] - local h=hash[v] - if h then - if next(v) then - p=p+P(k)*(make(v,hash)+P(true)) - else - p=p+P(k)*P(true) - end - else - if next(v) then - p=p+P(k)*make(v,hash) - else - p=p+P(k) - end - end - end - return p -end -function lpeg.utfchartabletopattern(list) - local tree={} - local hash={} - local n=#list - if n==0 then - for s in next,list do - local t=tree - for c in gmatch(s,".") do - local tc=t[c] - if not tc then - tc={} - t[c]=tc - end - t=tc - end - hash[t]=s - end - else - for i=1,n do - local t=tree - local s=list[i] - for c in gmatch(s,".") do - local tc=t[c] - if not tc then - tc={} - t[c]=tc - end - t=tc - end - hash[t]=s - end - end - return make(tree,hash) -end -patterns.containseol=lpeg.finder(eol) -local function nextstep(n,step,result) - local m=n%step - local d=floor(n/step) - if d>0 then - local v=V(tostring(step)) - local s=result.start - for i=1,d do - if s then - s=v*s - else - s=v - end - end - result.start=s - end - if step>1 and result.start then - local v=V(tostring(step/2)) - result[tostring(step)]=v*v - end - if step>0 then - return nextstep(m,step/2,result) - else - return result - end -end -function lpeg.times(pattern,n) - return P(nextstep(n,2^16,{ "start",["1"]=pattern })) -end -local trailingzeros=zero^0*-digit -local case_1=period*trailingzeros/"" -local case_2=period*(digit-trailingzeros)^1*(trailingzeros/"") -local number=digit^1*(case_1+case_2) -local stripper=Cs((number+1)^0) -lpeg.patterns.stripzeros=stripper -local byte_to_HEX={} -local byte_to_hex={} -local byte_to_dec={} -local hex_to_byte={} -for i=0,255 do - local H=format("%02X",i) - local h=format("%02x",i) - local d=format("%03i",i) - local c=char(i) - byte_to_HEX[c]=H - byte_to_hex[c]=h - byte_to_dec[c]=d - hex_to_byte[h]=c - hex_to_byte[H]=c -end -local 
hextobyte=P(2)/hex_to_byte -local bytetoHEX=P(1)/byte_to_HEX -local bytetohex=P(1)/byte_to_hex -local bytetodec=P(1)/byte_to_dec -local hextobytes=Cs(hextobyte^0) -local bytestoHEX=Cs(bytetoHEX^0) -local bytestohex=Cs(bytetohex^0) -local bytestodec=Cs(bytetodec^0) -patterns.hextobyte=hextobyte -patterns.bytetoHEX=bytetoHEX -patterns.bytetohex=bytetohex -patterns.bytetodec=bytetodec -patterns.hextobytes=hextobytes -patterns.bytestoHEX=bytestoHEX -patterns.bytestohex=bytestohex -patterns.bytestodec=bytestodec -function string.toHEX(s) - if not s or s=="" then - return s - else - return lpegmatch(bytestoHEX,s) - end -end -function string.tohex(s) - if not s or s=="" then - return s - else - return lpegmatch(bytestohex,s) - end -end -function string.todec(s) - if not s or s=="" then - return s - else - return lpegmatch(bytestodec,s) - end -end -function string.tobytes(s) - if not s or s=="" then - return s - else - return lpegmatch(hextobytes,s) - end -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['l-functions']={ - version=1.001, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -functions=functions or {} -function functions.dummy() end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['l-string']={ - version=1.001, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local string=string -local sub,gmatch,format,char,byte,rep,lower=string.sub,string.gmatch,string.format,string.char,string.byte,string.rep,string.lower -local lpegmatch,patterns=lpeg.match,lpeg.patterns -local P,S,C,Ct,Cc,Cs=lpeg.P,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cc,lpeg.Cs -local unquoted=patterns.squote*C(patterns.nosquote)*patterns.squote+patterns.dquote*C(patterns.nodquote)*patterns.dquote -function string.unquoted(str) - return lpegmatch(unquoted,str) or str -end -function string.quoted(str) - return format("%q",str) -end -function string.count(str,pattern) - local n=0 - for _ in gmatch(str,pattern) do - n=n+1 - end - return n -end -function string.limit(str,n,sentinel) - if #str>n then - sentinel=sentinel or "..." 
- return sub(str,1,(n-#sentinel))..sentinel - else - return str - end -end -local stripper=patterns.stripper -local fullstripper=patterns.fullstripper -local collapser=patterns.collapser -local longtostring=patterns.longtostring -function string.strip(str) - return lpegmatch(stripper,str) or "" -end -function string.fullstrip(str) - return lpegmatch(fullstripper,str) or "" -end -function string.collapsespaces(str) - return lpegmatch(collapser,str) or "" -end -function string.longtostring(str) - return lpegmatch(longtostring,str) or "" -end -local pattern=P(" ")^0*P(-1) -function string.is_empty(str) - if str=="" then - return true - else - return lpegmatch(pattern,str) and true or false - end -end -local anything=patterns.anything -local allescapes=Cc("%")*S(".-+%?()[]*") -local someescapes=Cc("%")*S(".-+%()[]") -local matchescapes=Cc(".")*S("*?") -local pattern_a=Cs ((allescapes+anything )^0 ) -local pattern_b=Cs ((someescapes+matchescapes+anything )^0 ) -local pattern_c=Cs (Cc("^")*(someescapes+matchescapes+anything )^0*Cc("$") ) -function string.escapedpattern(str,simple) - return lpegmatch(simple and pattern_b or pattern_a,str) -end -function string.topattern(str,lowercase,strict) - if str=="" or type(str)~="string" then - return ".*" - elseif strict then - str=lpegmatch(pattern_c,str) - else - str=lpegmatch(pattern_b,str) - end - if lowercase then - return lower(str) - else - return str - end -end -function string.valid(str,default) - return (type(str)=="string" and str~="" and str) or default or nil -end -string.itself=function(s) return s end -local pattern=Ct(C(1)^0) -function string.totable(str) - return lpegmatch(pattern,str) -end -local replacer=lpeg.replacer("@","%%") -function string.tformat(fmt,...) - return format(lpegmatch(replacer,fmt),...) 
-end -string.quote=string.quoted -string.unquote=string.unquoted - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['l-table']={ - version=1.001, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local type,next,tostring,tonumber,ipairs,select=type,next,tostring,tonumber,ipairs,select -local table,string=table,string -local concat,sort,insert,remove=table.concat,table.sort,table.insert,table.remove -local format,lower,dump=string.format,string.lower,string.dump -local getmetatable,setmetatable=getmetatable,setmetatable -local getinfo=debug.getinfo -local lpegmatch,patterns=lpeg.match,lpeg.patterns -local floor=math.floor -local stripper=patterns.stripper -function table.strip(tab) - local lst,l={},0 - for i=1,#tab do - local s=lpegmatch(stripper,tab[i]) or "" - if s=="" then - else - l=l+1 - lst[l]=s - end - end - return lst -end -function table.keys(t) - if t then - local keys,k={},0 - for key,_ in next,t do - k=k+1 - keys[k]=key - end - return keys - else - return {} - end -end -local function compare(a,b) - local ta,tb=type(a),type(b) - if ta==tb then - return a0 then - local n=0 - for _,v in next,t do - n=n+1 - end - if n==#t then - local tt,nt={},0 - for i=1,#t do - local v=t[i] - local tv=type(v) - if tv=="number" then - nt=nt+1 - if hexify then - tt[nt]=format("0x%X",v) - else - tt[nt]=tostring(v) - end - elseif tv=="string" then - nt=nt+1 - tt[nt]=format("%q",v) - elseif tv=="boolean" then - nt=nt+1 - tt[nt]=v and "true" or "false" - else - tt=nil - break - end - end - return tt - end - end - return nil -end -local propername=patterns.propername -local function dummy() end -local function do_serialize(root,name,depth,level,indexed) - if level>0 then - depth=depth.." 
" - if indexed then - handle(format("%s{",depth)) - else - local tn=type(name) - if tn=="number" then - if hexify then - handle(format("%s[0x%X]={",depth,name)) - else - handle(format("%s[%s]={",depth,name)) - end - elseif tn=="string" then - if noquotes and not reserved[name] and lpegmatch(propername,name) then - handle(format("%s%s={",depth,name)) - else - handle(format("%s[%q]={",depth,name)) - end - elseif tn=="boolean" then - handle(format("%s[%s]={",depth,name and "true" or "false")) - else - handle(format("%s{",depth)) - end - end - end - if root and next(root) then - local first,last=nil,0 - if compact then - last=#root - for k=1,last do - if root[k]==nil then - last=k-1 - break - end - end - if last>0 then - first=1 - end - end - local sk=sortedkeys(root) - for i=1,#sk do - local k=sk[i] - local v=root[k] - local tv,tk=type(v),type(k) - if compact and first and tk=="number" and k>=first and k<=last then - if tv=="number" then - if hexify then - handle(format("%s 0x%X,",depth,v)) - else - handle(format("%s %s,",depth,v)) - end - elseif tv=="string" then - if reduce and tonumber(v) then - handle(format("%s %s,",depth,v)) - else - handle(format("%s %q,",depth,v)) - end - elseif tv=="table" then - if not next(v) then - handle(format("%s {},",depth)) - elseif inline then - local st=simple_table(v) - if st then - handle(format("%s { %s },",depth,concat(st,", "))) - else - do_serialize(v,k,depth,level+1,true) - end - else - do_serialize(v,k,depth,level+1,true) - end - elseif tv=="boolean" then - handle(format("%s %s,",depth,v and "true" or "false")) - elseif tv=="function" then - if functions then - handle(format('%s load(%q),',depth,dump(v))) - else - handle(format('%s "function",',depth)) - end - else - handle(format("%s %q,",depth,tostring(v))) - end - elseif k=="__p__" then - if false then - handle(format("%s __p__=nil,",depth)) - end - elseif tv=="number" then - if tk=="number" then - if hexify then - handle(format("%s [0x%X]=0x%X,",depth,k,v)) - else - handle(format("%s [%s]=%s,",depth,k,v)) - end - elseif tk=="boolean" then - if hexify then - handle(format("%s [%s]=0x%X,",depth,k and "true" or "false",v)) - else - handle(format("%s [%s]=%s,",depth,k and "true" or "false",v)) - end - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - if hexify then - handle(format("%s %s=0x%X,",depth,k,v)) - else - handle(format("%s %s=%s,",depth,k,v)) - end - else - if hexify then - handle(format("%s [%q]=0x%X,",depth,k,v)) - else - handle(format("%s [%q]=%s,",depth,k,v)) - end - end - elseif tv=="string" then - if reduce and tonumber(v) then - if tk=="number" then - if hexify then - handle(format("%s [0x%X]=%s,",depth,k,v)) - else - handle(format("%s [%s]=%s,",depth,k,v)) - end - elseif tk=="boolean" then - handle(format("%s [%s]=%s,",depth,k and "true" or "false",v)) - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - handle(format("%s %s=%s,",depth,k,v)) - else - handle(format("%s [%q]=%s,",depth,k,v)) - end - else - if tk=="number" then - if hexify then - handle(format("%s [0x%X]=%q,",depth,k,v)) - else - handle(format("%s [%s]=%q,",depth,k,v)) - end - elseif tk=="boolean" then - handle(format("%s [%s]=%q,",depth,k and "true" or "false",v)) - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - handle(format("%s %s=%q,",depth,k,v)) - else - handle(format("%s [%q]=%q,",depth,k,v)) - end - end - elseif tv=="table" then - if not next(v) then - if tk=="number" then - if hexify then - handle(format("%s [0x%X]={},",depth,k)) - else - 
handle(format("%s [%s]={},",depth,k)) - end - elseif tk=="boolean" then - handle(format("%s [%s]={},",depth,k and "true" or "false")) - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - handle(format("%s %s={},",depth,k)) - else - handle(format("%s [%q]={},",depth,k)) - end - elseif inline then - local st=simple_table(v) - if st then - if tk=="number" then - if hexify then - handle(format("%s [0x%X]={ %s },",depth,k,concat(st,", "))) - else - handle(format("%s [%s]={ %s },",depth,k,concat(st,", "))) - end - elseif tk=="boolean" then - handle(format("%s [%s]={ %s },",depth,k and "true" or "false",concat(st,", "))) - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - handle(format("%s %s={ %s },",depth,k,concat(st,", "))) - else - handle(format("%s [%q]={ %s },",depth,k,concat(st,", "))) - end - else - do_serialize(v,k,depth,level+1) - end - else - do_serialize(v,k,depth,level+1) - end - elseif tv=="boolean" then - if tk=="number" then - if hexify then - handle(format("%s [0x%X]=%s,",depth,k,v and "true" or "false")) - else - handle(format("%s [%s]=%s,",depth,k,v and "true" or "false")) - end - elseif tk=="boolean" then - handle(format("%s [%s]=%s,",depth,tostring(k),v and "true" or "false")) - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - handle(format("%s %s=%s,",depth,k,v and "true" or "false")) - else - handle(format("%s [%q]=%s,",depth,k,v and "true" or "false")) - end - elseif tv=="function" then - if functions then - local f=getinfo(v).what=="C" and dump(dummy) or dump(v) - if tk=="number" then - if hexify then - handle(format("%s [0x%X]=load(%q),",depth,k,f)) - else - handle(format("%s [%s]=load(%q),",depth,k,f)) - end - elseif tk=="boolean" then - handle(format("%s [%s]=load(%q),",depth,k and "true" or "false",f)) - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - handle(format("%s %s=load(%q),",depth,k,f)) - else - handle(format("%s [%q]=load(%q),",depth,k,f)) - end - end - else - if tk=="number" then - if hexify then - handle(format("%s [0x%X]=%q,",depth,k,tostring(v))) - else - handle(format("%s [%s]=%q,",depth,k,tostring(v))) - end - elseif tk=="boolean" then - handle(format("%s [%s]=%q,",depth,k and "true" or "false",tostring(v))) - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - handle(format("%s %s=%q,",depth,k,tostring(v))) - else - handle(format("%s [%q]=%q,",depth,k,tostring(v))) - end - end - end - end - if level>0 then - handle(format("%s},",depth)) - end -end -local function serialize(_handle,root,name,specification) - local tname=type(name) - if type(specification)=="table" then - noquotes=specification.noquotes - hexify=specification.hexify - handle=_handle or specification.handle or print - reduce=specification.reduce or false - functions=specification.functions - compact=specification.compact - inline=specification.inline and compact - if functions==nil then - functions=true - end - if compact==nil then - compact=true - end - if inline==nil then - inline=compact - end - else - noquotes=false - hexify=false - handle=_handle or print - reduce=false - compact=true - inline=true - functions=true - end - if tname=="string" then - if name=="return" then - handle("return {") - else - handle(name.."={") - end - elseif tname=="number" then - if hexify then - handle(format("[0x%X]={",name)) - else - handle("["..name.."]={") - end - elseif tname=="boolean" then - if name then - handle("return {") - else - handle("{") - end - else - handle("t={") - end - if root then - 
if getmetatable(root) then - local dummy=root._w_h_a_t_e_v_e_r_ - root._w_h_a_t_e_v_e_r_=nil - end - if next(root) then - do_serialize(root,name,"",0) - end - end - handle("}") -end -function table.serialize(root,name,specification) - local t,n={},0 - local function flush(s) - n=n+1 - t[n]=s - end - serialize(flush,root,name,specification) - return concat(t,"\n") -end -table.tohandle=serialize -local maxtab=2*1024 -function table.tofile(filename,root,name,specification) - local f=io.open(filename,'w') - if f then - if maxtab>1 then - local t,n={},0 - local function flush(s) - n=n+1 - t[n]=s - if n>maxtab then - f:write(concat(t,"\n"),"\n") - t,n={},0 - end - end - serialize(flush,root,name,specification) - f:write(concat(t,"\n"),"\n") - else - local function flush(s) - f:write(s,"\n") - end - serialize(flush,root,name,specification) - end - f:close() - io.flush() - end -end -local function flattened(t,f,depth) - if f==nil then - f={} - depth=0xFFFF - elseif tonumber(f) then - depth=f - f={} - elseif not depth then - depth=0xFFFF - end - for k,v in next,t do - if type(k)~="number" then - if depth>0 and type(v)=="table" then - flattened(v,f,depth-1) - else - f[#f+1]=v - end - end - end - for k=1,#t do - local v=t[k] - if depth>0 and type(v)=="table" then - flattened(v,f,depth-1) - else - f[#f+1]=v - end - end - return f -end -table.flattened=flattened -local function unnest(t,f) - if not f then - f={} - end - for i=1,#t do - local v=t[i] - if type(v)=="table" then - if type(v[1])=="table" then - unnest(v,f) - else - f[#f+1]=v - end - else - f[#f+1]=v - end - end - return f -end -function table.unnest(t) - return unnest(t) -end -local function are_equal(a,b,n,m) - if a and b and #a==#b then - n=n or 1 - m=m or #a - for i=n,m do - local ai,bi=a[i],b[i] - if ai==bi then - elseif type(ai)=="table" and type(bi)=="table" then - if not are_equal(ai,bi) then - return false - end - else - return false - end - end - return true - else - return false - end -end -local function identical(a,b) - for ka,va in next,a do - local vb=b[ka] - if va==vb then - elseif type(va)=="table" and type(vb)=="table" then - if not identical(va,vb) then - return false - end - else - return false - end - end - return true -end -table.identical=identical -table.are_equal=are_equal -local function sparse(old,nest,keeptables) - local new={} - for k,v in next,old do - if not (v=="" or v==false) then - if nest and type(v)=="table" then - v=sparse(v,nest) - if keeptables or next(v) then - new[k]=v - end - else - new[k]=v - end - end - end - return new -end -table.sparse=sparse -function table.compact(t) - return sparse(t,true,true) -end -function table.contains(t,v) - if t then - for i=1,#t do - if t[i]==v then - return i - end - end - end - return false -end -function table.count(t) - local n=0 - for k,v in next,t do - n=n+1 - end - return n -end -function table.swapped(t,s) - local n={} - if s then - for k,v in next,s do - n[k]=v - end - end - for k,v in next,t do - n[v]=k - end - return n -end -function table.mirrored(t) - local n={} - for k,v in next,t do - n[v]=k - n[k]=v - end - return n -end -function table.reversed(t) - if t then - local tt,tn={},#t - if tn>0 then - local ttn=0 - for i=tn,1,-1 do - ttn=ttn+1 - tt[ttn]=t[i] - end - end - return tt - end -end -function table.reverse(t) - if t then - local n=#t - for i=1,floor(n/2) do - local j=n-i+1 - t[i],t[j]=t[j],t[i] - end - return t - end -end -function table.sequenced(t,sep,simple) - if not t then - return "" - end - local n=#t - local s={} - if n>0 then - for i=1,n 
do - s[i]=tostring(t[i]) - end - else - n=0 - for k,v in sortedhash(t) do - if simple then - if v==true then - n=n+1 - s[n]=k - elseif v and v~="" then - n=n+1 - s[n]=k.."="..tostring(v) - end - else - n=n+1 - s[n]=k.."="..tostring(v) - end - end - end - return concat(s,sep or " | ") -end -function table.print(t,...) - if type(t)~="table" then - print(tostring(t)) - else - serialize(print,t,...) - end -end -if setinspector then - setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end) -end -function table.sub(t,i,j) - return { unpack(t,i,j) } -end -function table.is_empty(t) - return not t or not next(t) -end -function table.has_one_entry(t) - return t and not next(t,next(t)) -end -function table.loweredkeys(t) - local l={} - for k,v in next,t do - l[lower(k)]=v - end - return l -end -function table.unique(old) - local hash={} - local new={} - local n=0 - for i=1,#old do - local oi=old[i] - if not hash[oi] then - n=n+1 - new[n]=oi - hash[oi]=true - end - end - return new -end -function table.sorted(t,...) - sort(t,...) - return t -end -function table.values(t,s) - if t then - local values,keys,v={},{},0 - for key,value in next,t do - if not keys[value] then - v=v+1 - values[v]=value - keys[k]=key - end - end - if s then - sort(values) - end - return values - else - return {} - end -end -function table.filtered(t,pattern,sort,cmp) - if t and type(pattern)=="string" then - if sort then - local s - if cmp then - s=sortedhashkeys(t,function(a,b) return cmp(t,a,b) end) - else - s=sortedkeys(t) - end - local n=0 - local m=#s - local function kv(s) - while n16*1024*1024 then - step=16*1024*1024 - else - step=floor(size/(1024*1024))*1024*1024/8 - end - local data={} - while true do - local r=f:read(step) - if not r then - return concat(data) - else - data[#data+1]=r - end - end - end -end -io.readall=readall -function io.loaddata(filename,textmode) - local f=io.open(filename,(textmode and 'r') or 'rb') - if f then - local data=readall(f) - f:close() - if #data>0 then - return data - end - end -end -function io.savedata(filename,data,joiner) - local f=io.open(filename,"wb") - if f then - if type(data)=="table" then - f:write(concat(data,joiner or "")) - elseif type(data)=="function" then - data(f) - else - f:write(data or "") - end - f:close() - io.flush() - return true - else - return false - end -end -function io.loadlines(filename,n) - local f=io.open(filename,'r') - if not f then - elseif n then - local lines={} - for i=1,n do - local line=f:read("*lines") - if line then - lines[#lines+1]=line - else - break - end - end - f:close() - lines=concat(lines,"\n") - if #lines>0 then - return lines - end - else - local line=f:read("*line") or "" - f:close() - if #line>0 then - return line - end - end -end -function io.loadchunk(filename,n) - local f=io.open(filename,'rb') - if f then - local data=f:read(n or 1024) - f:close() - if #data>0 then - return data - end - end -end -function io.exists(filename) - local f=io.open(filename) - if f==nil then - return false - else - f:close() - return true - end -end -function io.size(filename) - local f=io.open(filename) - if f==nil then - return 0 - else - local s=f:seek("end") - f:close() - return s - end -end -function io.noflines(f) - if type(f)=="string" then - local f=io.open(filename) - if f then - local n=f and io.noflines(f) or 0 - f:close() - return n - else - return 0 - end - else - local n=0 - for _ in f:lines() do - n=n+1 - end - f:seek('set',0) - return n - end -end -local nextchar={ - [ 4]=function(f) - 
return f:read(1,1,1,1) - end, - [ 2]=function(f) - return f:read(1,1) - end, - [ 1]=function(f) - return f:read(1) - end, - [-2]=function(f) - local a,b=f:read(1,1) - return b,a - end, - [-4]=function(f) - local a,b,c,d=f:read(1,1,1,1) - return d,c,b,a - end -} -function io.characters(f,n) - if f then - return nextchar[n or 1],f - end -end -local nextbyte={ - [4]=function(f) - local a,b,c,d=f:read(1,1,1,1) - if d then - return byte(a),byte(b),byte(c),byte(d) - end - end, - [3]=function(f) - local a,b,c=f:read(1,1,1) - if b then - return byte(a),byte(b),byte(c) - end - end, - [2]=function(f) - local a,b=f:read(1,1) - if b then - return byte(a),byte(b) - end - end, - [1]=function (f) - local a=f:read(1) - if a then - return byte(a) - end - end, - [-2]=function (f) - local a,b=f:read(1,1) - if b then - return byte(b),byte(a) - end - end, - [-3]=function(f) - local a,b,c=f:read(1,1,1) - if b then - return byte(c),byte(b),byte(a) - end - end, - [-4]=function(f) - local a,b,c,d=f:read(1,1,1,1) - if d then - return byte(d),byte(c),byte(b),byte(a) - end - end -} -function io.bytes(f,n) - if f then - return nextbyte[n or 1],f - else - return nil,nil - end -end -function io.ask(question,default,options) - while true do - io.write(question) - if options then - io.write(format(" [%s]",concat(options,"|"))) - end - if default then - io.write(format(" [%s]",default)) - end - io.write(format(" ")) - io.flush() - local answer=io.read() - answer=gsub(answer,"^%s*(.*)%s*$","%1") - if answer=="" and default then - return default - elseif not options then - return answer - else - for k=1,#options do - if options[k]==answer then - return answer - end - end - local pattern="^"..answer - for k=1,#options do - local v=options[k] - if find(v,pattern) then - return v - end - end - end - end -end -local function readnumber(f,n,m) - if m then - f:seek("set",n) - n=m - end - if n==1 then - return byte(f:read(1)) - elseif n==2 then - local a,b=byte(f:read(2),1,2) - return 256*a+b - elseif n==3 then - local a,b,c=byte(f:read(3),1,3) - return 256*256*a+256*b+c - elseif n==4 then - local a,b,c,d=byte(f:read(4),1,4) - return 256*256*256*a+256*256*b+256*c+d - elseif n==8 then - local a,b=readnumber(f,4),readnumber(f,4) - return 256*a+b - elseif n==12 then - local a,b,c=readnumber(f,4),readnumber(f,4),readnumber(f,4) - return 256*256*a+256*b+c - elseif n==-2 then - local b,a=byte(f:read(2),1,2) - return 256*a+b - elseif n==-3 then - local c,b,a=byte(f:read(3),1,3) - return 256*256*a+256*b+c - elseif n==-4 then - local d,c,b,a=byte(f:read(4),1,4) - return 256*256*256*a+256*256*b+256*c+d - elseif n==-8 then - local h,g,f,e,d,c,b,a=byte(f:read(8),1,8) - return 256*256*256*256*256*256*256*a+256*256*256*256*256*256*b+256*256*256*256*256*c+256*256*256*256*d+256*256*256*e+256*256*f+256*g+h - else - return 0 - end -end -io.readnumber=readnumber -function io.readstring(f,n,m) - if m then - f:seek("set",n) - n=m - end - local str=gsub(f:read(n),"\000","") - return str -end -if not io.i_limiter then function io.i_limiter() end end -if not io.o_limiter then function io.o_limiter() end end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['l-file']={ - version=1.001, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -file=file or {} -local file=file -if not lfs then - lfs=optionalrequire("lfs") -end -if not lfs then - lfs={ 
- getcurrentdir=function() - return "." - end, - attributes=function() - return nil - end, - isfile=function(name) - local f=io.open(name,'rb') - if f then - f:close() - return true - end - end, - isdir=function(name) - print("you need to load lfs") - return false - end - } -elseif not lfs.isfile then - local attributes=lfs.attributes - function lfs.isdir(name) - return attributes(name,"mode")=="directory" - end - function lfs.isfile(name) - return attributes(name,"mode")=="file" - end -end -local insert,concat=table.insert,table.concat -local match,find,gmatch=string.match,string.find,string.gmatch -local lpegmatch=lpeg.match -local getcurrentdir,attributes=lfs.currentdir,lfs.attributes -local checkedsplit=string.checkedsplit -local P,R,S,C,Cs,Cp,Cc,Ct=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Cs,lpeg.Cp,lpeg.Cc,lpeg.Ct -local colon=P(":") -local period=P(".") -local periods=P("..") -local fwslash=P("/") -local bwslash=P("\\") -local slashes=S("\\/") -local noperiod=1-period -local noslashes=1-slashes -local name=noperiod^1 -local suffix=period/""*(1-period-slashes)^1*-1 -local pattern=C((1-(slashes^1*noslashes^1*-1))^1)*P(1) -local function pathpart(name,default) - return name and lpegmatch(pattern,name) or default or "" -end -local pattern=(noslashes^0*slashes)^1*C(noslashes^1)*-1 -local function basename(name) - return name and lpegmatch(pattern,name) or name -end -local pattern=(noslashes^0*slashes^1)^0*Cs((1-suffix)^1)*suffix^0 -local function nameonly(name) - return name and lpegmatch(pattern,name) or name -end -local pattern=(noslashes^0*slashes)^0*(noperiod^1*period)^1*C(noperiod^1)*-1 -local function suffixonly(name) - return name and lpegmatch(pattern,name) or "" -end -local pattern=(noslashes^0*slashes)^0*noperiod^1*((period*C(noperiod^1))^1)*-1+Cc("") -local function suffixesonly(name) - if name then - return lpegmatch(pattern,name) - else - return "" - end -end -file.pathpart=pathpart -file.basename=basename -file.nameonly=nameonly -file.suffixonly=suffixonly -file.suffix=suffixonly -file.suffixesonly=suffixesonly -file.suffixes=suffixesonly -file.dirname=pathpart -file.extname=suffixonly -local drive=C(R("az","AZ"))*colon -local path=C((noslashes^0*slashes)^0) -local suffix=period*C(P(1-period)^0*P(-1)) -local base=C((1-suffix)^0) -local rest=C(P(1)^0) -drive=drive+Cc("") -path=path+Cc("") -base=base+Cc("") -suffix=suffix+Cc("") -local pattern_a=drive*path*base*suffix -local pattern_b=path*base*suffix -local pattern_c=C(drive*path)*C(base*suffix) -local pattern_d=path*rest -function file.splitname(str,splitdrive) - if not str then - elseif splitdrive then - return lpegmatch(pattern_a,str) - else - return lpegmatch(pattern_b,str) - end -end -function file.splitbase(str) - if str then - return lpegmatch(pattern_d,str) - else - return "",str - end -end -function file.nametotable(str,splitdrive) - if str then - local path,drive,subpath,name,base,suffix=lpegmatch(pattern_c,str) - if splitdrive then - return { - path=path, - drive=drive, - subpath=subpath, - name=name, - base=base, - suffix=suffix, - } - else - return { - path=path, - name=name, - base=base, - suffix=suffix, - } - end - end -end -local pattern=Cs(((period*(1-period-slashes)^1*-1)/""+1)^1) -function file.removesuffix(name) - return name and lpegmatch(pattern,name) -end -local suffix=period/""*(1-period-slashes)^1*-1 -local pattern=Cs((noslashes^0*slashes^1)^0*((1-suffix)^1))*Cs(suffix) -function file.addsuffix(filename,suffix,criterium) - if not filename or not suffix or suffix=="" then - return filename - elseif 
criterium==true then - return filename.."."..suffix - elseif not criterium then - local n,s=lpegmatch(pattern,filename) - if not s or s=="" then - return filename.."."..suffix - else - return filename - end - else - local n,s=lpegmatch(pattern,filename) - if s and s~="" then - local t=type(criterium) - if t=="table" then - for i=1,#criterium do - if s==criterium[i] then - return filename - end - end - elseif t=="string" then - if s==criterium then - return filename - end - end - end - return (n or filename).."."..suffix - end -end -local suffix=period*(1-period-slashes)^1*-1 -local pattern=Cs((1-suffix)^0) -function file.replacesuffix(name,suffix) - if name and suffix and suffix~="" then - return lpegmatch(pattern,name).."."..suffix - else - return name - end -end -local reslasher=lpeg.replacer(P("\\"),"/") -function file.reslash(str) - return str and lpegmatch(reslasher,str) -end -function file.is_writable(name) - if not name then - elseif lfs.isdir(name) then - name=name.."/m_t_x_t_e_s_t.tmp" - local f=io.open(name,"wb") - if f then - f:close() - os.remove(name) - return true - end - elseif lfs.isfile(name) then - local f=io.open(name,"ab") - if f then - f:close() - return true - end - else - local f=io.open(name,"ab") - if f then - f:close() - os.remove(name) - return true - end - end - return false -end -local readable=P("r")*Cc(true) -function file.is_readable(name) - if name then - local a=attributes(name) - return a and lpegmatch(readable,a.permissions) or false - else - return false - end -end -file.isreadable=file.is_readable -file.iswritable=file.is_writable -function file.size(name) - if name then - local a=attributes(name) - return a and a.size or 0 - else - return 0 - end -end -function file.splitpath(str,separator) - return str and checkedsplit(lpegmatch(reslasher,str),separator or io.pathseparator) -end -function file.joinpath(tab,separator) - return tab and concat(tab,separator or io.pathseparator) -end -local someslash=S("\\/") -local stripper=Cs(P(fwslash)^0/""*reslasher) -local isnetwork=someslash*someslash*(1-someslash)+(1-fwslash-colon)^1*colon -local isroot=fwslash^1*-1 -local hasroot=fwslash^1 -local reslasher=lpeg.replacer(S("\\/"),"/") -local deslasher=lpeg.replacer(S("\\/")^1,"/") -function file.join(one,two,three,...) - if not two then - return one=="" and one or lpegmatch(stripper,one) - end - if one=="" then - return lpegmatch(stripper,three and concat({ two,three,... },"/") or two) - end - if lpegmatch(isnetwork,one) then - local one=lpegmatch(reslasher,one) - local two=lpegmatch(deslasher,three and concat({ two,three,... },"/") or two) - if lpegmatch(hasroot,two) then - return one..two - else - return one.."/"..two - end - elseif lpegmatch(isroot,one) then - local two=lpegmatch(deslasher,three and concat({ two,three,... },"/") or two) - if lpegmatch(hasroot,two) then - return two - else - return "/"..two - end - else - return lpegmatch(deslasher,concat({ one,two,three,... },"/")) - end -end -local drivespec=R("az","AZ")^1*colon -local anchors=fwslash+drivespec -local untouched=periods+(1-period)^1*P(-1) -local mswindrive=Cs(drivespec*(bwslash/"/"+fwslash)^0) -local mswinuncpath=(bwslash+fwslash)*(bwslash+fwslash)*Cc("//") -local splitstarter=(mswindrive+mswinuncpath+Cc(false))*Ct(lpeg.splitat(S("/\\")^1)) -local absolute=fwslash -function file.collapsepath(str,anchor) - if not str then - return - end - if anchor==true and not lpegmatch(anchors,str) then - str=getcurrentdir().."/"..str - end - if str=="" or str=="." then - return "." 
- elseif lpegmatch(untouched,str) then - return lpegmatch(reslasher,str) - end - local starter,oldelements=lpegmatch(splitstarter,str) - local newelements={} - local i=#oldelements - while i>0 do - local element=oldelements[i] - if element=='.' then - elseif element=='..' then - local n=i-1 - while n>0 do - local element=oldelements[n] - if element~='..' and element~='.' then - oldelements[n]='.' - break - else - n=n-1 - end - end - if n<1 then - insert(newelements,1,'..') - end - elseif element~="" then - insert(newelements,1,element) - end - i=i-1 - end - if #newelements==0 then - return starter or "." - elseif starter then - return starter..concat(newelements,'/') - elseif lpegmatch(absolute,str) then - return "/"..concat(newelements,'/') - else - newelements=concat(newelements,'/') - if anchor=="." and find(str,"^%./") then - return "./"..newelements - else - return newelements - end - end -end -local tricky=S("/\\")*P(-1) -local attributes=lfs.attributes -function lfs.isdir(name) - if lpegmatch(tricky,name) then - return attributes(name,"mode")=="directory" - else - return attributes(name.."/.","mode")=="directory" - end -end -function lfs.isfile(name) - return attributes(name,"mode")=="file" -end -local validchars=R("az","09","AZ","--","..") -local pattern_a=lpeg.replacer(1-validchars) -local pattern_a=Cs((validchars+P(1)/"-")^1) -local whatever=P("-")^0/"" -local pattern_b=Cs(whatever*(1-whatever*-1)^1) -function file.robustname(str,strict) - if str then - str=lpegmatch(pattern_a,str) or str - if strict then - return lpegmatch(pattern_b,str) or str - else - return str - end - end -end -file.readdata=io.loaddata -file.savedata=io.savedata -function file.copy(oldname,newname) - if oldname and newname then - local data=io.loaddata(oldname) - if data and data~="" then - file.savedata(newname,data) - end - end -end -local letter=R("az","AZ")+S("_-+") -local separator=P("://") -local qualified=period^0*fwslash+letter*colon+letter^1*separator+letter^1*fwslash -local rootbased=fwslash+letter*colon -lpeg.patterns.qualified=qualified -lpeg.patterns.rootbased=rootbased -function file.is_qualified_path(filename) - return filename and lpegmatch(qualified,filename)~=nil -end -function file.is_rootbased_path(filename) - return filename and lpegmatch(rootbased,filename)~=nil -end -function file.strip(name,dir) - if name then - local b,a=match(name,"^(.-)"..dir.."(.*)$") - return a~="" and a or name - end -end -function lfs.mkdirs(path) - local full="" - for sub in gmatch(path,"(/*[^\\/]+)") do - full=full..sub - lfs.mkdir(full) - end -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['l-boolean']={ - version=1.001, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local type,tonumber=type,tonumber -boolean=boolean or {} -local boolean=boolean -function boolean.tonumber(b) - if b then return 1 else return 0 end -end -function toboolean(str,tolerant) - if str==nil then - return false - elseif str==false then - return false - elseif str==true then - return true - elseif str=="true" then - return true - elseif str=="false" then - return false - elseif not tolerant then - return false - elseif str==0 then - return false - elseif (tonumber(str) or 0)>0 then - return true - else - return str=="yes" or str=="on" or str=="t" - end -end -string.toboolean=toboolean -function 
string.booleanstring(str) - if str=="0" then - return false - elseif str=="1" then - return true - elseif str=="" then - return false - elseif str=="false" then - return false - elseif str=="true" then - return true - elseif (tonumber(str) or 0)>0 then - return true - else - return str=="yes" or str=="on" or str=="t" - end -end -function string.is_boolean(str,default,strict) - if type(str)=="string" then - if str=="true" or str=="yes" or str=="on" or str=="t" or (not strict and str=="1") then - return true - elseif str=="false" or str=="no" or str=="off" or str=="f" or (not strict and str=="0") then - return false - end - end - return default -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['l-math']={ - version=1.001, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local floor,sin,cos,tan=math.floor,math.sin,math.cos,math.tan -if not math.round then - function math.round(x) return floor(x+0.5) end -end -if not math.div then - function math.div(n,m) return floor(n/m) end -end -if not math.mod then - function math.mod(n,m) return n%m end -end -local pipi=2*math.pi/360 -if not math.sind then - function math.sind(d) return sin(d*pipi) end - function math.cosd(d) return cos(d*pipi) end - function math.tand(d) return tan(d*pipi) end -end -if not math.odd then - function math.odd (n) return n%2~=0 end - function math.even(n) return n%2==0 end -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['util-str']={ - version=1.001, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -utilities=utilities or {} -utilities.strings=utilities.strings or {} -local strings=utilities.strings -local format,gsub,rep,sub=string.format,string.gsub,string.rep,string.sub -local load,dump=load,string.dump -local tonumber,type,tostring=tonumber,type,tostring -local unpack,concat=table.unpack,table.concat -local P,V,C,S,R,Ct,Cs,Cp,Carg,Cc=lpeg.P,lpeg.V,lpeg.C,lpeg.S,lpeg.R,lpeg.Ct,lpeg.Cs,lpeg.Cp,lpeg.Carg,lpeg.Cc -local patterns,lpegmatch=lpeg.patterns,lpeg.match -local utfchar,utfbyte=utf.char,utf.byte -local loadstripped=nil -if _LUAVERSION<5.2 then - loadstripped=function(str,shortcuts) - return load(str) - end -else - loadstripped=function(str,shortcuts) - if shortcuts then - return load(dump(load(str),true),nil,nil,shortcuts) - else - return load(dump(load(str),true)) - end - end -end -if not number then number={} end -local stripper=patterns.stripzeros -local function points(n) - n=tonumber(n) - return (not n or n==0) and "0pt" or lpegmatch(stripper,format("%.5fpt",n/65536)) -end -local function basepoints(n) - n=tonumber(n) - return (not n or n==0) and "0bp" or lpegmatch(stripper,format("%.5fbp",n*(7200/7227)/65536)) -end -number.points=points -number.basepoints=basepoints -local rubish=patterns.spaceortab^0*patterns.newline -local anyrubish=patterns.spaceortab+patterns.newline -local anything=patterns.anything -local stripped=(patterns.spaceortab^1/"")*patterns.newline -local leading=rubish^0/"" -local trailing=(anyrubish^1*patterns.endofstring)/"" -local redundant=rubish^3/"\n" -local pattern=Cs(leading*(trailing+redundant+stripped+anything)^0) -function 
strings.collapsecrlf(str) - return lpegmatch(pattern,str) -end -local repeaters={} -function strings.newrepeater(str,offset) - offset=offset or 0 - local s=repeaters[str] - if not s then - s={} - repeaters[str]=s - end - local t=s[offset] - if t then - return t - end - t={} - setmetatable(t,{ __index=function(t,k) - if not k then - return "" - end - local n=k+offset - local s=n>0 and rep(str,n) or "" - t[k]=s - return s - end }) - s[offset]=t - return t -end -local extra,tab,start=0,0,4,0 -local nspaces=strings.newrepeater(" ") -string.nspaces=nspaces -local pattern=Carg(1)/function(t) - extra,tab,start=0,t or 7,1 - end*Cs(( - Cp()*patterns.tab/function(position) - local current=(position-start+1)+extra - local spaces=tab-(current-1)%tab - if spaces>0 then - extra=extra+spaces-1 - return nspaces[spaces] - else - return "" - end - end+patterns.newline*Cp()/function(position) - extra,start=0,position - end+patterns.anything - )^1) -function strings.tabtospace(str,tab) - return lpegmatch(pattern,str,1,tab or 7) -end -local newline=patterns.newline -local endofstring=patterns.endofstring -local whitespace=patterns.whitespace -local spacer=patterns.spacer -local space=spacer^0 -local nospace=space/"" -local endofline=nospace*newline -local stripend=(whitespace^1*endofstring)/"" -local normalline=(nospace*((1-space*(newline+endofstring))^1)*nospace) -local stripempty=endofline^1/"" -local normalempty=endofline^1 -local singleempty=endofline*(endofline^0/"") -local doubleempty=endofline*endofline^-1*(endofline^0/"") -local stripstart=stripempty^0 -local p_prune_normal=Cs (stripstart*(stripend+normalline+normalempty )^0 ) -local p_prune_collapse=Cs (stripstart*(stripend+normalline+doubleempty )^0 ) -local p_prune_noempty=Cs (stripstart*(stripend+normalline+singleempty )^0 ) -local p_retain_normal=Cs ((normalline+normalempty )^0 ) -local p_retain_collapse=Cs ((normalline+doubleempty )^0 ) -local p_retain_noempty=Cs ((normalline+singleempty )^0 ) -local striplinepatterns={ - ["prune"]=p_prune_normal, - ["prune and collapse"]=p_prune_collapse, - ["prune and no empty"]=p_prune_noempty, - ["retain"]=p_retain_normal, - ["retain and collapse"]=p_retain_collapse, - ["retain and no empty"]=p_retain_noempty, - ["collapse"]=patterns.collapser, -} -strings.striplinepatterns=striplinepatterns -function strings.striplines(str,how) - return str and lpegmatch(how and striplinepatterns[how] or p_prune_collapse,str) or str -end -strings.striplong=strings.striplines -function strings.nice(str) - str=gsub(str,"[:%-+_]+"," ") - return str -end -local n=0 -local sequenced=table.sequenced -function string.autodouble(s,sep) - if s==nil then - return '""' - end - local t=type(s) - if t=="number" then - return tostring(s) - end - if t=="table" then - return ('"'..sequenced(s,sep or ",")..'"') - end - return ('"'..tostring(s)..'"') -end -function string.autosingle(s,sep) - if s==nil then - return "''" - end - local t=type(s) - if t=="number" then - return tostring(s) - end - if t=="table" then - return ("'"..sequenced(s,sep or ",").."'") - end - return ("'"..tostring(s).."'") -end -local tracedchars={} -string.tracedchars=tracedchars -strings.tracers=tracedchars -function string.tracedchar(b) - if type(b)=="number" then - return tracedchars[b] or (utfchar(b).." (U+"..format('%05X',b)..")") - else - local c=utfbyte(b) - return tracedchars[c] or (b.." 
(U+"..format('%05X',c)..")") - end -end -function number.signed(i) - if i>0 then - return "+",i - else - return "-",-i - end -end -local zero=P("0")^1/"" -local plus=P("+")/"" -local minus=P("-") -local separator=S(".") -local digit=R("09") -local trailing=zero^1*#S("eE") -local exponent=(S("eE")*(plus+Cs((minus*zero^0*P(-1))/"")+minus)*zero^0*(P(-1)*Cc("0")+P(1)^1)) -local pattern_a=Cs(minus^0*digit^1*(separator/""*trailing+separator*(trailing+digit)^0)*exponent) -local pattern_b=Cs((exponent+P(1))^0) -function number.sparseexponent(f,n) - if not n then - n=f - f="%e" - end - local tn=type(n) - if tn=="string" then - local m=tonumber(n) - if m then - return lpegmatch((f=="%e" or f=="%E") and pattern_a or pattern_b,format(f,m)) - end - elseif tn=="number" then - return lpegmatch((f=="%e" or f=="%E") and pattern_a or pattern_b,format(f,n)) - end - return tostring(n) -end -local template=[[ -%s -%s -return function(%s) return %s end -]] -local preamble,environment="",{} -if _LUAVERSION<5.2 then - preamble=[[ -local lpeg=lpeg -local type=type -local tostring=tostring -local tonumber=tonumber -local format=string.format -local concat=table.concat -local signed=number.signed -local points=number.points -local basepoints= number.basepoints -local utfchar=utf.char -local utfbyte=utf.byte -local lpegmatch=lpeg.match -local nspaces=string.nspaces -local tracedchar=string.tracedchar -local autosingle=string.autosingle -local autodouble=string.autodouble -local sequenced=table.sequenced -local formattednumber=number.formatted -local sparseexponent=number.sparseexponent - ]] -else - environment={ - global=global or _G, - lpeg=lpeg, - type=type, - tostring=tostring, - tonumber=tonumber, - format=string.format, - concat=table.concat, - signed=number.signed, - points=number.points, - basepoints=number.basepoints, - utfchar=utf.char, - utfbyte=utf.byte, - lpegmatch=lpeg.match, - nspaces=string.nspaces, - tracedchar=string.tracedchar, - autosingle=string.autosingle, - autodouble=string.autodouble, - sequenced=table.sequenced, - formattednumber=number.formatted, - sparseexponent=number.sparseexponent, - } -end -local arguments={ "a1" } -setmetatable(arguments,{ __index=function(t,k) - local v=t[k-1]..",a"..k - t[k]=v - return v - end -}) -local prefix_any=C((S("+- .")+R("09"))^0) -local prefix_tab=P("{")*C((1-P("}"))^0)*P("}")+C((1-R("az","AZ","09","%%"))^0) -local format_s=function(f) - n=n+1 - if f and f~="" then - return format("format('%%%ss',a%s)",f,n) - else - return format("(a%s or '')",n) - end -end -local format_S=function(f) - n=n+1 - if f and f~="" then - return format("format('%%%ss',tostring(a%s))",f,n) - else - return format("tostring(a%s)",n) - end -end -local format_q=function() - n=n+1 - return format("(a%s and format('%%q',a%s) or '')",n,n) -end -local format_Q=function() - n=n+1 - return format("format('%%q',tostring(a%s))",n) -end -local format_i=function(f) - n=n+1 - if f and f~="" then - return format("format('%%%si',a%s)",f,n) - else - return format("format('%%i',a%s)",n) - end -end -local format_d=format_i -local format_I=function(f) - n=n+1 - return format("format('%%s%%%si',signed(a%s))",f,n) -end -local format_f=function(f) - n=n+1 - return format("format('%%%sf',a%s)",f,n) -end -local format_F=function(f) - n=n+1 - if not f or f=="" then - return format("(((a%s > -0.0000000005 and a%s < 0.0000000005) and '0') or format((a%s %% 1 == 0) and '%%i' or '%%.9f',a%s))",n,n,n,n) - else - return format("format((a%s %% 1 == 0) and '%%i' or '%%%sf',a%s)",n,f,n) - end -end -local 
format_g=function(f) - n=n+1 - return format("format('%%%sg',a%s)",f,n) -end -local format_G=function(f) - n=n+1 - return format("format('%%%sG',a%s)",f,n) -end -local format_e=function(f) - n=n+1 - return format("format('%%%se',a%s)",f,n) -end -local format_E=function(f) - n=n+1 - return format("format('%%%sE',a%s)",f,n) -end -local format_j=function(f) - n=n+1 - return format("sparseexponent('%%%se',a%s)",f,n) -end -local format_J=function(f) - n=n+1 - return format("sparseexponent('%%%sE',a%s)",f,n) -end -local format_x=function(f) - n=n+1 - return format("format('%%%sx',a%s)",f,n) -end -local format_X=function(f) - n=n+1 - return format("format('%%%sX',a%s)",f,n) -end -local format_o=function(f) - n=n+1 - return format("format('%%%so',a%s)",f,n) -end -local format_c=function() - n=n+1 - return format("utfchar(a%s)",n) -end -local format_C=function() - n=n+1 - return format("tracedchar(a%s)",n) -end -local format_r=function(f) - n=n+1 - return format("format('%%%s.0f',a%s)",f,n) -end -local format_h=function(f) - n=n+1 - if f=="-" then - f=sub(f,2) - return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) - else - return format("format('0x%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) - end -end -local format_H=function(f) - n=n+1 - if f=="-" then - f=sub(f,2) - return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) - else - return format("format('0x%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) - end -end -local format_u=function(f) - n=n+1 - if f=="-" then - f=sub(f,2) - return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) - else - return format("format('u+%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) - end -end -local format_U=function(f) - n=n+1 - if f=="-" then - f=sub(f,2) - return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) - else - return format("format('U+%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) - end -end -local format_p=function() - n=n+1 - return format("points(a%s)",n) -end -local format_b=function() - n=n+1 - return format("basepoints(a%s)",n) -end -local format_t=function(f) - n=n+1 - if f and f~="" then - return format("concat(a%s,%q)",n,f) - else - return format("concat(a%s)",n) - end -end -local format_T=function(f) - n=n+1 - if f and f~="" then - return format("sequenced(a%s,%q)",n,f) - else - return format("sequenced(a%s)",n) - end -end -local format_l=function() - n=n+1 - return format("(a%s and 'true' or 'false')",n) -end -local format_L=function() - n=n+1 - return format("(a%s and 'TRUE' or 'FALSE')",n) -end -local format_N=function() - n=n+1 - return format("tostring(tonumber(a%s) or a%s)",n,n) -end -local format_a=function(f) - n=n+1 - if f and f~="" then - return format("autosingle(a%s,%q)",n,f) - else - return format("autosingle(a%s)",n) - end -end -local format_A=function(f) - n=n+1 - if f and f~="" then - return format("autodouble(a%s,%q)",n,f) - else - return format("autodouble(a%s)",n) - end -end -local format_w=function(f) - n=n+1 - f=tonumber(f) - if f then - return format("nspaces[%s+a%s]",f,n) - else - return format("nspaces[a%s]",n) - end -end -local format_W=function(f) - return format("nspaces[%s]",tonumber(f) or 0) -end -local digit=patterns.digit -local period=patterns.period -local 
three=digit*digit*digit -local splitter=Cs ( - (((1-(three^1*period))^1+C(three))*(Carg(1)*three)^1+C((1-period)^1))*(P(1)/""*Carg(2))*C(2) -) -patterns.formattednumber=splitter -function number.formatted(n,sep1,sep2) - local s=type(s)=="string" and n or format("%0.2f",n) - if sep1==true then - return lpegmatch(splitter,s,1,".",",") - elseif sep1=="." then - return lpegmatch(splitter,s,1,sep1,sep2 or ",") - elseif sep1=="," then - return lpegmatch(splitter,s,1,sep1,sep2 or ".") - else - return lpegmatch(splitter,s,1,sep1 or ",",sep2 or ".") - end -end -local format_m=function(f) - n=n+1 - if not f or f=="" then - f="," - end - return format([[formattednumber(a%s,%q,".")]],n,f) -end -local format_M=function(f) - n=n+1 - if not f or f=="" then - f="." - end - return format([[formattednumber(a%s,%q,",")]],n,f) -end -local format_z=function(f) - n=n+(tonumber(f) or 1) - return "''" -end -local format_rest=function(s) - return format("%q",s) -end -local format_extension=function(extensions,f,name) - local extension=extensions[name] or "tostring(%s)" - local f=tonumber(f) or 1 - if f==0 then - return extension - elseif f==1 then - n=n+1 - local a="a"..n - return format(extension,a,a) - elseif f<0 then - local a="a"..(n+f+1) - return format(extension,a,a) - else - local t={} - for i=1,f do - n=n+1 - t[#t+1]="a"..n - end - return format(extension,unpack(t)) - end -end -local builder=Cs { "start", - start=( - ( - P("%")/""*( - V("!") -+V("s")+V("q")+V("i")+V("d")+V("f")+V("F")+V("g")+V("G")+V("e")+V("E")+V("x")+V("X")+V("o") -+V("c")+V("C")+V("S") -+V("Q") -+V("N") -+V("r")+V("h")+V("H")+V("u")+V("U")+V("p")+V("b")+V("t")+V("T")+V("l")+V("L")+V("I")+V("w") -+V("W") -+V("a") -+V("A") -+V("j")+V("J") -+V("m")+V("M") -+V("z") - )+V("*") - )*(P(-1)+Carg(1)) - )^0, - ["s"]=(prefix_any*P("s"))/format_s, - ["q"]=(prefix_any*P("q"))/format_q, - ["i"]=(prefix_any*P("i"))/format_i, - ["d"]=(prefix_any*P("d"))/format_d, - ["f"]=(prefix_any*P("f"))/format_f, - ["F"]=(prefix_any*P("F"))/format_F, - ["g"]=(prefix_any*P("g"))/format_g, - ["G"]=(prefix_any*P("G"))/format_G, - ["e"]=(prefix_any*P("e"))/format_e, - ["E"]=(prefix_any*P("E"))/format_E, - ["x"]=(prefix_any*P("x"))/format_x, - ["X"]=(prefix_any*P("X"))/format_X, - ["o"]=(prefix_any*P("o"))/format_o, - ["S"]=(prefix_any*P("S"))/format_S, - ["Q"]=(prefix_any*P("Q"))/format_S, - ["N"]=(prefix_any*P("N"))/format_N, - ["c"]=(prefix_any*P("c"))/format_c, - ["C"]=(prefix_any*P("C"))/format_C, - ["r"]=(prefix_any*P("r"))/format_r, - ["h"]=(prefix_any*P("h"))/format_h, - ["H"]=(prefix_any*P("H"))/format_H, - ["u"]=(prefix_any*P("u"))/format_u, - ["U"]=(prefix_any*P("U"))/format_U, - ["p"]=(prefix_any*P("p"))/format_p, - ["b"]=(prefix_any*P("b"))/format_b, - ["t"]=(prefix_tab*P("t"))/format_t, - ["T"]=(prefix_tab*P("T"))/format_T, - ["l"]=(prefix_any*P("l"))/format_l, - ["L"]=(prefix_any*P("L"))/format_L, - ["I"]=(prefix_any*P("I"))/format_I, - ["w"]=(prefix_any*P("w"))/format_w, - ["W"]=(prefix_any*P("W"))/format_W, - ["j"]=(prefix_any*P("j"))/format_j, - ["J"]=(prefix_any*P("J"))/format_J, - ["m"]=(prefix_tab*P("m"))/format_m, - ["M"]=(prefix_tab*P("M"))/format_M, - ["z"]=(prefix_any*P("z"))/format_z, - ["a"]=(prefix_any*P("a"))/format_a, - ["A"]=(prefix_any*P("A"))/format_A, - ["*"]=Cs(((1-P("%"))^1+P("%%")/"%%")^1)/format_rest, - ["?"]=Cs(((1-P("%"))^1 )^1)/format_rest, - ["!"]=Carg(2)*prefix_any*P("!")*C((1-P("!"))^1)*P("!")/format_extension, -} -local direct=Cs ( - P("%")*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*P(-1)/[[local format = string.format return 
function(str) return format("%0",str) end]] -) -local function make(t,str) - local f - local p - local p=lpegmatch(direct,str) - if p then - f=loadstripped(p)() - else - n=0 - p=lpegmatch(builder,str,1,t._connector_,t._extensions_) - if n>0 then - p=format(template,preamble,t._preamble_,arguments[n],p) - f=loadstripped(p,t._environment_)() - else - f=function() return str end - end - end - t[str]=f - return f -end -local function use(t,fmt,...) - return t[fmt](...) -end -strings.formatters={} -if _LUAVERSION<5.2 then - function strings.formatters.new(noconcat) - local t={ _type_="formatter",_connector_=noconcat and "," or "..",_extensions_={},_preamble_=preamble,_environment_={} } - setmetatable(t,{ __index=make,__call=use }) - return t - end -else - function strings.formatters.new(noconcat) - local e={} - for k,v in next,environment do - e[k]=v - end - local t={ _type_="formatter",_connector_=noconcat and "," or "..",_extensions_={},_preamble_="",_environment_=e } - setmetatable(t,{ __index=make,__call=use }) - return t - end -end -local formatters=strings.formatters.new() -string.formatters=formatters -string.formatter=function(str,...) return formatters[str](...) end -local function add(t,name,template,preamble) - if type(t)=="table" and t._type_=="formatter" then - t._extensions_[name]=template or "%s" - if type(preamble)=="string" then - t._preamble_=preamble.."\n"..t._preamble_ - elseif type(preamble)=="table" then - for k,v in next,preamble do - t._environment_[k]=v - end - end - end -end -strings.formatters.add=add -patterns.xmlescape=Cs((P("<")/"&lt;"+P(">")/"&gt;"+P("&")/"&amp;"+P('"')/"&quot;"+P(1))^0) -patterns.texescape=Cs((C(S("#$%\\{}"))/"\\%1"+P(1))^0) -patterns.luaescape=Cs(((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0) -patterns.luaquoted=Cs(Cc('"')*((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0*Cc('"')) -if _LUAVERSION<5.2 then - add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],"local xmlescape = lpeg.patterns.xmlescape") - add(formatters,"tex",[[lpegmatch(texescape,%s)]],"local texescape = lpeg.patterns.texescape") - add(formatters,"lua",[[lpegmatch(luaescape,%s)]],"local luaescape = lpeg.patterns.luaescape") -else - add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],{ xmlescape=lpeg.patterns.xmlescape }) - add(formatters,"tex",[[lpegmatch(texescape,%s)]],{ texescape=lpeg.patterns.texescape }) - add(formatters,"lua",[[lpegmatch(luaescape,%s)]],{ luaescape=lpeg.patterns.luaescape }) -end -local dquote=patterns.dquote -local equote=patterns.escaped+dquote/'\\"'+1 -local space=patterns.space -local cquote=Cc('"') -local pattern=Cs(dquote*(equote-P(-2))^0*dquote) -+Cs(cquote*(equote-space)^0*space*equote^0*cquote) -function string.optionalquoted(str) - return lpegmatch(pattern,str) or str -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['luat-basics-gen']={ - version=1.100, - comment="companion to luatex-*.tex", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end -local dummyfunction=function() -end -local dummyreporter=function(c) - return function(...) - (texio.reporter or texio.write_nl)(c.." 
: "..string.formatters(...)) - end -end -statistics={ - register=dummyfunction, - starttiming=dummyfunction, - stoptiming=dummyfunction, - elapsedtime=nil, -} -directives={ - register=dummyfunction, - enable=dummyfunction, - disable=dummyfunction, -} -trackers={ - register=dummyfunction, - enable=dummyfunction, - disable=dummyfunction, -} -experiments={ - register=dummyfunction, - enable=dummyfunction, - disable=dummyfunction, -} -storage={ - register=dummyfunction, - shared={}, -} -logs={ - new=dummyreporter, - reporter=dummyreporter, - messenger=dummyreporter, - report=dummyfunction, -} -callbacks={ - register=function(n,f) return callback.register(n,f) end, -} -utilities={ - storage={ - allocate=function(t) return t or {} end, - mark=function(t) return t or {} end, - }, -} -characters=characters or { - data={} -} -texconfig.kpse_init=true -resolvers=resolvers or {} -local remapper={ - otf="opentype fonts", - ttf="truetype fonts", - ttc="truetype fonts", - dfont="truetype fonts", - cid="cid maps", - cidmap="cid maps", - fea="font feature files", - pfa="type1 fonts", - pfb="type1 fonts", - afm="afm", -} -function resolvers.findfile(name,fileformat) - name=string.gsub(name,"\\","/") - if not fileformat or fileformat=="" then - fileformat=file.suffix(name) - if fileformat=="" then - fileformat="tex" - end - end - fileformat=string.lower(fileformat) - fileformat=remapper[fileformat] or fileformat - local found=kpse.find_file(name,fileformat) - if not found or found=="" then - found=kpse.find_file(name,"other text files") - end - return found -end -resolvers.findbinfile=resolvers.findfile -function resolvers.loadbinfile(filename,filetype) - local data=io.loaddata(filename) - return true,data,#data -end -function resolvers.resolve(s) - return s -end -function resolvers.unresolve(s) - return s -end -caches={} -local writable=nil -local readables={} -local usingjit=jit -if not caches.namespace or caches.namespace=="" or caches.namespace=="context" then - caches.namespace='generic' -end -do - local cachepaths=kpse.expand_var('$TEXMFCACHE') or "" - if cachepaths=="" or cachepaths=="$TEXMFCACHE" then - cachepaths=kpse.expand_var('$TEXMFVAR') or "" - end - if cachepaths=="" or cachepaths=="$TEXMFVAR" then - cachepaths=kpse.expand_var('$VARTEXMF') or "" - end - if cachepaths=="" then - local fallbacks={ "TMPDIR","TEMPDIR","TMP","TEMP","HOME","HOMEPATH" } - for i=1,#fallbacks do - cachepaths=os.getenv(fallbacks[i]) or "" - if cachepath~="" and lfs.isdir(cachepath) then - break - end - end - end - if cachepaths=="" then - cachepaths="." 
- end - cachepaths=string.split(cachepaths,os.type=="windows" and ";" or ":") - for i=1,#cachepaths do - local cachepath=cachepaths[i] - if not lfs.isdir(cachepath) then - lfs.mkdirs(cachepath) - if lfs.isdir(cachepath) then - texio.write(string.format("(created cache path: %s)",cachepath)) - end - end - if file.is_writable(cachepath) then - writable=file.join(cachepath,"luatex-cache") - lfs.mkdir(writable) - writable=file.join(writable,caches.namespace) - lfs.mkdir(writable) - break - end - end - for i=1,#cachepaths do - if file.is_readable(cachepaths[i]) then - readables[#readables+1]=file.join(cachepaths[i],"luatex-cache",caches.namespace) - end - end - if not writable then - texio.write_nl("quiting: fix your writable cache path") - os.exit() - elseif #readables==0 then - texio.write_nl("quiting: fix your readable cache path") - os.exit() - elseif #readables==1 and readables[1]==writable then - texio.write(string.format("(using cache: %s)",writable)) - else - texio.write(string.format("(using write cache: %s)",writable)) - texio.write(string.format("(using read cache: %s)",table.concat(readables," "))) - end -end -function caches.getwritablepath(category,subcategory) - local path=file.join(writable,category) - lfs.mkdir(path) - path=file.join(path,subcategory) - lfs.mkdir(path) - return path -end -function caches.getreadablepaths(category,subcategory) - local t={} - for i=1,#readables do - t[i]=file.join(readables[i],category,subcategory) - end - return t -end -local function makefullname(path,name) - if path and path~="" then - return file.addsuffix(file.join(path,name),"lua"),file.addsuffix(file.join(path,name),usingjit and "lub" or "luc") - end -end -function caches.is_writable(path,name) - local fullname=makefullname(path,name) - return fullname and file.is_writable(fullname) -end -function caches.loaddata(paths,name) - for i=1,#paths do - local data=false - local luaname,lucname=makefullname(paths[i],name) - if lucname and not lfs.isfile(lucname) and type(caches.compile)=="function" then - texio.write(string.format("(compiling luc: %s)",lucname)) - data=loadfile(luaname) - if data then - data=data() - end - if data then - caches.compile(data,luaname,lucname) - return data - end - end - if lucname and lfs.isfile(lucname) then - texio.write(string.format("(load luc: %s)",lucname)) - data=loadfile(lucname) - if data then - data=data() - end - if data then - return data - else - texio.write(string.format("(loading failed: %s)",lucname)) - end - end - if luaname and lfs.isfile(luaname) then - texio.write(string.format("(load lua: %s)",luaname)) - data=loadfile(luaname) - if data then - data=data() - end - if data then - return data - end - end - end -end -function caches.savedata(path,name,data) - local luaname,lucname=makefullname(path,name) - if luaname then - texio.write(string.format("(save: %s)",luaname)) - table.tofile(luaname,data,true) - if lucname and type(caches.compile)=="function" then - os.remove(lucname) - texio.write(string.format("(save: %s)",lucname)) - caches.compile(data,luaname,lucname) - end - end -end -function caches.compile(data,luaname,lucname) - local d=io.loaddata(luaname) - if not d or d=="" then - d=table.serialize(data,true) - end - if d and d~="" then - local f=io.open(lucname,'wb') - if f then - local s=loadstring(d) - if s then - f:write(string.dump(s,true)) - end - f:close() - end - end -end -function table.setmetatableindex(t,f) - if type(t)~="table" then - f=f or t - t={} - end - setmetatable(t,{ __index=f }) - return t -end -arguments={} -if arg then 
- for i=1,#arg do - local k,v=string.match(arg[i],"^%-%-([^=]+)=?(.-)$") - if k and v then - arguments[k]=v - end - end -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['data-con']={ - version=1.100, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local format,lower,gsub=string.format,string.lower,string.gsub -local trace_cache=false trackers.register("resolvers.cache",function(v) trace_cache=v end) -local trace_containers=false trackers.register("resolvers.containers",function(v) trace_containers=v end) -local trace_storage=false trackers.register("resolvers.storage",function(v) trace_storage=v end) -containers=containers or {} -local containers=containers -containers.usecache=true -local report_containers=logs.reporter("resolvers","containers") -local allocated={} -local mt={ - __index=function(t,k) - if k=="writable" then - local writable=caches.getwritablepath(t.category,t.subcategory) or { "." } - t.writable=writable - return writable - elseif k=="readables" then - local readables=caches.getreadablepaths(t.category,t.subcategory) or { "." } - t.readables=readables - return readables - end - end, - __storage__=true -} -function containers.define(category,subcategory,version,enabled) - if category and subcategory then - local c=allocated[category] - if not c then - c={} - allocated[category]=c - end - local s=c[subcategory] - if not s then - s={ - category=category, - subcategory=subcategory, - storage={}, - enabled=enabled, - version=version or math.pi, - trace=false, - } - setmetatable(s,mt) - c[subcategory]=s - end - return s - end -end -function containers.is_usable(container,name) - return container.enabled and caches and caches.is_writable(container.writable,name) -end -function containers.is_valid(container,name) - if name and name~="" then - local storage=container.storage[name] - return storage and storage.cache_version==container.version - else - return false - end -end -function containers.read(container,name) - local storage=container.storage - local stored=storage[name] - if not stored and container.enabled and caches and containers.usecache then - stored=caches.loaddata(container.readables,name) - if stored and stored.cache_version==container.version then - if trace_cache or trace_containers then - report_containers("action %a, category %a, name %a","load",container.subcategory,name) - end - else - stored=nil - end - storage[name]=stored - elseif stored then - if trace_cache or trace_containers then - report_containers("action %a, category %a, name %a","reuse",container.subcategory,name) - end - end - return stored -end -function containers.write(container,name,data) - if data then - data.cache_version=container.version - if container.enabled and caches then - local unique,shared=data.unique,data.shared - data.unique,data.shared=nil,nil - caches.savedata(container.writable,name,data) - if trace_cache or trace_containers then - report_containers("action %a, category %a, name %a","save",container.subcategory,name) - end - data.unique,data.shared=unique,shared - end - if trace_cache or trace_containers then - report_containers("action %a, category %a, name %a","store",container.subcategory,name) - end - container.storage[name]=data - end - return data -end -function containers.content(container,name) - return container.storage[name] -end -function 
containers.cleanname(name) - return (gsub(lower(name),"[^%w\128-\255]+","-")) -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['luatex-fonts-nod']={ - version=1.001, - comment="companion to luatex-fonts.lua", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end -if tex.attribute[0]~=0 then - texio.write_nl("log","!") - texio.write_nl("log","! Attribute 0 is reserved for ConTeXt's font feature management and has to be") - texio.write_nl("log","! set to zero. Also, some attributes in the range 1-255 are used for special") - texio.write_nl("log","! purposes so setting them at the TeX end might break the font handler.") - texio.write_nl("log","!") - tex.attribute[0]=0 -end -attributes=attributes or {} -attributes.unsetvalue=-0x7FFFFFFF -local numbers,last={},127 -attributes.private=attributes.private or function(name) - local number=numbers[name] - if not number then - if last<255 then - last=last+1 - end - number=last - numbers[name]=number - end - return number -end -nodes={} -nodes.pool={} -nodes.handlers={} -local nodecodes={} for k,v in next,node.types () do nodecodes[string.gsub(v,"_","")]=k end -local whatcodes={} for k,v in next,node.whatsits() do whatcodes[string.gsub(v,"_","")]=k end -local glyphcodes={ [0]="character","glyph","ligature","ghost","left","right" } -local disccodes={ [0]="discretionary","explicit","automatic","regular","first","second" } -nodes.nodecodes=nodecodes -nodes.whatcodes=whatcodes -nodes.whatsitcodes=whatcodes -nodes.glyphcodes=glyphcodes -nodes.disccodes=disccodes -local free_node=node.free -local remove_node=node.remove -local new_node=node.new -local traverse_id=node.traverse_id -nodes.handlers.protectglyphs=node.protect_glyphs -nodes.handlers.unprotectglyphs=node.unprotect_glyphs -local math_code=nodecodes.math -local end_of_math=node.end_of_math -function node.end_of_math(n) - if n.id==math_code and n.subtype==1 then - return n - else - return end_of_math(n) - end -end -function nodes.remove(head,current,free_too) - local t=current - head,current=remove_node(head,current) - if t then - if free_too then - free_node(t) - t=nil - else - t.next,t.prev=nil,nil - end - end - return head,current,t -end -function nodes.delete(head,current) - return nodes.remove(head,current,true) -end -function nodes.pool.kern(k) - local n=new_node("kern",1) - n.kern=k - return n -end -local getfield=node.getfield -local setfield=node.setfield -nodes.getfield=getfield -nodes.setfield=setfield -nodes.getattr=getfield -nodes.setattr=setfield -nodes.tostring=node.tostring or tostring -nodes.copy=node.copy -nodes.copy_list=node.copy_list -nodes.delete=node.delete -nodes.dimensions=node.dimensions -nodes.end_of_math=node.end_of_math -nodes.flush_list=node.flush_list -nodes.flush_node=node.flush_node -nodes.free=node.free -nodes.insert_after=node.insert_after -nodes.insert_before=node.insert_before -nodes.hpack=node.hpack -nodes.new=node.new -nodes.tail=node.tail -nodes.traverse=node.traverse -nodes.traverse_id=node.traverse_id -nodes.slide=node.slide -nodes.vpack=node.vpack -nodes.first_glyph=node.first_glyph -nodes.first_character=node.first_character -nodes.has_glyph=node.has_glyph or node.first_glyph -nodes.current_attr=node.current_attr -nodes.do_ligature_n=node.do_ligature_n -nodes.has_field=node.has_field 
-nodes.last_node=node.last_node -nodes.usedlist=node.usedlist -nodes.protrusion_skippable=node.protrusion_skippable -nodes.write=node.write -nodes.has_attribute=node.has_attribute -nodes.set_attribute=node.set_attribute -nodes.unset_attribute=node.unset_attribute -nodes.protect_glyphs=node.protect_glyphs -nodes.unprotect_glyphs=node.unprotect_glyphs -nodes.kerning=node.kerning -nodes.ligaturing=node.ligaturing -nodes.mlist_to_hlist=node.mlist_to_hlist -local direct=node.direct -local nuts={} -nodes.nuts=nuts -local tonode=direct.tonode -local tonut=direct.todirect -nodes.tonode=tonode -nodes.tonut=tonut -nuts.tonode=tonode -nuts.tonut=tonut -local getfield=direct.getfield -local setfield=direct.setfield -nuts.getfield=getfield -nuts.setfield=setfield -nuts.getnext=direct.getnext -nuts.getprev=direct.getprev -nuts.getid=direct.getid -nuts.getattr=getfield -nuts.setattr=setfield -nuts.getfont=direct.getfont -nuts.getsubtype=direct.getsubtype -nuts.getchar=direct.getchar -nuts.insert_before=direct.insert_before -nuts.insert_after=direct.insert_after -nuts.delete=direct.delete -nuts.copy=direct.copy -nuts.tail=direct.tail -nuts.flush_list=direct.flush_list -nuts.end_of_math=direct.end_of_math -nuts.traverse=direct.traverse -nuts.traverse_id=direct.traverse_id -nuts.getprop=nuts.getattr -nuts.setprop=nuts.setattr -local new_nut=direct.new -nuts.new=new_nut -nuts.pool={} -function nuts.pool.kern(k) - local n=new_nut("kern",1) - setfield(n,"kern",k) - return n -end -local propertydata=direct.get_properties_table() -nodes.properties={ data=propertydata } -direct.set_properties_mode(true,true) -function direct.set_properties_mode() end -nuts.getprop=function(n,k) - local p=propertydata[n] - if p then - return p[k] - end -end -nuts.setprop=function(n,k,v) - if v then - local p=propertydata[n] - if p then - p[k]=v - else - propertydata[n]={ [k]=v } - end - end -end -nodes.setprop=nodes.setproperty -nodes.getprop=nodes.getproperty - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-ini']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local allocate=utilities.storage.allocate -local report_defining=logs.reporter("fonts","defining") -fonts=fonts or {} -local fonts=fonts -fonts.hashes={ identifiers=allocate() } -fonts.tables=fonts.tables or {} -fonts.helpers=fonts.helpers or {} -fonts.tracers=fonts.tracers or {} -fonts.specifiers=fonts.specifiers or {} -fonts.analyzers={} -fonts.readers={} -fonts.definers={ methods={} } -fonts.loggers={ register=function() end } -fontloader.totable=fontloader.to_table - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-con']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local next,tostring,rawget=next,tostring,rawget -local format,match,lower,gsub=string.format,string.match,string.lower,string.gsub -local utfbyte=utf.byte -local sort,insert,concat,sortedkeys,serialize,fastcopy=table.sort,table.insert,table.concat,table.sortedkeys,table.serialize,table.fastcopy -local derivetable=table.derive -local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end) 
-local trace_scaling=false trackers.register("fonts.scaling",function(v) trace_scaling=v end) -local report_defining=logs.reporter("fonts","defining") -local fonts=fonts -local constructors=fonts.constructors or {} -fonts.constructors=constructors -local handlers=fonts.handlers or {} -fonts.handlers=handlers -local allocate=utilities.storage.allocate -local setmetatableindex=table.setmetatableindex -constructors.dontembed=allocate() -constructors.autocleanup=true -constructors.namemode="fullpath" -constructors.version=1.01 -constructors.cache=containers.define("fonts","constructors",constructors.version,false) -constructors.privateoffset=0xF0000 -constructors.cacheintex=true -constructors.keys={ - properties={ - encodingbytes="number", - embedding="number", - cidinfo={}, - format="string", - fontname="string", - fullname="string", - filename="filename", - psname="string", - name="string", - virtualized="boolean", - hasitalics="boolean", - autoitalicamount="basepoints", - nostackmath="boolean", - noglyphnames="boolean", - mode="string", - hasmath="boolean", - mathitalics="boolean", - textitalics="boolean", - finalized="boolean", - }, - parameters={ - mathsize="number", - scriptpercentage="float", - scriptscriptpercentage="float", - units="cardinal", - designsize="scaledpoints", - expansion={ - stretch="integerscale", - shrink="integerscale", - step="integerscale", - auto="boolean", - }, - protrusion={ - auto="boolean", - }, - slantfactor="float", - extendfactor="float", - factor="float", - hfactor="float", - vfactor="float", - size="scaledpoints", - units="scaledpoints", - scaledpoints="scaledpoints", - slantperpoint="scaledpoints", - spacing={ - width="scaledpoints", - stretch="scaledpoints", - shrink="scaledpoints", - extra="scaledpoints", - }, - xheight="scaledpoints", - quad="scaledpoints", - ascender="scaledpoints", - descender="scaledpoints", - synonyms={ - space="spacing.width", - spacestretch="spacing.stretch", - spaceshrink="spacing.shrink", - extraspace="spacing.extra", - x_height="xheight", - space_stretch="spacing.stretch", - space_shrink="spacing.shrink", - extra_space="spacing.extra", - em="quad", - ex="xheight", - slant="slantperpoint", - }, - }, - description={ - width="basepoints", - height="basepoints", - depth="basepoints", - boundingbox={}, - }, - character={ - width="scaledpoints", - height="scaledpoints", - depth="scaledpoints", - italic="scaledpoints", - }, -} -local designsizes=allocate() -constructors.designsizes=designsizes -local loadedfonts=allocate() -constructors.loadedfonts=loadedfonts -local factors={ - pt=65536.0, - bp=65781.8, -} -function constructors.setfactor(f) - constructors.factor=factors[f or 'pt'] or factors.pt -end -constructors.setfactor() -function constructors.scaled(scaledpoints,designsize) - if scaledpoints<0 then - if designsize then - local factor=constructors.factor - if designsize>factor then - return (- scaledpoints/1000)*designsize - else - return (- scaledpoints/1000)*designsize*factor - end - else - return (- scaledpoints/1000)*10*factor - end - else - return scaledpoints - end -end -function constructors.cleanuptable(tfmdata) - if constructors.autocleanup and tfmdata.properties.virtualized then - for k,v in next,tfmdata.characters do - if v.commands then v.commands=nil end - end - end -end -function constructors.calculatescale(tfmdata,scaledpoints) - local parameters=tfmdata.parameters - if scaledpoints<0 then - scaledpoints=(- scaledpoints/1000)*(tfmdata.designsize or parameters.designsize) - end - return 
scaledpoints,scaledpoints/(parameters.units or 1000) -end -local unscaled={ - ScriptPercentScaleDown=true, - ScriptScriptPercentScaleDown=true, - RadicalDegreeBottomRaisePercent=true -} -function constructors.assignmathparameters(target,original) - local mathparameters=original.mathparameters - if mathparameters and next(mathparameters) then - local targetparameters=target.parameters - local targetproperties=target.properties - local targetmathparameters={} - local factor=targetproperties.math_is_scaled and 1 or targetparameters.factor - for name,value in next,mathparameters do - if unscaled[name] then - targetmathparameters[name]=value - else - targetmathparameters[name]=value*factor - end - end - if not targetmathparameters.FractionDelimiterSize then - targetmathparameters.FractionDelimiterSize=1.01*targetparameters.size - end - if not mathparameters.FractionDelimiterDisplayStyleSize then - targetmathparameters.FractionDelimiterDisplayStyleSize=2.40*targetparameters.size - end - target.mathparameters=targetmathparameters - end -end -function constructors.beforecopyingcharacters(target,original) -end -function constructors.aftercopyingcharacters(target,original) -end -constructors.sharefonts=false -constructors.nofsharedfonts=0 -local sharednames={} -function constructors.trytosharefont(target,tfmdata) - if constructors.sharefonts then - local characters=target.characters - local n=1 - local t={ target.psname } - local u=sortedkeys(characters) - for i=1,#u do - local k=u[i] - n=n+1;t[n]=k - n=n+1;t[n]=characters[k].index or k - end - local h=md5.HEX(concat(t," ")) - local s=sharednames[h] - if s then - if trace_defining then - report_defining("font %a uses backend resources of font %a",target.fullname,s) - end - target.fullname=s - constructors.nofsharedfonts=constructors.nofsharedfonts+1 - target.properties.sharedwith=s - else - sharednames[h]=target.fullname - end - end -end -function constructors.enhanceparameters(parameters) - local xheight=parameters.x_height - local quad=parameters.quad - local space=parameters.space - local stretch=parameters.space_stretch - local shrink=parameters.space_shrink - local extra=parameters.extra_space - local slant=parameters.slant - parameters.xheight=xheight - parameters.spacestretch=stretch - parameters.spaceshrink=shrink - parameters.extraspace=extra - parameters.em=quad - parameters.ex=xheight - parameters.slantperpoint=slant - parameters.spacing={ - width=space, - stretch=stretch, - shrink=shrink, - extra=extra, - } -end -function constructors.scale(tfmdata,specification) - local target={} - if tonumber(specification) then - specification={ size=specification } - end - target.specification=specification - local scaledpoints=specification.size - local relativeid=specification.relativeid - local properties=tfmdata.properties or {} - local goodies=tfmdata.goodies or {} - local resources=tfmdata.resources or {} - local descriptions=tfmdata.descriptions or {} - local characters=tfmdata.characters or {} - local changed=tfmdata.changed or {} - local shared=tfmdata.shared or {} - local parameters=tfmdata.parameters or {} - local mathparameters=tfmdata.mathparameters or {} - local targetcharacters={} - local targetdescriptions=derivetable(descriptions) - local targetparameters=derivetable(parameters) - local targetproperties=derivetable(properties) - local targetgoodies=goodies - target.characters=targetcharacters - target.descriptions=targetdescriptions - target.parameters=targetparameters - target.properties=targetproperties - 
target.goodies=targetgoodies - target.shared=shared - target.resources=resources - target.unscaled=tfmdata - local mathsize=tonumber(specification.mathsize) or 0 - local textsize=tonumber(specification.textsize) or scaledpoints - local forcedsize=tonumber(parameters.mathsize ) or 0 - local extrafactor=tonumber(specification.factor ) or 1 - if (mathsize==2 or forcedsize==2) and parameters.scriptpercentage then - scaledpoints=parameters.scriptpercentage*textsize/100 - elseif (mathsize==3 or forcedsize==3) and parameters.scriptscriptpercentage then - scaledpoints=parameters.scriptscriptpercentage*textsize/100 - elseif forcedsize>1000 then - scaledpoints=forcedsize - end - targetparameters.mathsize=mathsize - targetparameters.textsize=textsize - targetparameters.forcedsize=forcedsize - targetparameters.extrafactor=extrafactor - local tounicode=fonts.mappings.tounicode - local defaultwidth=resources.defaultwidth or 0 - local defaultheight=resources.defaultheight or 0 - local defaultdepth=resources.defaultdepth or 0 - local units=parameters.units or 1000 - if target.fonts then - target.fonts=fastcopy(target.fonts) - end - targetproperties.language=properties.language or "dflt" - targetproperties.script=properties.script or "dflt" - targetproperties.mode=properties.mode or "base" - local askedscaledpoints=scaledpoints - local scaledpoints,delta=constructors.calculatescale(tfmdata,scaledpoints,nil,specification) - local hdelta=delta - local vdelta=delta - target.designsize=parameters.designsize - target.units_per_em=units - local direction=properties.direction or tfmdata.direction or 0 - target.direction=direction - properties.direction=direction - target.size=scaledpoints - target.encodingbytes=properties.encodingbytes or 1 - target.embedding=properties.embedding or "subset" - target.tounicode=1 - target.cidinfo=properties.cidinfo - target.format=properties.format - target.cache=constructors.cacheintex and "yes" or "renew" - local fontname=properties.fontname or tfmdata.fontname - local fullname=properties.fullname or tfmdata.fullname - local filename=properties.filename or tfmdata.filename - local psname=properties.psname or tfmdata.psname - local name=properties.name or tfmdata.name - if not psname or psname=="" then - psname=fontname or (fullname and fonts.names.cleanname(fullname)) - end - target.fontname=fontname - target.fullname=fullname - target.filename=filename - target.psname=psname - target.name=name - properties.fontname=fontname - properties.fullname=fullname - properties.filename=filename - properties.psname=psname - properties.name=name - local expansion=parameters.expansion - if expansion then - target.stretch=expansion.stretch - target.shrink=expansion.shrink - target.step=expansion.step - target.auto_expand=expansion.auto - end - local protrusion=parameters.protrusion - if protrusion then - target.auto_protrude=protrusion.auto - end - local extendfactor=parameters.extendfactor or 0 - if extendfactor~=0 and extendfactor~=1 then - hdelta=hdelta*extendfactor - target.extend=extendfactor*1000 - else - target.extend=1000 - end - local slantfactor=parameters.slantfactor or 0 - if slantfactor~=0 then - target.slant=slantfactor*1000 - else - target.slant=0 - end - targetparameters.factor=delta - targetparameters.hfactor=hdelta - targetparameters.vfactor=vdelta - targetparameters.size=scaledpoints - targetparameters.units=units - targetparameters.scaledpoints=askedscaledpoints - local isvirtual=properties.virtualized or tfmdata.type=="virtual" - local hasquality=target.auto_expand or 
target.auto_protrude - local hasitalics=properties.hasitalics - local autoitalicamount=properties.autoitalicamount - local stackmath=not properties.nostackmath - local nonames=properties.noglyphnames - local haskerns=properties.haskerns or properties.mode=="base" - local hasligatures=properties.hasligatures or properties.mode=="base" - if changed and not next(changed) then - changed=false - end - target.type=isvirtual and "virtual" or "real" - target.postprocessors=tfmdata.postprocessors - local targetslant=(parameters.slant or parameters[1] or 0)*factors.pt - local targetspace=(parameters.space or parameters[2] or 0)*hdelta - local targetspace_stretch=(parameters.space_stretch or parameters[3] or 0)*hdelta - local targetspace_shrink=(parameters.space_shrink or parameters[4] or 0)*hdelta - local targetx_height=(parameters.x_height or parameters[5] or 0)*vdelta - local targetquad=(parameters.quad or parameters[6] or 0)*hdelta - local targetextra_space=(parameters.extra_space or parameters[7] or 0)*hdelta - targetparameters.slant=targetslant - targetparameters.space=targetspace - targetparameters.space_stretch=targetspace_stretch - targetparameters.space_shrink=targetspace_shrink - targetparameters.x_height=targetx_height - targetparameters.quad=targetquad - targetparameters.extra_space=targetextra_space - local ascender=parameters.ascender - if ascender then - targetparameters.ascender=delta*ascender - end - local descender=parameters.descender - if descender then - targetparameters.descender=delta*descender - end - constructors.enhanceparameters(targetparameters) - local protrusionfactor=(targetquad~=0 and 1000/targetquad) or 0 - local scaledwidth=defaultwidth*hdelta - local scaledheight=defaultheight*vdelta - local scaleddepth=defaultdepth*vdelta - local hasmath=(properties.hasmath or next(mathparameters)) and true - if hasmath then - constructors.assignmathparameters(target,tfmdata) - properties.hasmath=true - target.nomath=false - target.MathConstants=target.mathparameters - else - properties.hasmath=false - target.nomath=true - target.mathparameters=nil - end - local italickey="italic" - local useitalics=true - if hasmath then - autoitalicamount=false - elseif properties.textitalics then - italickey="italic_correction" - useitalics=false - if properties.delaytextitalics then - autoitalicamount=false - end - end - if trace_defining then - report_defining("defining tfm, name %a, fullname %a, filename %a, hscale %a, vscale %a, math %a, italics %a", - name,fullname,filename,hdelta,vdelta, - hasmath and "enabled" or "disabled",useitalics and "enabled" or "disabled") - end - constructors.beforecopyingcharacters(target,tfmdata) - local sharedkerns={} - for unicode,character in next,characters do - local chr,description,index - if changed then - local c=changed[unicode] - if c then - description=descriptions[c] or descriptions[unicode] or character - character=characters[c] or character - index=description.index or c - else - description=descriptions[unicode] or character - index=description.index or unicode - end - else - description=descriptions[unicode] or character - index=description.index or unicode - end - local width=description.width - local height=description.height - local depth=description.depth - if width then width=hdelta*width else width=scaledwidth end - if height then height=vdelta*height else height=scaledheight end - if depth and depth~=0 then - depth=delta*depth - if nonames then - chr={ - index=index, - height=height, - depth=depth, - width=width, - } - else - chr={ - 
name=description.name, - index=index, - height=height, - depth=depth, - width=width, - } - end - else - if nonames then - chr={ - index=index, - height=height, - width=width, - } - else - chr={ - name=description.name, - index=index, - height=height, - width=width, - } - end - end - local isunicode=description.unicode - if isunicode then - chr.unicode=isunicode - chr.tounicode=tounicode(isunicode) - end - if hasquality then - local ve=character.expansion_factor - if ve then - chr.expansion_factor=ve*1000 - end - local vl=character.left_protruding - if vl then - chr.left_protruding=protrusionfactor*width*vl - end - local vr=character.right_protruding - if vr then - chr.right_protruding=protrusionfactor*width*vr - end - end - if autoitalicamount then - local vi=description.italic - if not vi then - local vi=description.boundingbox[3]-description.width+autoitalicamount - if vi>0 then - chr[italickey]=vi*hdelta - end - elseif vi~=0 then - chr[italickey]=vi*hdelta - end - elseif hasitalics then - local vi=description.italic - if vi and vi~=0 then - chr[italickey]=vi*hdelta - end - end - if hasmath then - local vn=character.next - if vn then - chr.next=vn - else - local vv=character.vert_variants - if vv then - local t={} - for i=1,#vv do - local vvi=vv[i] - t[i]={ - ["start"]=(vvi["start"] or 0)*vdelta, - ["end"]=(vvi["end"] or 0)*vdelta, - ["advance"]=(vvi["advance"] or 0)*vdelta, - ["extender"]=vvi["extender"], - ["glyph"]=vvi["glyph"], - } - end - chr.vert_variants=t - else - local hv=character.horiz_variants - if hv then - local t={} - for i=1,#hv do - local hvi=hv[i] - t[i]={ - ["start"]=(hvi["start"] or 0)*hdelta, - ["end"]=(hvi["end"] or 0)*hdelta, - ["advance"]=(hvi["advance"] or 0)*hdelta, - ["extender"]=hvi["extender"], - ["glyph"]=hvi["glyph"], - } - end - chr.horiz_variants=t - end - end - end - local va=character.top_accent - if va then - chr.top_accent=vdelta*va - end - if stackmath then - local mk=character.mathkerns - if mk then - local kerns={} - local v=mk.top_right if v then local k={} for i=1,#v do local vi=v[i] - k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern } - end kerns.top_right=k end - local v=mk.top_left if v then local k={} for i=1,#v do local vi=v[i] - k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern } - end kerns.top_left=k end - local v=mk.bottom_left if v then local k={} for i=1,#v do local vi=v[i] - k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern } - end kerns.bottom_left=k end - local v=mk.bottom_right if v then local k={} for i=1,#v do local vi=v[i] - k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern } - end kerns.bottom_right=k end - chr.mathkern=kerns - end - end - end - if haskerns then - local vk=character.kerns - if vk then - local s=sharedkerns[vk] - if not s then - s={} - for k,v in next,vk do s[k]=v*hdelta end - sharedkerns[vk]=s - end - chr.kerns=s - end - end - if hasligatures then - local vl=character.ligatures - if vl then - if true then - chr.ligatures=vl - else - local tt={} - for i,l in next,vl do - tt[i]=l - end - chr.ligatures=tt - end - end - end - if isvirtual then - local vc=character.commands - if vc then - local ok=false - for i=1,#vc do - local key=vc[i][1] - if key=="right" or key=="down" then - ok=true - break - end - end - if ok then - local tt={} - for i=1,#vc do - local ivc=vc[i] - local key=ivc[1] - if key=="right" then - tt[i]={ key,ivc[2]*hdelta } - elseif key=="down" then - tt[i]={ key,ivc[2]*vdelta } - elseif key=="rule" then - tt[i]={ key,ivc[2]*vdelta,ivc[3]*hdelta } - else - tt[i]=ivc - end - end - chr.commands=tt 
- else - chr.commands=vc - end - chr.index=nil - end - end - targetcharacters[unicode]=chr - end - constructors.aftercopyingcharacters(target,tfmdata) - constructors.trytosharefont(target,tfmdata) - return target -end -function constructors.finalize(tfmdata) - if tfmdata.properties and tfmdata.properties.finalized then - return - end - if not tfmdata.characters then - return nil - end - if not tfmdata.goodies then - tfmdata.goodies={} - end - local parameters=tfmdata.parameters - if not parameters then - return nil - end - if not parameters.expansion then - parameters.expansion={ - stretch=tfmdata.stretch or 0, - shrink=tfmdata.shrink or 0, - step=tfmdata.step or 0, - auto=tfmdata.auto_expand or false, - } - end - if not parameters.protrusion then - parameters.protrusion={ - auto=auto_protrude - } - end - if not parameters.size then - parameters.size=tfmdata.size - end - if not parameters.extendfactor then - parameters.extendfactor=tfmdata.extend or 0 - end - if not parameters.slantfactor then - parameters.slantfactor=tfmdata.slant or 0 - end - if not parameters.designsize then - parameters.designsize=tfmdata.designsize or (factors.pt*10) - end - if not parameters.units then - parameters.units=tfmdata.units_per_em or 1000 - end - if not tfmdata.descriptions then - local descriptions={} - setmetatableindex(descriptions,function(t,k) local v={} t[k]=v return v end) - tfmdata.descriptions=descriptions - end - local properties=tfmdata.properties - if not properties then - properties={} - tfmdata.properties=properties - end - if not properties.virtualized then - properties.virtualized=tfmdata.type=="virtual" - end - if not tfmdata.properties then - tfmdata.properties={ - fontname=tfmdata.fontname, - filename=tfmdata.filename, - fullname=tfmdata.fullname, - name=tfmdata.name, - psname=tfmdata.psname, - encodingbytes=tfmdata.encodingbytes or 1, - embedding=tfmdata.embedding or "subset", - tounicode=tfmdata.tounicode or 1, - cidinfo=tfmdata.cidinfo or nil, - format=tfmdata.format or "type1", - direction=tfmdata.direction or 0, - } - end - if not tfmdata.resources then - tfmdata.resources={} - end - if not tfmdata.shared then - tfmdata.shared={} - end - if not properties.hasmath then - properties.hasmath=not tfmdata.nomath - end - tfmdata.MathConstants=nil - tfmdata.postprocessors=nil - tfmdata.fontname=nil - tfmdata.filename=nil - tfmdata.fullname=nil - tfmdata.name=nil - tfmdata.psname=nil - tfmdata.encodingbytes=nil - tfmdata.embedding=nil - tfmdata.tounicode=nil - tfmdata.cidinfo=nil - tfmdata.format=nil - tfmdata.direction=nil - tfmdata.type=nil - tfmdata.nomath=nil - tfmdata.designsize=nil - tfmdata.size=nil - tfmdata.stretch=nil - tfmdata.shrink=nil - tfmdata.step=nil - tfmdata.auto_expand=nil - tfmdata.auto_protrude=nil - tfmdata.extend=nil - tfmdata.slant=nil - tfmdata.units_per_em=nil - tfmdata.cache=nil - properties.finalized=true - return tfmdata -end -local hashmethods={} -constructors.hashmethods=hashmethods -function constructors.hashfeatures(specification) - local features=specification.features - if features then - local t,tn={},0 - for category,list in next,features do - if next(list) then - local hasher=hashmethods[category] - if hasher then - local hash=hasher(list) - if hash then - tn=tn+1 - t[tn]=category..":"..hash - end - end - end - end - if tn>0 then - return concat(t," & ") - end - end - return "unknown" -end -hashmethods.normal=function(list) - local s={} - local n=0 - for k,v in next,list do - if not k then - elseif k=="number" or k=="features" then - else - n=n+1 - 
s[n]=k - end - end - if n>0 then - sort(s) - for i=1,n do - local k=s[i] - s[i]=k..'='..tostring(list[k]) - end - return concat(s,"+") - end -end -function constructors.hashinstance(specification,force) - local hash,size,fallbacks=specification.hash,specification.size,specification.fallbacks - if force or not hash then - hash=constructors.hashfeatures(specification) - specification.hash=hash - end - if size<1000 and designsizes[hash] then - size=math.round(constructors.scaled(size,designsizes[hash])) - specification.size=size - end - if fallbacks then - return hash..' @ '..tostring(size)..' @ '..fallbacks - else - return hash..' @ '..tostring(size) - end -end -function constructors.setname(tfmdata,specification) - if constructors.namemode=="specification" then - local specname=specification.specification - if specname then - tfmdata.properties.name=specname - if trace_defining then - report_otf("overloaded fontname %a",specname) - end - end - end -end -function constructors.checkedfilename(data) - local foundfilename=data.foundfilename - if not foundfilename then - local askedfilename=data.filename or "" - if askedfilename~="" then - askedfilename=resolvers.resolve(askedfilename) - foundfilename=resolvers.findbinfile(askedfilename,"") or "" - if foundfilename=="" then - report_defining("source file %a is not found",askedfilename) - foundfilename=resolvers.findbinfile(file.basename(askedfilename),"") or "" - if foundfilename~="" then - report_defining("using source file %a due to cache mismatch",foundfilename) - end - end - end - data.foundfilename=foundfilename - end - return foundfilename -end -local formats=allocate() -fonts.formats=formats -setmetatableindex(formats,function(t,k) - local l=lower(k) - if rawget(t,k) then - t[k]=l - return l - end - return rawget(t,file.suffix(l)) -end) -local locations={} -local function setindeed(mode,target,group,name,action,position) - local t=target[mode] - if not t then - report_defining("fatal error in setting feature %a, group %a, mode %a",name,group,mode) - os.exit() - elseif position then - insert(t,position,{ name=name,action=action }) - else - for i=1,#t do - local ti=t[i] - if ti.name==name then - ti.action=action - return - end - end - insert(t,{ name=name,action=action }) - end -end -local function set(group,name,target,source) - target=target[group] - if not target then - report_defining("fatal target error in setting feature %a, group %a",name,group) - os.exit() - end - local source=source[group] - if not source then - report_defining("fatal source error in setting feature %a, group %a",name,group) - os.exit() - end - local node=source.node - local base=source.base - local position=source.position - if node then - setindeed("node",target,group,name,node,position) - end - if base then - setindeed("base",target,group,name,base,position) - end -end -local function register(where,specification) - local name=specification.name - if name and name~="" then - local default=specification.default - local description=specification.description - local initializers=specification.initializers - local processors=specification.processors - local manipulators=specification.manipulators - local modechecker=specification.modechecker - if default then - where.defaults[name]=default - end - if description and description~="" then - where.descriptions[name]=description - end - if initializers then - set('initializers',name,where,specification) - end - if processors then - set('processors',name,where,specification) - end - if manipulators then - 
set('manipulators',name,where,specification) - end - if modechecker then - where.modechecker=modechecker - end - end -end -constructors.registerfeature=register -function constructors.getfeatureaction(what,where,mode,name) - what=handlers[what].features - if what then - where=what[where] - if where then - mode=where[mode] - if mode then - for i=1,#mode do - local m=mode[i] - if m.name==name then - return m.action - end - end - end - end - end -end -function constructors.newhandler(what) - local handler=handlers[what] - if not handler then - handler={} - handlers[what]=handler - end - return handler -end -function constructors.newfeatures(what) - local handler=handlers[what] - local features=handler.features - if not features then - local tables=handler.tables - local statistics=handler.statistics - features=allocate { - defaults={}, - descriptions=tables and tables.features or {}, - used=statistics and statistics.usedfeatures or {}, - initializers={ base={},node={} }, - processors={ base={},node={} }, - manipulators={ base={},node={} }, - } - features.register=function(specification) return register(features,specification) end - handler.features=features - end - return features -end -function constructors.checkedfeatures(what,features) - local defaults=handlers[what].features.defaults - if features and next(features) then - features=fastcopy(features) - for key,value in next,defaults do - if features[key]==nil then - features[key]=value - end - end - return features - else - return fastcopy(defaults) - end -end -function constructors.initializefeatures(what,tfmdata,features,trace,report) - if features and next(features) then - local properties=tfmdata.properties or {} - local whathandler=handlers[what] - local whatfeatures=whathandler.features - local whatinitializers=whatfeatures.initializers - local whatmodechecker=whatfeatures.modechecker - local mode=properties.mode or (whatmodechecker and whatmodechecker(tfmdata,features,features.mode)) or features.mode or "base" - properties.mode=mode - features.mode=mode - local done={} - while true do - local redo=false - local initializers=whatfeatures.initializers[mode] - if initializers then - for i=1,#initializers do - local step=initializers[i] - local feature=step.name - local value=features[feature] - if not value then - elseif done[feature] then - else - local action=step.action - if trace then - report("initializing feature %a to %a for mode %a for font %a",feature, - value,mode,tfmdata.properties.fullname) - end - action(tfmdata,value,features) - if mode~=properties.mode or mode~=features.mode then - if whatmodechecker then - properties.mode=whatmodechecker(tfmdata,features,properties.mode) - features.mode=properties.mode - end - if mode~=properties.mode then - mode=properties.mode - redo=true - end - end - done[feature]=true - end - if redo then - break - end - end - if not redo then - break - end - else - break - end - end - properties.mode=mode - return true - else - return false - end -end -function constructors.collectprocessors(what,tfmdata,features,trace,report) - local processes,nofprocesses={},0 - if features and next(features) then - local properties=tfmdata.properties - local whathandler=handlers[what] - local whatfeatures=whathandler.features - local whatprocessors=whatfeatures.processors - local mode=properties.mode - local processors=whatprocessors[mode] - if processors then - for i=1,#processors do - local step=processors[i] - local feature=step.name - if features[feature] then - local action=step.action - if trace then - 
report("installing feature processor %a for mode %a for font %a",feature,mode,tfmdata.properties.fullname) - end - if action then - nofprocesses=nofprocesses+1 - processes[nofprocesses]=action - end - end - end - elseif trace then - report("no feature processors for mode %a for font %a",mode,properties.fullname) - end - end - return processes -end -function constructors.applymanipulators(what,tfmdata,features,trace,report) - if features and next(features) then - local properties=tfmdata.properties - local whathandler=handlers[what] - local whatfeatures=whathandler.features - local whatmanipulators=whatfeatures.manipulators - local mode=properties.mode - local manipulators=whatmanipulators[mode] - if manipulators then - for i=1,#manipulators do - local step=manipulators[i] - local feature=step.name - local value=features[feature] - if value then - local action=step.action - if trace then - report("applying feature manipulator %a for mode %a for font %a",feature,mode,properties.fullname) - end - if action then - action(tfmdata,feature,value) - end - end - end - end - end -end -function constructors.addcoreunicodes(unicodes) - if not unicodes then - unicodes={} - end - unicodes.space=0x0020 - unicodes.hyphen=0x002D - unicodes.zwj=0x200D - unicodes.zwnj=0x200C - return unicodes -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['luatex-font-enc']={ - version=1.001, - comment="companion to luatex-*.tex", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end -local fonts=fonts -fonts.encodings={} -fonts.encodings.agl={} -fonts.encodings.known={} -setmetatable(fonts.encodings.agl,{ __index=function(t,k) - if k=="unicodes" then - texio.write(" ") - local unicodes=dofile(resolvers.findfile("font-age.lua")) - fonts.encodings.agl={ unicodes=unicodes } - return unicodes - else - return nil - end -end }) - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-cid']={ - version=1.001, - comment="companion to font-otf.lua (cidmaps)", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local format,match,lower=string.format,string.match,string.lower -local tonumber=tonumber -local P,S,R,C,V,lpegmatch=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.V,lpeg.match -local fonts,logs,trackers=fonts,logs,trackers -local trace_loading=false trackers.register("otf.loading",function(v) trace_loading=v end) -local report_otf=logs.reporter("fonts","otf loading") -local cid={} -fonts.cid=cid -local cidmap={} -local cidmax=10 -local number=C(R("09","af","AF")^1) -local space=S(" \n\r\t") -local spaces=space^0 -local period=P(".") -local periods=period*period -local name=P("/")*C((1-space)^1) -local unicodes,names={},{} -local function do_one(a,b) - unicodes[tonumber(a)]=tonumber(b,16) -end -local function do_range(a,b,c) - c=tonumber(c,16) - for i=tonumber(a),tonumber(b) do - unicodes[i]=c - c=c+1 - end -end -local function do_name(a,b) - names[tonumber(a)]=b -end -local grammar=P { "start", - start=number*spaces*number*V("series"), - series=(spaces*(V("one")+V("range")+V("named")))^1, - one=(number*spaces*number)/do_one, - range=(number*periods*number*spaces*number)/do_range, - 
named=(number*spaces*name)/do_name -} -local function loadcidfile(filename) - local data=io.loaddata(filename) - if data then - unicodes,names={},{} - lpegmatch(grammar,data) - local supplement,registry,ordering=match(filename,"^(.-)%-(.-)%-()%.(.-)$") - return { - supplement=supplement, - registry=registry, - ordering=ordering, - filename=filename, - unicodes=unicodes, - names=names, - } - end -end -cid.loadfile=loadcidfile -local template="%s-%s-%s.cidmap" -local function locate(registry,ordering,supplement) - local filename=format(template,registry,ordering,supplement) - local hashname=lower(filename) - local found=cidmap[hashname] - if not found then - if trace_loading then - report_otf("checking cidmap, registry %a, ordering %a, supplement %a, filename %a",registry,ordering,supplement,filename) - end - local fullname=resolvers.findfile(filename,'cid') or "" - if fullname~="" then - found=loadcidfile(fullname) - if found then - if trace_loading then - report_otf("using cidmap file %a",filename) - end - cidmap[hashname]=found - found.usedname=file.basename(filename) - end - end - end - return found -end -function cid.getmap(specification) - if not specification then - report_otf("invalid cidinfo specification, table expected") - return - end - local registry=specification.registry - local ordering=specification.ordering - local supplement=specification.supplement - local filename=format(registry,ordering,supplement) - local lowername=lower(filename) - local found=cidmap[lowername] - if found then - return found - end - if ordering=="Identity" then - local found={ - supplement=supplement, - registry=registry, - ordering=ordering, - filename=filename, - unicodes={}, - names={}, - } - cidmap[lowername]=found - return found - end - if trace_loading then - report_otf("cidmap needed, registry %a, ordering %a, supplement %a",registry,ordering,supplement) - end - found=locate(registry,ordering,supplement) - if not found then - local supnum=tonumber(supplement) - local cidnum=nil - if supnum0 then - for s=supnum-1,0,-1 do - local c=locate(registry,ordering,s) - if c then - found,cidnum=c,s - break - end - end - end - registry=lower(registry) - ordering=lower(ordering) - if found and cidnum>0 then - for s=0,cidnum-1 do - local filename=format(template,registry,ordering,s) - if not cidmap[filename] then - cidmap[filename]=found - end - end - end - end - return found -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-map']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local tonumber,next,type=tonumber,next,type -local match,format,find,concat,gsub,lower=string.match,string.format,string.find,table.concat,string.gsub,string.lower -local P,R,S,C,Ct,Cc,lpegmatch=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cc,lpeg.match -local utfbyte=utf.byte -local floor=math.floor -local formatters=string.formatters -local trace_loading=false trackers.register("fonts.loading",function(v) trace_loading=v end) -local trace_mapping=false trackers.register("fonts.mapping",function(v) trace_unimapping=v end) -local report_fonts=logs.reporter("fonts","loading") -local fonts=fonts or {} -local mappings=fonts.mappings or {} -fonts.mappings=mappings -local function loadlumtable(filename) - local lumname=file.replacesuffix(file.basename(filename),"lum") - local 
lumfile=resolvers.findfile(lumname,"map") or "" - if lumfile~="" and lfs.isfile(lumfile) then - if trace_loading or trace_mapping then - report_fonts("loading map table %a",lumfile) - end - lumunic=dofile(lumfile) - return lumunic,lumfile - end -end -local hex=R("AF","09") -local hexfour=(hex*hex*hex*hex)/function(s) return tonumber(s,16) end -local hexsix=(hex*hex*hex*hex*hex*hex)/function(s) return tonumber(s,16) end -local dec=(R("09")^1)/tonumber -local period=P(".") -local unicode=P("uni")*(hexfour*(period+P(-1))*Cc(false)+Ct(hexfour^1)*Cc(true)) -local ucode=P("u")*(hexsix*(period+P(-1))*Cc(false)+Ct(hexsix^1)*Cc(true)) -local index=P("index")*dec*Cc(false) -local parser=unicode+ucode+index -local parsers={} -local function makenameparser(str) - if not str or str=="" then - return parser - else - local p=parsers[str] - if not p then - p=P(str)*period*dec*Cc(false) - parsers[str]=p - end - return p - end -end -local f_single=formatters["%04X"] -local f_double=formatters["%04X%04X"] -local function tounicode16(unicode,name) - if unicode<0x10000 then - return f_single(unicode) - elseif unicode<0x1FFFFFFFFF then - return f_double(floor(unicode/1024),unicode%1024+0xDC00) - else - report_fonts("can't convert %a in %a into tounicode",unicode,name) - end -end -local function tounicode16sequence(unicodes,name) - local t={} - for l=1,#unicodes do - local u=unicodes[l] - if u<0x10000 then - t[l]=f_single(u) - elseif unicode<0x1FFFFFFFFF then - t[l]=f_double(floor(u/1024),u%1024+0xDC00) - else - report_fonts ("can't convert %a in %a into tounicode",u,name) - return - end - end - return concat(t) -end -local function tounicode(unicode,name) - if type(unicode)=="table" then - local t={} - for l=1,#unicode do - local u=unicode[l] - if u<0x10000 then - t[l]=f_single(u) - elseif u<0x1FFFFFFFFF then - t[l]=f_double(floor(u/1024),u%1024+0xDC00) - else - report_fonts ("can't convert %a in %a into tounicode",u,name) - return - end - end - return concat(t) - else - if unicode<0x10000 then - return f_single(unicode) - elseif unicode<0x1FFFFFFFFF then - return f_double(floor(unicode/1024),unicode%1024+0xDC00) - else - report_fonts("can't convert %a in %a into tounicode",unicode,name) - end - end -end -local function fromunicode16(str) - if #str==4 then - return tonumber(str,16) - else - local l,r=match(str,"(....)(....)") - return (tonumber(l,16))*0x400+tonumber(r,16)-0xDC00 - end -end -mappings.loadlumtable=loadlumtable -mappings.makenameparser=makenameparser -mappings.tounicode=tounicode -mappings.tounicode16=tounicode16 -mappings.tounicode16sequence=tounicode16sequence -mappings.fromunicode16=fromunicode16 -local ligseparator=P("_") -local varseparator=P(".") -local namesplitter=Ct(C((1-ligseparator-varseparator)^1)*(ligseparator*C((1-ligseparator-varseparator)^1))^0) -local overloads={ - IJ={ name="I_J",unicode={ 0x49,0x4A },mess=0x0132 }, - ij={ name="i_j",unicode={ 0x69,0x6A },mess=0x0133 }, - ff={ name="f_f",unicode={ 0x66,0x66 },mess=0xFB00 }, - fi={ name="f_i",unicode={ 0x66,0x69 },mess=0xFB01 }, - fl={ name="f_l",unicode={ 0x66,0x6C },mess=0xFB02 }, - ffi={ name="f_f_i",unicode={ 0x66,0x66,0x69 },mess=0xFB03 }, - ffl={ name="f_f_l",unicode={ 0x66,0x66,0x6C },mess=0xFB04 }, - fj={ name="f_j",unicode={ 0x66,0x6A } }, - fk={ name="f_k",unicode={ 0x66,0x6B } }, -} -for k,v in next,overloads do - local name=v.name - local mess=v.mess - if name then - overloads[name]=v - end - if mess then - overloads[mess]=v - end -end -mappings.overloads=overloads -function mappings.addtounicode(data,filename) - local 
resources=data.resources - local properties=data.properties - local descriptions=data.descriptions - local unicodes=resources.unicodes - local lookuptypes=resources.lookuptypes - if not unicodes then - return - end - unicodes['space']=unicodes['space'] or 32 - unicodes['hyphen']=unicodes['hyphen'] or 45 - unicodes['zwj']=unicodes['zwj'] or 0x200D - unicodes['zwnj']=unicodes['zwnj'] or 0x200C - local private=fonts.constructors.privateoffset - local unicodevector=fonts.encodings.agl.unicodes - local missing={} - local lumunic,uparser,oparser - local cidinfo,cidnames,cidcodes,usedmap - cidinfo=properties.cidinfo - usedmap=cidinfo and fonts.cid.getmap(cidinfo) - if usedmap then - oparser=usedmap and makenameparser(cidinfo.ordering) - cidnames=usedmap.names - cidcodes=usedmap.unicodes - end - uparser=makenameparser() - local ns,nl=0,0 - for unic,glyph in next,descriptions do - local index=glyph.index - local name=glyph.name - local r=overloads[name] - if r then - glyph.unicode=r.unicode - elseif unic==-1 or unic>=private or (unic>=0xE000 and unic<=0xF8FF) or unic==0xFFFE or unic==0xFFFF then - local unicode=lumunic and lumunic[name] or unicodevector[name] - if unicode then - glyph.unicode=unicode - ns=ns+1 - end - if (not unicode) and usedmap then - local foundindex=lpegmatch(oparser,name) - if foundindex then - unicode=cidcodes[foundindex] - if unicode then - glyph.unicode=unicode - ns=ns+1 - else - local reference=cidnames[foundindex] - if reference then - local foundindex=lpegmatch(oparser,reference) - if foundindex then - unicode=cidcodes[foundindex] - if unicode then - glyph.unicode=unicode - ns=ns+1 - end - end - if not unicode or unicode=="" then - local foundcodes,multiple=lpegmatch(uparser,reference) - if foundcodes then - glyph.unicode=foundcodes - if multiple then - nl=nl+1 - unicode=true - else - ns=ns+1 - unicode=foundcodes - end - end - end - end - end - end - end - if not unicode or unicode=="" then - local split=lpegmatch(namesplitter,name) - local nsplit=split and #split or 0 - local t,n={},0 - unicode=true - for l=1,nsplit do - local base=split[l] - local u=unicodes[base] or unicodevector[base] - if not u then - break - elseif type(u)=="table" then - if u[1]>=private then - unicode=false - break - end - n=n+1 - t[n]=u[1] - else - if u>=private then - unicode=false - break - end - n=n+1 - t[n]=u - end - end - if n==0 then - elseif n==1 then - glyph.unicode=t[1] - else - glyph.unicode=t - end - nl=nl+1 - end - if not unicode or unicode=="" then - local foundcodes,multiple=lpegmatch(uparser,name) - if foundcodes then - glyph.unicode=foundcodes - if multiple then - nl=nl+1 - unicode=true - else - ns=ns+1 - unicode=foundcodes - end - end - end - local r=overloads[unicode] - if r then - unicode=r.unicode - glyph.unicode=unicode - end - if not unicode then - missing[name]=true - end - end - end - if next(missing) then - local guess={} - local function check(gname,code,unicode) - local description=descriptions[code] - local variant=description.name - if variant==gname then - return - end - local unic=unicodes[variant] - if unic==-1 or unic>=private or (unic>=0xE000 and unic<=0xF8FF) or unic==0xFFFE or unic==0xFFFF then - else - return - end - if descriptions[code].unicode then - return - end - local g=guess[variant] - if g then - g[gname]=unicode - else - guess[variant]={ [gname]=unicode } - end - end - for unicode,description in next,descriptions do - local slookups=description.slookups - if slookups then - local gname=description.name - for tag,data in next,slookups do - local 
lookuptype=lookuptypes[tag] - if lookuptype=="alternate" then - for i=1,#data do - check(gname,data[i],unicode) - end - elseif lookuptype=="substitution" then - check(gname,data,unicode) - end - end - end - local mlookups=description.mlookups - if mlookups then - local gname=description.name - for tag,list in next,mlookups do - local lookuptype=lookuptypes[tag] - if lookuptype=="alternate" then - for i=1,#list do - local data=list[i] - for i=1,#data do - check(gname,data[i],unicode) - end - end - elseif lookuptype=="substitution" then - for i=1,#list do - check(gname,list[i],unicode) - end - end - end - end - end - local done=true - while done do - done=false - for k,v in next,guess do - if type(v)~="number" then - for kk,vv in next,v do - if vv==-1 or vv>=private or (vv>=0xE000 and vv<=0xF8FF) or vv==0xFFFE or vv==0xFFFF then - local uu=guess[kk] - if type(uu)=="number" then - guess[k]=uu - done=true - end - else - guess[k]=vv - done=true - end - end - end - end - end - local orphans=0 - local guessed=0 - for k,v in next,guess do - if type(v)=="number" then - descriptions[unicodes[k]].unicode=descriptions[v].unicode or v - guessed=guessed+1 - else - local t=nil - local l=lower(k) - local u=unicodes[l] - if not u then - orphans=orphans+1 - elseif u==-1 or u>=private or (u>=0xE000 and u<=0xF8FF) or u==0xFFFE or u==0xFFFF then - local unicode=descriptions[u].unicode - if unicode then - descriptions[unicodes[k]].unicode=unicode - guessed=guessed+1 - else - orphans=orphans+1 - end - else - orphans=orphans+1 - end - end - end - if trace_loading and orphans>0 or guessed>0 then - report_fonts("%s glyphs with no related unicode, %s guessed, %s orphans",guessed+orphans,guessed,orphans) - end - end - if trace_mapping then - for unic,glyph in table.sortedhash(descriptions) do - local name=glyph.name - local index=glyph.index - local unicode=glyph.unicode - if unicode then - if type(unicode)=="table" then - local unicodes={} - for i=1,#unicode do - unicodes[i]=formatters("%U",unicode[i]) - end - report_fonts("internal slot %U, name %a, unicode %U, tounicode % t",index,name,unic,unicodes) - else - report_fonts("internal slot %U, name %a, unicode %U, tounicode %U",index,name,unic,unicode) - end - else - report_fonts("internal slot %U, name %a, unicode %U",index,name,unic) - end - end - end - if trace_loading and (ns>0 or nl>0) then - report_fonts("%s tounicode entries added, ligatures %s",nl+ns,ns) - end -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['luatex-fonts-syn']={ - version=1.001, - comment="companion to luatex-*.tex", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end -local fonts=fonts -fonts.names=fonts.names or {} -fonts.names.version=1.001 -fonts.names.basename="luatex-fonts-names" -fonts.names.new_to_old={} -fonts.names.old_to_new={} -fonts.names.cache=containers.define("fonts","data",fonts.names.version,true) -local data,loaded=nil,false -local fileformats={ "lua","tex","other text files" } -function fonts.names.reportmissingbase() - texio.write("") - fonts.names.reportmissingbase=nil -end -function fonts.names.reportmissingname() - texio.write("") - fonts.names.reportmissingname=nil -end -function fonts.names.resolve(name,sub) - if not loaded then - local basename=fonts.names.basename - if basename and 
basename~="" then - data=containers.read(fonts.names.cache,basename) - if not data then - basename=file.addsuffix(basename,"lua") - for i=1,#fileformats do - local format=fileformats[i] - local foundname=resolvers.findfile(basename,format) or "" - if foundname~="" then - data=dofile(foundname) - texio.write("") - break - end - end - end - end - loaded=true - end - if type(data)=="table" and data.version==fonts.names.version then - local condensed=string.gsub(string.lower(name),"[^%a%d]","") - local found=data.mappings and data.mappings[condensed] - if found then - local fontname,filename,subfont=found[1],found[2],found[3] - if subfont then - return filename,fontname - else - return filename,false - end - elseif fonts.names.reportmissingname then - fonts.names.reportmissingname() - return name,false - end - elseif fonts.names.reportmissingbase then - fonts.names.reportmissingbase() - end -end -fonts.names.resolvespec=fonts.names.resolve -function fonts.names.getfilename(askedname,suffix) - return "" -end -function fonts.names.ignoredfile(filename) - return false -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-tfm']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local next=next -local match=string.match -local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end) -local trace_features=false trackers.register("tfm.features",function(v) trace_features=v end) -local report_defining=logs.reporter("fonts","defining") -local report_tfm=logs.reporter("fonts","tfm loading") -local findbinfile=resolvers.findbinfile -local fonts=fonts -local handlers=fonts.handlers -local readers=fonts.readers -local constructors=fonts.constructors -local encodings=fonts.encodings -local tfm=constructors.newhandler("tfm") -local tfmfeatures=constructors.newfeatures("tfm") -local registertfmfeature=tfmfeatures.register -constructors.resolvevirtualtoo=false -fonts.formats.tfm="type1" -function tfm.setfeatures(tfmdata,features) - local okay=constructors.initializefeatures("tfm",tfmdata,features,trace_features,report_tfm) - if okay then - return constructors.collectprocessors("tfm",tfmdata,features,trace_features,report_tfm) - else - return {} - end -end -local function read_from_tfm(specification) - local filename=specification.filename - local size=specification.size - if trace_defining then - report_defining("loading tfm file %a at size %s",filename,size) - end - local tfmdata=font.read_tfm(filename,size) - if tfmdata then - local features=specification.features and specification.features.normal or {} - local resources=tfmdata.resources or {} - local properties=tfmdata.properties or {} - local parameters=tfmdata.parameters or {} - local shared=tfmdata.shared or {} - properties.name=tfmdata.name - properties.fontname=tfmdata.fontname - properties.psname=tfmdata.psname - properties.filename=specification.filename - properties.format=fonts.formats.tfm - parameters.size=size - shared.rawdata={} - shared.features=features - shared.processes=next(features) and tfm.setfeatures(tfmdata,features) or nil - tfmdata.properties=properties - tfmdata.resources=resources - tfmdata.parameters=parameters - tfmdata.shared=shared - parameters.slant=parameters.slant or parameters[1] or 0 - parameters.space=parameters.space or parameters[2] or 0 - 
parameters.space_stretch=parameters.space_stretch or parameters[3] or 0 - parameters.space_shrink=parameters.space_shrink or parameters[4] or 0 - parameters.x_height=parameters.x_height or parameters[5] or 0 - parameters.quad=parameters.quad or parameters[6] or 0 - parameters.extra_space=parameters.extra_space or parameters[7] or 0 - constructors.enhanceparameters(parameters) - if constructors.resolvevirtualtoo then - fonts.loggers.register(tfmdata,file.suffix(filename),specification) - local vfname=findbinfile(specification.name,'ovf') - if vfname and vfname~="" then - local vfdata=font.read_vf(vfname,size) - if vfdata then - local chars=tfmdata.characters - for k,v in next,vfdata.characters do - chars[k].commands=v.commands - end - properties.virtualized=true - tfmdata.fonts=vfdata.fonts - end - end - end - local allfeatures=tfmdata.shared.features or specification.features.normal - constructors.applymanipulators("tfm",tfmdata,allfeatures.normal,trace_features,report_tfm) - if not features.encoding then - local encoding,filename=match(properties.filename,"^(.-)%-(.*)$") - if filename and encoding and encodings.known and encodings.known[encoding] then - features.encoding=encoding - end - end - properties.haskerns=true - properties.haslogatures=true - resources.unicodes={} - resources.lookuptags={} - return tfmdata - end -end -local function check_tfm(specification,fullname) - local foundname=findbinfile(fullname,'tfm') or "" - if foundname=="" then - foundname=findbinfile(fullname,'ofm') or "" - end - if foundname=="" then - foundname=fonts.names.getfilename(fullname,"tfm") or "" - end - if foundname~="" then - specification.filename=foundname - specification.format="ofm" - return read_from_tfm(specification) - elseif trace_defining then - report_defining("loading tfm with name %a fails",specification.name) - end -end -readers.check_tfm=check_tfm -function readers.tfm(specification) - local fullname=specification.filename or "" - if fullname=="" then - local forced=specification.forced or "" - if forced~="" then - fullname=specification.name.."."..forced - else - fullname=specification.name - end - end - return check_tfm(specification,fullname) -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-afm']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local fonts,logs,trackers,containers,resolvers=fonts,logs,trackers,containers,resolvers -local next,type,tonumber=next,type,tonumber -local format,match,gmatch,lower,gsub,strip=string.format,string.match,string.gmatch,string.lower,string.gsub,string.strip -local abs=math.abs -local P,S,C,R,lpegmatch,patterns=lpeg.P,lpeg.S,lpeg.C,lpeg.R,lpeg.match,lpeg.patterns -local derivetable=table.derive -local trace_features=false trackers.register("afm.features",function(v) trace_features=v end) -local trace_indexing=false trackers.register("afm.indexing",function(v) trace_indexing=v end) -local trace_loading=false trackers.register("afm.loading",function(v) trace_loading=v end) -local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end) -local report_afm=logs.reporter("fonts","afm loading") -local setmetatableindex=table.setmetatableindex -local findbinfile=resolvers.findbinfile -local definers=fonts.definers -local readers=fonts.readers -local 
constructors=fonts.constructors -local afm=constructors.newhandler("afm") -local pfb=constructors.newhandler("pfb") -local afmfeatures=constructors.newfeatures("afm") -local registerafmfeature=afmfeatures.register -afm.version=1.500 -afm.cache=containers.define("fonts","afm",afm.version,true) -afm.autoprefixed=true -afm.helpdata={} -afm.syncspace=true -afm.addligatures=true -afm.addtexligatures=true -afm.addkerns=true -local overloads=fonts.mappings.overloads -local applyruntimefixes=fonts.treatments and fonts.treatments.applyfixes -local function setmode(tfmdata,value) - if value then - tfmdata.properties.mode=lower(value) - end -end -registerafmfeature { - name="mode", - description="mode", - initializers={ - base=setmode, - node=setmode, - } -} -local comment=P("Comment") -local spacing=patterns.spacer -local lineend=patterns.newline -local words=C((1-lineend)^1) -local number=C((R("09")+S("."))^1)/tonumber*spacing^0 -local data=lpeg.Carg(1) -local pattern=( - comment*spacing*( - data*( - ("CODINGSCHEME"*spacing*words )/function(fd,a) end+("DESIGNSIZE"*spacing*number*words )/function(fd,a) fd[ 1]=a end+("CHECKSUM"*spacing*number*words )/function(fd,a) fd[ 2]=a end+("SPACE"*spacing*number*"plus"*number*"minus"*number)/function(fd,a,b,c) fd[ 3],fd[ 4],fd[ 5]=a,b,c end+("QUAD"*spacing*number )/function(fd,a) fd[ 6]=a end+("EXTRASPACE"*spacing*number )/function(fd,a) fd[ 7]=a end+("NUM"*spacing*number*number*number )/function(fd,a,b,c) fd[ 8],fd[ 9],fd[10]=a,b,c end+("DENOM"*spacing*number*number )/function(fd,a,b ) fd[11],fd[12]=a,b end+("SUP"*spacing*number*number*number )/function(fd,a,b,c) fd[13],fd[14],fd[15]=a,b,c end+("SUB"*spacing*number*number )/function(fd,a,b) fd[16],fd[17]=a,b end+("SUPDROP"*spacing*number )/function(fd,a) fd[18]=a end+("SUBDROP"*spacing*number )/function(fd,a) fd[19]=a end+("DELIM"*spacing*number*number )/function(fd,a,b) fd[20],fd[21]=a,b end+("AXISHEIGHT"*spacing*number )/function(fd,a) fd[22]=a end - )+(1-lineend)^0 - )+(1-comment)^1 -)^0 -local function scan_comment(str) - local fd={} - lpegmatch(pattern,str,1,fd) - return fd -end -local keys={} -function keys.FontName (data,line) data.metadata.fontname=strip (line) - data.metadata.fullname=strip (line) end -function keys.ItalicAngle (data,line) data.metadata.italicangle=tonumber (line) end -function keys.IsFixedPitch(data,line) data.metadata.isfixedpitch=toboolean(line,true) end -function keys.CharWidth (data,line) data.metadata.charwidth=tonumber (line) end -function keys.XHeight (data,line) data.metadata.xheight=tonumber (line) end -function keys.Descender (data,line) data.metadata.descender=tonumber (line) end -function keys.Ascender (data,line) data.metadata.ascender=tonumber (line) end -function keys.Comment (data,line) - line=lower(line) - local designsize=match(line,"designsize[^%d]*(%d+)") - if designsize then data.metadata.designsize=tonumber(designsize) end -end -local function get_charmetrics(data,charmetrics,vector) - local characters=data.characters - local chr,ind={},0 - for k,v in gmatch(charmetrics,"([%a]+) +(.-) *;") do - if k=='C' then - v=tonumber(v) - if v<0 then - ind=ind+1 - else - ind=v - end - chr={ - index=ind - } - elseif k=='WX' then - chr.width=tonumber(v) - elseif k=='N' then - characters[v]=chr - elseif k=='B' then - local llx,lly,urx,ury=match(v,"^ *(.-) +(.-) +(.-) +(.-)$") - chr.boundingbox={ tonumber(llx),tonumber(lly),tonumber(urx),tonumber(ury) } - elseif k=='L' then - local plus,becomes=match(v,"^(.-) +(.-)$") - local ligatures=chr.ligatures - if ligatures then - 
ligatures[plus]=becomes - else - chr.ligatures={ [plus]=becomes } - end - end - end -end -local function get_kernpairs(data,kernpairs) - local characters=data.characters - for one,two,value in gmatch(kernpairs,"KPX +(.-) +(.-) +(.-)\n") do - local chr=characters[one] - if chr then - local kerns=chr.kerns - if kerns then - kerns[two]=tonumber(value) - else - chr.kerns={ [two]=tonumber(value) } - end - end - end -end -local function get_variables(data,fontmetrics) - for key,rest in gmatch(fontmetrics,"(%a+) *(.-)[\n\r]") do - local keyhandler=keys[key] - if keyhandler then - keyhandler(data,rest) - end - end -end -local function get_indexes(data,pfbname) - data.resources.filename=resolvers.unresolve(pfbname) - local pfbblob=fontloader.open(pfbname) - if pfbblob then - local characters=data.characters - local pfbdata=fontloader.to_table(pfbblob) - if pfbdata then - local glyphs=pfbdata.glyphs - if glyphs then - if trace_loading then - report_afm("getting index data from %a",pfbname) - end - for index,glyph in next,glyphs do - local name=glyph.name - if name then - local char=characters[name] - if char then - if trace_indexing then - report_afm("glyph %a has index %a",name,index) - end - char.index=index - end - end - end - elseif trace_loading then - report_afm("no glyph data in pfb file %a",pfbname) - end - elseif trace_loading then - report_afm("no data in pfb file %a",pfbname) - end - fontloader.close(pfbblob) - elseif trace_loading then - report_afm("invalid pfb file %a",pfbname) - end -end -local function readafm(filename) - local ok,afmblob,size=resolvers.loadbinfile(filename) - if ok and afmblob then - local data={ - resources={ - filename=resolvers.unresolve(filename), - version=afm.version, - creator="context mkiv", - }, - properties={ - hasitalics=false, - }, - goodies={}, - metadata={ - filename=file.removesuffix(file.basename(filename)) - }, - characters={ - }, - descriptions={ - }, - } - afmblob=gsub(afmblob,"StartCharMetrics(.-)EndCharMetrics",function(charmetrics) - if trace_loading then - report_afm("loading char metrics") - end - get_charmetrics(data,charmetrics,vector) - return "" - end) - afmblob=gsub(afmblob,"StartKernPairs(.-)EndKernPairs",function(kernpairs) - if trace_loading then - report_afm("loading kern pairs") - end - get_kernpairs(data,kernpairs) - return "" - end) - afmblob=gsub(afmblob,"StartFontMetrics%s+([%d%.]+)(.-)EndFontMetrics",function(version,fontmetrics) - if trace_loading then - report_afm("loading variables") - end - data.afmversion=version - get_variables(data,fontmetrics) - data.fontdimens=scan_comment(fontmetrics) - return "" - end) - return data - else - if trace_loading then - report_afm("no valid afm file %a",filename) - end - return nil - end -end -local addkerns,addligatures,addtexligatures,unify,normalize,fixnames -function afm.load(filename) - filename=resolvers.findfile(filename,'afm') or "" - if filename~="" and not fonts.names.ignoredfile(filename) then - local name=file.removesuffix(file.basename(filename)) - local data=containers.read(afm.cache,name) - local attr=lfs.attributes(filename) - local size,time=attr.size or 0,attr.modification or 0 - local pfbfile=file.replacesuffix(name,"pfb") - local pfbname=resolvers.findfile(pfbfile,"pfb") or "" - if pfbname=="" then - pfbname=resolvers.findfile(file.basename(pfbfile),"pfb") or "" - end - local pfbsize,pfbtime=0,0 - if pfbname~="" then - local attr=lfs.attributes(pfbname) - pfbsize=attr.size or 0 - pfbtime=attr.modification or 0 - end - if not data or data.size~=size or data.time~=time or 
data.pfbsize~=pfbsize or data.pfbtime~=pfbtime then - report_afm("reading %a",filename) - data=readafm(filename) - if data then - if pfbname~="" then - get_indexes(data,pfbname) - elseif trace_loading then - report_afm("no pfb file for %a",filename) - end - report_afm("unifying %a",filename) - unify(data,filename) - if afm.addligatures then - report_afm("add ligatures") - addligatures(data) - end - if afm.addtexligatures then - report_afm("add tex ligatures") - addtexligatures(data) - end - if afm.addkerns then - report_afm("add extra kerns") - addkerns(data) - end - normalize(data) - fixnames(data) - report_afm("add tounicode data") - fonts.mappings.addtounicode(data,filename) - data.size=size - data.time=time - data.pfbsize=pfbsize - data.pfbtime=pfbtime - report_afm("saving %a in cache",name) - data.resources.unicodes=nil - data=containers.write(afm.cache,name,data) - data=containers.read(afm.cache,name) - end - if applyruntimefixes and data then - applyruntimefixes(filename,data) - end - end - return data - else - return nil - end -end -local uparser=fonts.mappings.makenameparser() -unify=function(data,filename) - local unicodevector=fonts.encodings.agl.unicodes - local unicodes,names={},{} - local private=constructors.privateoffset - local descriptions=data.descriptions - for name,blob in next,data.characters do - local code=unicodevector[name] - if not code then - code=lpegmatch(uparser,name) - if not code then - code=private - private=private+1 - report_afm("assigning private slot %U for unknown glyph name %a",code,name) - end - end - local index=blob.index - unicodes[name]=code - names[name]=index - blob.name=name - descriptions[code]={ - boundingbox=blob.boundingbox, - width=blob.width, - kerns=blob.kerns, - index=index, - name=name, - } - end - for unicode,description in next,descriptions do - local kerns=description.kerns - if kerns then - local krn={} - for name,kern in next,kerns do - local unicode=unicodes[name] - if unicode then - krn[unicode]=kern - else - end - end - description.kerns=krn - end - end - data.characters=nil - local resources=data.resources - local filename=resources.filename or file.removesuffix(file.basename(filename)) - resources.filename=resolvers.unresolve(filename) - resources.unicodes=unicodes - resources.marks={} - resources.private=private -end -normalize=function(data) -end -fixnames=function(data) - for k,v in next,data.descriptions do - local n=v.name - local r=overloads[n] - if r then - local name=r.name - if trace_indexing then - report_afm("renaming characters %a to %a",n,name) - end - v.name=name - v.unicode=r.unicode - end - end -end -local addthem=function(rawdata,ligatures) - if ligatures then - local descriptions=rawdata.descriptions - local resources=rawdata.resources - local unicodes=resources.unicodes - for ligname,ligdata in next,ligatures do - local one=descriptions[unicodes[ligname]] - if one then - for _,pair in next,ligdata do - local two,three=unicodes[pair[1]],unicodes[pair[2]] - if two and three then - local ol=one.ligatures - if ol then - if not ol[two] then - ol[two]=three - end - else - one.ligatures={ [two]=three } - end - end - end - end - end - end -end -addligatures=function(rawdata) addthem(rawdata,afm.helpdata.ligatures ) end -addtexligatures=function(rawdata) addthem(rawdata,afm.helpdata.texligatures) end -addkerns=function(rawdata) - local descriptions=rawdata.descriptions - local resources=rawdata.resources - local unicodes=resources.unicodes - local function do_it_left(what) - if what then - for unicode,description 
in next,descriptions do - local kerns=description.kerns - if kerns then - local extrakerns - for complex,simple in next,what do - complex=unicodes[complex] - simple=unicodes[simple] - if complex and simple then - local ks=kerns[simple] - if ks and not kerns[complex] then - if extrakerns then - extrakerns[complex]=ks - else - extrakerns={ [complex]=ks } - end - end - end - end - if extrakerns then - description.extrakerns=extrakerns - end - end - end - end - end - local function do_it_copy(what) - if what then - for complex,simple in next,what do - complex=unicodes[complex] - simple=unicodes[simple] - if complex and simple then - local complexdescription=descriptions[complex] - if complexdescription then - local simpledescription=descriptions[complex] - if simpledescription then - local extrakerns - local kerns=simpledescription.kerns - if kerns then - for unicode,kern in next,kerns do - if extrakerns then - extrakerns[unicode]=kern - else - extrakerns={ [unicode]=kern } - end - end - end - local extrakerns=simpledescription.extrakerns - if extrakerns then - for unicode,kern in next,extrakerns do - if extrakerns then - extrakerns[unicode]=kern - else - extrakerns={ [unicode]=kern } - end - end - end - if extrakerns then - complexdescription.extrakerns=extrakerns - end - end - end - end - end - end - end - do_it_left(afm.helpdata.leftkerned) - do_it_left(afm.helpdata.bothkerned) - do_it_copy(afm.helpdata.bothkerned) - do_it_copy(afm.helpdata.rightkerned) -end -local function adddimensions(data) - if data then - for unicode,description in next,data.descriptions do - local bb=description.boundingbox - if bb then - local ht,dp=bb[4],-bb[2] - if ht==0 or ht<0 then - else - description.height=ht - end - if dp==0 or dp<0 then - else - description.depth=dp - end - end - end - end -end -local function copytotfm(data) - if data and data.descriptions then - local metadata=data.metadata - local resources=data.resources - local properties=derivetable(data.properties) - local descriptions=derivetable(data.descriptions) - local goodies=derivetable(data.goodies) - local characters={} - local parameters={} - local unicodes=resources.unicodes - for unicode,description in next,data.descriptions do - characters[unicode]={} - end - local filename=constructors.checkedfilename(resources) - local fontname=metadata.fontname or metadata.fullname - local fullname=metadata.fullname or metadata.fontname - local endash=0x0020 - local emdash=0x2014 - local spacer="space" - local spaceunits=500 - local monospaced=metadata.isfixedpitch - local charwidth=metadata.charwidth - local italicangle=metadata.italicangle - local charxheight=metadata.xheight and metadata.xheight>0 and metadata.xheight - properties.monospaced=monospaced - parameters.italicangle=italicangle - parameters.charwidth=charwidth - parameters.charxheight=charxheight - if properties.monospaced then - if descriptions[endash] then - spaceunits,spacer=descriptions[endash].width,"space" - end - if not spaceunits and descriptions[emdash] then - spaceunits,spacer=descriptions[emdash].width,"emdash" - end - if not spaceunits and charwidth then - spaceunits,spacer=charwidth,"charwidth" - end - else - if descriptions[endash] then - spaceunits,spacer=descriptions[endash].width,"space" - end - if not spaceunits and charwidth then - spaceunits,spacer=charwidth,"charwidth" - end - end - spaceunits=tonumber(spaceunits) - if spaceunits<200 then - end - parameters.slant=0 - parameters.space=spaceunits - parameters.space_stretch=500 - parameters.space_shrink=333 - 
parameters.x_height=400 - parameters.quad=1000 - if italicangle and italicangle~=0 then - parameters.italicangle=italicangle - parameters.italicfactor=math.cos(math.rad(90+italicangle)) - parameters.slant=- math.tan(italicangle*math.pi/180) - end - if monospaced then - parameters.space_stretch=0 - parameters.space_shrink=0 - elseif afm.syncspace then - parameters.space_stretch=spaceunits/2 - parameters.space_shrink=spaceunits/3 - end - parameters.extra_space=parameters.space_shrink - if charxheight then - parameters.x_height=charxheight - else - local x=0x0078 - if x then - local x=descriptions[x] - if x then - parameters.x_height=x.height - end - end - end - local fd=data.fontdimens - if fd and fd[8] and fd[9] and fd[10] then - for k,v in next,fd do - parameters[k]=v - end - end - parameters.designsize=(metadata.designsize or 10)*65536 - parameters.ascender=abs(metadata.ascender or 0) - parameters.descender=abs(metadata.descender or 0) - parameters.units=1000 - properties.spacer=spacer - properties.encodingbytes=2 - properties.format=fonts.formats[filename] or "type1" - properties.filename=filename - properties.fontname=fontname - properties.fullname=fullname - properties.psname=fullname - properties.name=filename or fullname or fontname - if next(characters) then - return { - characters=characters, - descriptions=descriptions, - parameters=parameters, - resources=resources, - properties=properties, - goodies=goodies, - } - end - end - return nil -end -function afm.setfeatures(tfmdata,features) - local okay=constructors.initializefeatures("afm",tfmdata,features,trace_features,report_afm) - if okay then - return constructors.collectprocessors("afm",tfmdata,features,trace_features,report_afm) - else - return {} - end -end -local function addtables(data) - local resources=data.resources - local lookuptags=resources.lookuptags - local unicodes=resources.unicodes - if not lookuptags then - lookuptags={} - resources.lookuptags=lookuptags - end - setmetatableindex(lookuptags,function(t,k) - local v=type(k)=="number" and ("lookup "..k) or k - t[k]=v - return v - end) - if not unicodes then - unicodes={} - resources.unicodes=unicodes - setmetatableindex(unicodes,function(t,k) - setmetatableindex(unicodes,nil) - for u,d in next,data.descriptions do - local n=d.name - if n then - t[n]=u - end - end - return rawget(t,k) - end) - end - constructors.addcoreunicodes(unicodes) -end -local function afmtotfm(specification) - local afmname=specification.filename or specification.name - if specification.forced=="afm" or specification.format=="afm" then - if trace_loading then - report_afm("forcing afm format for %a",afmname) - end - else - local tfmname=findbinfile(afmname,"ofm") or "" - if tfmname~="" then - if trace_loading then - report_afm("fallback from afm to tfm for %a",afmname) - end - return - end - end - if afmname~="" then - local features=constructors.checkedfeatures("afm",specification.features.normal) - specification.features.normal=features - constructors.hashinstance(specification,true) - specification=definers.resolve(specification) - local cache_id=specification.hash - local tfmdata=containers.read(constructors.cache,cache_id) - if not tfmdata then - local rawdata=afm.load(afmname) - if rawdata and next(rawdata) then - addtables(rawdata) - adddimensions(rawdata) - tfmdata=copytotfm(rawdata) - if tfmdata and next(tfmdata) then - local shared=tfmdata.shared - if not shared then - shared={} - tfmdata.shared=shared - end - shared.rawdata=rawdata - shared.features=features - 
shared.processes=afm.setfeatures(tfmdata,features) - end - elseif trace_loading then - report_afm("no (valid) afm file found with name %a",afmname) - end - tfmdata=containers.write(constructors.cache,cache_id,tfmdata) - end - return tfmdata - end -end -local function read_from_afm(specification) - local tfmdata=afmtotfm(specification) - if tfmdata then - tfmdata.properties.name=specification.name - tfmdata=constructors.scale(tfmdata,specification) - local allfeatures=tfmdata.shared.features or specification.features.normal - constructors.applymanipulators("afm",tfmdata,allfeatures,trace_features,report_afm) - fonts.loggers.register(tfmdata,'afm',specification) - end - return tfmdata -end -local function prepareligatures(tfmdata,ligatures,value) - if value then - local descriptions=tfmdata.descriptions - local hasligatures=false - for unicode,character in next,tfmdata.characters do - local description=descriptions[unicode] - local dligatures=description.ligatures - if dligatures then - local cligatures=character.ligatures - if not cligatures then - cligatures={} - character.ligatures=cligatures - end - for unicode,ligature in next,dligatures do - cligatures[unicode]={ - char=ligature, - type=0 - } - end - hasligatures=true - end - end - tfmdata.properties.hasligatures=hasligatures - end -end -local function preparekerns(tfmdata,kerns,value) - if value then - local rawdata=tfmdata.shared.rawdata - local resources=rawdata.resources - local unicodes=resources.unicodes - local descriptions=tfmdata.descriptions - local haskerns=false - for u,chr in next,tfmdata.characters do - local d=descriptions[u] - local newkerns=d[kerns] - if newkerns then - local kerns=chr.kerns - if not kerns then - kerns={} - chr.kerns=kerns - end - for k,v in next,newkerns do - local uk=unicodes[k] - if uk then - kerns[uk]=v - end - end - haskerns=true - end - end - tfmdata.properties.haskerns=haskerns - end -end -local list={ - [0x0027]=0x2019, -} -local function texreplacements(tfmdata,value) - local descriptions=tfmdata.descriptions - local characters=tfmdata.characters - for k,v in next,list do - characters [k]=characters [v] - descriptions[k]=descriptions[v] - end -end -local function ligatures (tfmdata,value) prepareligatures(tfmdata,'ligatures',value) end -local function texligatures(tfmdata,value) prepareligatures(tfmdata,'texligatures',value) end -local function kerns (tfmdata,value) preparekerns (tfmdata,'kerns',value) end -local function extrakerns (tfmdata,value) preparekerns (tfmdata,'extrakerns',value) end -registerafmfeature { - name="liga", - description="traditional ligatures", - initializers={ - base=ligatures, - node=ligatures, - } -} -registerafmfeature { - name="kern", - description="intercharacter kerning", - initializers={ - base=kerns, - node=kerns, - } -} -registerafmfeature { - name="extrakerns", - description="additional intercharacter kerning", - initializers={ - base=extrakerns, - node=extrakerns, - } -} -registerafmfeature { - name='tlig', - description='tex ligatures', - initializers={ - base=texligatures, - node=texligatures, - } -} -registerafmfeature { - name='trep', - description='tex replacements', - initializers={ - base=texreplacements, - node=texreplacements, - } -} -local check_tfm=readers.check_tfm -fonts.formats.afm="type1" -fonts.formats.pfb="type1" -local function check_afm(specification,fullname) - local foundname=findbinfile(fullname,'afm') or "" - if foundname=="" then - foundname=fonts.names.getfilename(fullname,"afm") or "" - end - if foundname=="" and afm.autoprefixed 
then - local encoding,shortname=match(fullname,"^(.-)%-(.*)$") - if encoding and shortname and fonts.encodings.known[encoding] then - shortname=findbinfile(shortname,'afm') or "" - if shortname~="" then - foundname=shortname - if trace_defining then - report_afm("stripping encoding prefix from filename %a",afmname) - end - end - end - end - if foundname~="" then - specification.filename=foundname - specification.format="afm" - return read_from_afm(specification) - end -end -function readers.afm(specification,method) - local fullname,tfmdata=specification.filename or "",nil - if fullname=="" then - local forced=specification.forced or "" - if forced~="" then - tfmdata=check_afm(specification,specification.name.."."..forced) - end - if not tfmdata then - method=method or definers.method or "afm or tfm" - if method=="tfm" then - tfmdata=check_tfm(specification,specification.name) - elseif method=="afm" then - tfmdata=check_afm(specification,specification.name) - elseif method=="tfm or afm" then - tfmdata=check_tfm(specification,specification.name) or check_afm(specification,specification.name) - else - tfmdata=check_afm(specification,specification.name) or check_tfm(specification,specification.name) - end - end - else - tfmdata=check_afm(specification,fullname) - end - return tfmdata -end -function readers.pfb(specification,method) - local original=specification.specification - if trace_defining then - report_afm("using afm reader for %a",original) - end - specification.specification=gsub(original,"%.pfb",".afm") - specification.forced="afm" - return readers.afm(specification,method) -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-afk']={ - version=1.001, - comment="companion to font-afm.lua", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files", - dataonly=true, -} -local allocate=utilities.storage.allocate -fonts.handlers.afm.helpdata={ - ligatures=allocate { - ['f']={ - { 'f','ff' }, - { 'i','fi' }, - { 'l','fl' }, - }, - ['ff']={ - { 'i','ffi' } - }, - ['fi']={ - { 'i','fii' } - }, - ['fl']={ - { 'i','fli' } - }, - ['s']={ - { 't','st' } - }, - ['i']={ - { 'j','ij' } - }, - }, - texligatures=allocate { - ['quoteleft']={ - { 'quoteleft','quotedblleft' } - }, - ['quoteright']={ - { 'quoteright','quotedblright' } - }, - ['hyphen']={ - { 'hyphen','endash' } - }, - ['endash']={ - { 'hyphen','emdash' } - } - }, - leftkerned=allocate { - AEligature="A",aeligature="a", - OEligature="O",oeligature="o", - IJligature="I",ijligature="i", - AE="A",ae="a", - OE="O",oe="o", - IJ="I",ij="i", - Ssharp="S",ssharp="s", - }, - rightkerned=allocate { - AEligature="E",aeligature="e", - OEligature="E",oeligature="e", - IJligature="J",ijligature="j", - AE="E",ae="e", - OE="E",oe="e", - IJ="J",ij="j", - Ssharp="S",ssharp="s", - }, - bothkerned=allocate { - Acircumflex="A",acircumflex="a", - Ccircumflex="C",ccircumflex="c", - Ecircumflex="E",ecircumflex="e", - Gcircumflex="G",gcircumflex="g", - Hcircumflex="H",hcircumflex="h", - Icircumflex="I",icircumflex="i", - Jcircumflex="J",jcircumflex="j", - Ocircumflex="O",ocircumflex="o", - Scircumflex="S",scircumflex="s", - Ucircumflex="U",ucircumflex="u", - Wcircumflex="W",wcircumflex="w", - Ycircumflex="Y",ycircumflex="y", - Agrave="A",agrave="a", - Egrave="E",egrave="e", - Igrave="I",igrave="i", - Ograve="O",ograve="o", - Ugrave="U",ugrave="u", - Ygrave="Y",ygrave="y", - 
Atilde="A",atilde="a", - Itilde="I",itilde="i", - Otilde="O",otilde="o", - Utilde="U",utilde="u", - Ntilde="N",ntilde="n", - Adiaeresis="A",adiaeresis="a",Adieresis="A",adieresis="a", - Ediaeresis="E",ediaeresis="e",Edieresis="E",edieresis="e", - Idiaeresis="I",idiaeresis="i",Idieresis="I",idieresis="i", - Odiaeresis="O",odiaeresis="o",Odieresis="O",odieresis="o", - Udiaeresis="U",udiaeresis="u",Udieresis="U",udieresis="u", - Ydiaeresis="Y",ydiaeresis="y",Ydieresis="Y",ydieresis="y", - Aacute="A",aacute="a", - Cacute="C",cacute="c", - Eacute="E",eacute="e", - Iacute="I",iacute="i", - Lacute="L",lacute="l", - Nacute="N",nacute="n", - Oacute="O",oacute="o", - Racute="R",racute="r", - Sacute="S",sacute="s", - Uacute="U",uacute="u", - Yacute="Y",yacute="y", - Zacute="Z",zacute="z", - Dstroke="D",dstroke="d", - Hstroke="H",hstroke="h", - Tstroke="T",tstroke="t", - Cdotaccent="C",cdotaccent="c", - Edotaccent="E",edotaccent="e", - Gdotaccent="G",gdotaccent="g", - Idotaccent="I",idotaccent="i", - Zdotaccent="Z",zdotaccent="z", - Amacron="A",amacron="a", - Emacron="E",emacron="e", - Imacron="I",imacron="i", - Omacron="O",omacron="o", - Umacron="U",umacron="u", - Ccedilla="C",ccedilla="c", - Kcedilla="K",kcedilla="k", - Lcedilla="L",lcedilla="l", - Ncedilla="N",ncedilla="n", - Rcedilla="R",rcedilla="r", - Scedilla="S",scedilla="s", - Tcedilla="T",tcedilla="t", - Ohungarumlaut="O",ohungarumlaut="o", - Uhungarumlaut="U",uhungarumlaut="u", - Aogonek="A",aogonek="a", - Eogonek="E",eogonek="e", - Iogonek="I",iogonek="i", - Uogonek="U",uogonek="u", - Aring="A",aring="a", - Uring="U",uring="u", - Abreve="A",abreve="a", - Ebreve="E",ebreve="e", - Gbreve="G",gbreve="g", - Ibreve="I",ibreve="i", - Obreve="O",obreve="o", - Ubreve="U",ubreve="u", - Ccaron="C",ccaron="c", - Dcaron="D",dcaron="d", - Ecaron="E",ecaron="e", - Lcaron="L",lcaron="l", - Ncaron="N",ncaron="n", - Rcaron="R",rcaron="r", - Scaron="S",scaron="s", - Tcaron="T",tcaron="t", - Zcaron="Z",zcaron="z", - dotlessI="I",dotlessi="i", - dotlessJ="J",dotlessj="j", - AEligature="AE",aeligature="ae",AE="AE",ae="ae", - OEligature="OE",oeligature="oe",OE="OE",oe="oe", - IJligature="IJ",ijligature="ij",IJ="IJ",ij="ij", - Lstroke="L",lstroke="l",Lslash="L",lslash="l", - Ostroke="O",ostroke="o",Oslash="O",oslash="o", - Ssharp="SS",ssharp="ss", - Aumlaut="A",aumlaut="a", - Eumlaut="E",eumlaut="e", - Iumlaut="I",iumlaut="i", - Oumlaut="O",oumlaut="o", - Uumlaut="U",uumlaut="u", - } -} - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['luatex-fonts-tfm']={ - version=1.001, - comment="companion to luatex-*.tex", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end -local fonts=fonts -local tfm={} -fonts.handlers.tfm=tfm -fonts.formats.tfm="type1" -function fonts.readers.tfm(specification) - local fullname=specification.filename or "" - if fullname=="" then - local forced=specification.forced or "" - if forced~="" then - fullname=specification.name.."."..forced - else - fullname=specification.name - end - end - local foundname=resolvers.findbinfile(fullname,'tfm') or "" - if foundname=="" then - foundname=resolvers.findbinfile(fullname,'ofm') or "" - end - if foundname~="" then - specification.filename=foundname - specification.format="ofm" - return 
font.read_tfm(specification.filename,specification.size) - end -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-oti']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local lower=string.lower -local fonts=fonts -local constructors=fonts.constructors -local otf=constructors.newhandler("otf") -local otffeatures=constructors.newfeatures("otf") -local otftables=otf.tables -local registerotffeature=otffeatures.register -local allocate=utilities.storage.allocate -registerotffeature { - name="features", - description="initialization of feature handler", - default=true, -} -local function setmode(tfmdata,value) - if value then - tfmdata.properties.mode=lower(value) - end -end -local function setlanguage(tfmdata,value) - if value then - local cleanvalue=lower(value) - local languages=otftables and otftables.languages - local properties=tfmdata.properties - if not languages then - properties.language=cleanvalue - elseif languages[value] then - properties.language=cleanvalue - else - properties.language="dflt" - end - end -end -local function setscript(tfmdata,value) - if value then - local cleanvalue=lower(value) - local scripts=otftables and otftables.scripts - local properties=tfmdata.properties - if not scripts then - properties.script=cleanvalue - elseif scripts[value] then - properties.script=cleanvalue - else - properties.script="dflt" - end - end -end -registerotffeature { - name="mode", - description="mode", - initializers={ - base=setmode, - node=setmode, - } -} -registerotffeature { - name="language", - description="language", - initializers={ - base=setlanguage, - node=setlanguage, - } -} -registerotffeature { - name="script", - description="script", - initializers={ - base=setscript, - node=setscript, - } -} - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-otf']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local utfbyte=utf.byte -local format,gmatch,gsub,find,match,lower,strip=string.format,string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip -local type,next,tonumber,tostring=type,next,tonumber,tostring -local abs=math.abs -local insert=table.insert -local lpegmatch=lpeg.match -local reversed,concat,remove,sortedkeys=table.reversed,table.concat,table.remove,table.sortedkeys -local ioflush=io.flush -local fastcopy,tohash,derivetable=table.fastcopy,table.tohash,table.derive -local formatters=string.formatters -local P,R,S,C,Ct,lpegmatch=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Ct,lpeg.match -local setmetatableindex=table.setmetatableindex -local allocate=utilities.storage.allocate -local registertracker=trackers.register -local registerdirective=directives.register -local starttiming=statistics.starttiming -local stoptiming=statistics.stoptiming -local elapsedtime=statistics.elapsedtime -local findbinfile=resolvers.findbinfile -local trace_private=false registertracker("otf.private",function(v) trace_private=v end) -local trace_loading=false registertracker("otf.loading",function(v) trace_loading=v end) -local trace_features=false registertracker("otf.features",function(v) 
trace_features=v end) -local trace_dynamics=false registertracker("otf.dynamics",function(v) trace_dynamics=v end) -local trace_sequences=false registertracker("otf.sequences",function(v) trace_sequences=v end) -local trace_markwidth=false registertracker("otf.markwidth",function(v) trace_markwidth=v end) -local trace_defining=false registertracker("fonts.defining",function(v) trace_defining=v end) -local compact_lookups=true registertracker("otf.compactlookups",function(v) compact_lookups=v end) -local purge_names=true registertracker("otf.purgenames",function(v) purge_names=v end) -local report_otf=logs.reporter("fonts","otf loading") -local fonts=fonts -local otf=fonts.handlers.otf -otf.glists={ "gsub","gpos" } -otf.version=2.802 -otf.cache=containers.define("fonts","otf",otf.version,true) -local fontdata=fonts.hashes.identifiers -local chardata=characters and characters.data -local definers=fonts.definers -local readers=fonts.readers -local constructors=fonts.constructors -local otffeatures=constructors.newfeatures("otf") -local registerotffeature=otffeatures.register -local enhancers=allocate() -otf.enhancers=enhancers -local patches={} -enhancers.patches=patches -local forceload=false -local cleanup=0 -local packdata=true -local syncspace=true -local forcenotdef=false -local includesubfonts=false -local overloadkerns=false -local applyruntimefixes=fonts.treatments and fonts.treatments.applyfixes -local wildcard="*" -local default="dflt" -local fontloaderfields=fontloader.fields -local mainfields=nil -local glyphfields=nil -local formats=fonts.formats -formats.otf="opentype" -formats.ttf="truetype" -formats.ttc="truetype" -formats.dfont="truetype" -registerdirective("fonts.otf.loader.cleanup",function(v) cleanup=tonumber(v) or (v and 1) or 0 end) -registerdirective("fonts.otf.loader.force",function(v) forceload=v end) -registerdirective("fonts.otf.loader.pack",function(v) packdata=v end) -registerdirective("fonts.otf.loader.syncspace",function(v) syncspace=v end) -registerdirective("fonts.otf.loader.forcenotdef",function(v) forcenotdef=v end) -registerdirective("fonts.otf.loader.overloadkerns",function(v) overloadkerns=v end) -function otf.fileformat(filename) - local leader=lower(io.loadchunk(filename,4)) - local suffix=lower(file.suffix(filename)) - if leader=="otto" then - return formats.otf,suffix=="otf" - elseif leader=="ttcf" then - return formats.ttc,suffix=="ttc" - elseif suffix=="ttc" then - return formats.ttc,true - elseif suffix=="dfont" then - return formats.dfont,true - else - return formats.ttf,suffix=="ttf" - end -end -local function otf_format(filename) - local format,okay=otf.fileformat(filename) - if not okay then - report_otf("font %a is actually an %a file",filename,format) - end - return format -end -local function load_featurefile(raw,featurefile) - if featurefile and featurefile~="" then - if trace_loading then - report_otf("using featurefile %a",featurefile) - end - fontloader.apply_featurefile(raw,featurefile) - end -end -local function showfeatureorder(rawdata,filename) - local sequences=rawdata.resources.sequences - if sequences and #sequences>0 then - if trace_loading then - report_otf("font %a has %s sequences",filename,#sequences) - report_otf(" ") - end - for nos=1,#sequences do - local sequence=sequences[nos] - local typ=sequence.type or "no-type" - local name=sequence.name or "no-name" - local subtables=sequence.subtables or { "no-subtables" } - local features=sequence.features - if trace_loading then - report_otf("%3i %-15s %-20s [% 
t]",nos,name,typ,subtables) - end - if features then - for feature,scripts in next,features do - local tt={} - if type(scripts)=="table" then - for script,languages in next,scripts do - local ttt={} - for language,_ in next,languages do - ttt[#ttt+1]=language - end - tt[#tt+1]=formatters["[%s: % t]"](script,ttt) - end - if trace_loading then - report_otf(" %s: % t",feature,tt) - end - else - if trace_loading then - report_otf(" %s: %S",feature,scripts) - end - end - end - end - end - if trace_loading then - report_otf("\n") - end - elseif trace_loading then - report_otf("font %a has no sequences",filename) - end -end -local valid_fields=table.tohash { - "ascent", - "cidinfo", - "copyright", - "descent", - "design_range_bottom", - "design_range_top", - "design_size", - "encodingchanged", - "extrema_bound", - "familyname", - "fontname", - "fontstyle_id", - "fontstyle_name", - "fullname", - "hasvmetrics", - "horiz_base", - "issans", - "isserif", - "italicangle", - "macstyle", - "onlybitmaps", - "origname", - "os2_version", - "pfminfo", - "serifcheck", - "sfd_version", - "strokedfont", - "strokewidth", - "table_version", - "ttf_tables", - "uni_interp", - "uniqueid", - "units_per_em", - "upos", - "use_typo_metrics", - "uwidth", - "validation_state", - "version", - "vert_base", - "weight", - "weight_width_slope_only", -} -local ordered_enhancers={ - "prepare tables", - "prepare glyphs", - "prepare lookups", - "analyze glyphs", - "analyze math", - "reorganize lookups", - "reorganize mark classes", - "reorganize anchor classes", - "reorganize glyph kerns", - "reorganize glyph lookups", - "reorganize glyph anchors", - "merge kern classes", - "reorganize features", - "reorganize subtables", - "check glyphs", - "check metadata", - "check extra features", - "prepare tounicode", - "check encoding", - "add duplicates", - "cleanup tables", - "compact lookups", - "purge names", -} -local actions=allocate() -local before=allocate() -local after=allocate() -patches.before=before -patches.after=after -local function enhance(name,data,filename,raw) - local enhancer=actions[name] - if enhancer then - if trace_loading then - report_otf("apply enhancement %a to file %a",name,filename) - ioflush() - end - enhancer(data,filename,raw) - else - end -end -function enhancers.apply(data,filename,raw) - local basename=file.basename(lower(filename)) - if trace_loading then - report_otf("%s enhancing file %a","start",filename) - end - ioflush() - for e=1,#ordered_enhancers do - local enhancer=ordered_enhancers[e] - local b=before[enhancer] - if b then - for pattern,action in next,b do - if find(basename,pattern) then - action(data,filename,raw) - end - end - end - enhance(enhancer,data,filename,raw) - local a=after[enhancer] - if a then - for pattern,action in next,a do - if find(basename,pattern) then - action(data,filename,raw) - end - end - end - ioflush() - end - if trace_loading then - report_otf("%s enhancing file %a","stop",filename) - end - ioflush() -end -function patches.register(what,where,pattern,action) - local pw=patches[what] - if pw then - local ww=pw[where] - if ww then - ww[pattern]=action - else - pw[where]={ [pattern]=action} - end - end -end -function patches.report(fmt,...) 
- if trace_loading then - report_otf("patching: %s",formatters[fmt](...)) - end -end -function enhancers.register(what,action) - actions[what]=action -end -function otf.load(filename,sub,featurefile) - local base=file.basename(file.removesuffix(filename)) - local name=file.removesuffix(base) - local attr=lfs.attributes(filename) - local size=attr and attr.size or 0 - local time=attr and attr.modification or 0 - if featurefile then - name=name.."@"..file.removesuffix(file.basename(featurefile)) - end - if sub=="" then - sub=false - end - local hash=name - if sub then - hash=hash.."-"..sub - end - hash=containers.cleanname(hash) - local featurefiles - if featurefile then - featurefiles={} - for s in gmatch(featurefile,"[^,]+") do - local name=resolvers.findfile(file.addsuffix(s,'fea'),'fea') or "" - if name=="" then - report_otf("loading error, no featurefile %a",s) - else - local attr=lfs.attributes(name) - featurefiles[#featurefiles+1]={ - name=name, - size=attr and attr.size or 0, - time=attr and attr.modification or 0, - } - end - end - if #featurefiles==0 then - featurefiles=nil - end - end - local data=containers.read(otf.cache,hash) - local reload=not data or data.size~=size or data.time~=time - if forceload then - report_otf("forced reload of %a due to hard coded flag",filename) - reload=true - end - if not reload then - local featuredata=data.featuredata - if featurefiles then - if not featuredata or #featuredata~=#featurefiles then - reload=true - else - for i=1,#featurefiles do - local fi,fd=featurefiles[i],featuredata[i] - if fi.name~=fd.name or fi.size~=fd.size or fi.time~=fd.time then - reload=true - break - end - end - end - elseif featuredata then - reload=true - end - if reload then - report_otf("loading: forced reload due to changed featurefile specification %a",featurefile) - end - end - if reload then - report_otf("loading %a, hash %a",filename,hash) - local fontdata,messages - if sub then - fontdata,messages=fontloader.open(filename,sub) - else - fontdata,messages=fontloader.open(filename) - end - if fontdata then - mainfields=mainfields or (fontloaderfields and fontloaderfields(fontdata)) - end - if trace_loading and messages and #messages>0 then - if type(messages)=="string" then - report_otf("warning: %s",messages) - else - for m=1,#messages do - report_otf("warning: %S",messages[m]) - end - end - else - report_otf("loading done") - end - if fontdata then - if featurefiles then - for i=1,#featurefiles do - load_featurefile(fontdata,featurefiles[i].name) - end - end - local unicodes={ - } - local splitter=lpeg.splitter(" ",unicodes) - data={ - size=size, - time=time, - format=otf_format(filename), - featuredata=featurefiles, - resources={ - filename=resolvers.unresolve(filename), - version=otf.version, - creator="context mkiv", - unicodes=unicodes, - indices={ - }, - duplicates={ - }, - variants={ - }, - lookuptypes={}, - }, - warnings={}, - metadata={ - }, - properties={ - }, - descriptions={}, - goodies={}, - helpers={ - tounicodelist=splitter, - tounicodetable=Ct(splitter), - }, - } - starttiming(data) - report_otf("file size: %s",size) - enhancers.apply(data,filename,fontdata) - local packtime={} - if packdata then - if cleanup>0 then - collectgarbage("collect") - end - starttiming(packtime) - enhance("pack",data,filename,nil) - stoptiming(packtime) - end - report_otf("saving %a in cache",filename) - data=containers.write(otf.cache,hash,data) - if cleanup>1 then - collectgarbage("collect") - end - stoptiming(data) - if elapsedtime then - report_otf("preprocessing 
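-- [editorial sketch, not part of the fontloader sources] otf.load above only
-- reuses a cached table when the font file (and any feature files) still have
-- the size and modification time recorded when the cache entry was written;
-- otherwise it reloads via fontloader.open. A standalone sketch of that
-- freshness test (cache_is_fresh is an illustrative name; attr mimics the
-- shape of an lfs.attributes() result):
local function cache_is_fresh(entry, attr)
  if not entry or not attr then
    return false                             -- nothing cached, or file gone
  end
  return entry.size == attr.size
     and entry.time == attr.modification     -- both must match exactly
end
-- usage: cache_is_fresh({ size = 123, time = 456 },
--                       { size = 123, modification = 456 }) --> true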
and caching time %s, packtime %s", - elapsedtime(data),packdata and elapsedtime(packtime) or 0) - end - fontloader.close(fontdata) - if cleanup>3 then - collectgarbage("collect") - end - data=containers.read(otf.cache,hash) - if cleanup>2 then - collectgarbage("collect") - end - else - data=nil - report_otf("loading failed due to read error") - end - end - if data then - if trace_defining then - report_otf("loading from cache using hash %a",hash) - end - enhance("unpack",data,filename,nil,false) - local resources=data.resources - local lookuptags=resources.lookuptags - local unicodes=resources.unicodes - if not lookuptags then - lookuptags={} - resources.lookuptags=lookuptags - end - setmetatableindex(lookuptags,function(t,k) - local v=type(k)=="number" and ("lookup "..k) or k - t[k]=v - return v - end) - if not unicodes then - unicodes={} - resources.unicodes=unicodes - setmetatableindex(unicodes,function(t,k) - setmetatableindex(unicodes,nil) - for u,d in next,data.descriptions do - local n=d.name - if n then - t[n]=u - else - end - end - return rawget(t,k) - end) - end - constructors.addcoreunicodes(unicodes) - if applyruntimefixes then - applyruntimefixes(filename,data) - end - enhance("add dimensions",data,filename,nil,false) - if trace_sequences then - showfeatureorder(data,filename) - end - end - return data -end -local mt={ - __index=function(t,k) - if k=="height" then - local ht=t.boundingbox[4] - return ht<0 and 0 or ht - elseif k=="depth" then - local dp=-t.boundingbox[2] - return dp<0 and 0 or dp - elseif k=="width" then - return 0 - elseif k=="name" then - return forcenotdef and ".notdef" - end - end -} -actions["prepare tables"]=function(data,filename,raw) - data.properties.hasitalics=false -end -actions["add dimensions"]=function(data,filename) - if data then - local descriptions=data.descriptions - local resources=data.resources - local defaultwidth=resources.defaultwidth or 0 - local defaultheight=resources.defaultheight or 0 - local defaultdepth=resources.defaultdepth or 0 - local basename=trace_markwidth and file.basename(filename) - for _,d in next,descriptions do - local bb,wd=d.boundingbox,d.width - if not wd then - d.width=defaultwidth - elseif trace_markwidth and wd~=0 and d.class=="mark" then - report_otf("mark %a with width %b found in %a",d.name or "",wd,basename) - end - if bb then - local ht,dp=bb[4],-bb[2] - if ht==0 or ht<0 then - else - d.height=ht - end - if dp==0 or dp<0 then - else - d.depth=dp - end - end - end - end -end -local function somecopy(old) - if old then - local new={} - if type(old)=="table" then - for k,v in next,old do - if k=="glyphs" then - elseif type(v)=="table" then - new[k]=somecopy(v) - else - new[k]=v - end - end - else - for i=1,#mainfields do - local k=mainfields[i] - local v=old[k] - if k=="glyphs" then - elseif type(v)=="table" then - new[k]=somecopy(v) - else - new[k]=v - end - end - end - return new - else - return {} - end -end -actions["prepare glyphs"]=function(data,filename,raw) - local rawglyphs=raw.glyphs - local rawsubfonts=raw.subfonts - local rawcidinfo=raw.cidinfo - local criterium=constructors.privateoffset - local private=criterium - local resources=data.resources - local metadata=data.metadata - local properties=data.properties - local descriptions=data.descriptions - local unicodes=resources.unicodes - local indices=resources.indices - local duplicates=resources.duplicates - local variants=resources.variants - if rawsubfonts then - metadata.subfonts=includesubfonts and {} - properties.cidinfo=rawcidinfo - if 
rawcidinfo.registry then - local cidmap=fonts.cid.getmap(rawcidinfo) - if cidmap then - rawcidinfo.usedname=cidmap.usedname - local nofnames,nofunicodes=0,0 - local cidunicodes,cidnames=cidmap.unicodes,cidmap.names - for cidindex=1,#rawsubfonts do - local subfont=rawsubfonts[cidindex] - local cidglyphs=subfont.glyphs - if includesubfonts then - metadata.subfonts[cidindex]=somecopy(subfont) - end - for index=0,subfont.glyphcnt-1 do - local glyph=cidglyphs[index] - if glyph then - local unicode=glyph.unicode - if unicode>=0x00E000 and unicode<=0x00F8FF then - unicode=-1 - elseif unicode>=0x0F0000 and unicode<=0x0FFFFD then - unicode=-1 - elseif unicode>=0x100000 and unicode<=0x10FFFD then - unicode=-1 - end - local name=glyph.name or cidnames[index] - if not unicode or unicode==-1 then - unicode=cidunicodes[index] - end - if unicode and descriptions[unicode] then - if trace_private then - report_otf("preventing glyph %a at index %H to overload unicode %U",name or "noname",index,unicode) - end - unicode=-1 - end - if not unicode or unicode==-1 then - if not name then - name=format("u%06X.ctx",private) - end - unicode=private - unicodes[name]=private - if trace_private then - report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private) - end - private=private+1 - nofnames=nofnames+1 - else - if not name then - name=format("u%06X.ctx",unicode) - end - unicodes[name]=unicode - nofunicodes=nofunicodes+1 - end - indices[index]=unicode - local description={ - boundingbox=glyph.boundingbox, - name=glyph.name or name or "unknown", - cidindex=cidindex, - index=index, - glyph=glyph, - } - descriptions[unicode]=description - else - end - end - end - if trace_loading then - report_otf("cid font remapped, %s unicode points, %s symbolic names, %s glyphs",nofunicodes,nofnames,nofunicodes+nofnames) - end - elseif trace_loading then - report_otf("unable to remap cid font, missing cid file for %a",filename) - end - elseif trace_loading then - report_otf("font %a has no glyphs",filename) - end - else - for index=0,raw.glyphcnt-1 do - local glyph=rawglyphs[index] - if glyph then - local unicode=glyph.unicode - local name=glyph.name - if not unicode or unicode==-1 then - unicode=private - unicodes[name]=private - if trace_private then - report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private) - end - private=private+1 - else - if unicode>criterium then - local taken=descriptions[unicode] - if taken then - if unicode>=private then - private=unicode+1 - else - private=private+1 - end - descriptions[private]=taken - unicodes[taken.name]=private - indices[taken.index]=private - if trace_private then - report_otf("slot %U is moved to %U due to private in font",unicode) - end - else - if unicode>=private then - private=unicode+1 - end - end - end - unicodes[name]=unicode - end - indices[index]=unicode - descriptions[unicode]={ - boundingbox=glyph.boundingbox, - name=name, - index=index, - glyph=glyph, - } - local altuni=glyph.altuni - if altuni then - for i=1,#altuni do - local a=altuni[i] - local u=a.unicode - local v=a.variant - if v then - local vv=variants[v] - if vv then - vv[u]=unicode - else - vv={ [u]=unicode } - variants[v]=vv - end - end - end - end - else - report_otf("potential problem: glyph %U is used but empty",index) - end - end - end - resources.private=private -end -actions["check encoding"]=function(data,filename,raw) - local descriptions=data.descriptions - local resources=data.resources - local properties=data.properties - local 
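-- [editorial sketch, not part of the fontloader sources] "prepare glyphs"
-- above treats codepoints in the three Unicode private-use ranges as unusable
-- and reassigns such glyphs to internal private slots instead. The ranges it
-- tests are exactly these (is_private_use is an illustrative name):
local function is_private_use(u)
  return (u >= 0x00E000 and u <= 0x00F8FF)     -- BMP Private Use Area
      or (u >= 0x0F0000 and u <= 0x0FFFFD)     -- Supplementary PUA-A
      or (u >= 0x100000 and u <= 0x10FFFD)     -- Supplementary PUA-B
end
-- e.g. is_private_use(0xE001) --> true, is_private_use(0x0041) --> false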
unicodes=resources.unicodes - local indices=resources.indices - local duplicates=resources.duplicates - local mapdata=raw.map or {} - local unicodetoindex=mapdata and mapdata.map or {} - local indextounicode=mapdata and mapdata.backmap or {} - local encname=lower(data.enc_name or mapdata.enc_name or "") - local criterium=0xFFFF - local privateoffset=constructors.privateoffset - if find(encname,"unicode") then - if trace_loading then - report_otf("checking embedded unicode map %a",encname) - end - local reported={} - for maybeunicode,index in next,unicodetoindex do - if descriptions[maybeunicode] then - else - local unicode=indices[index] - if not unicode then - elseif maybeunicode==unicode then - elseif unicode>privateoffset then - else - local d=descriptions[unicode] - if d then - local c=d.copies - if c then - c[maybeunicode]=true - else - d.copies={ [maybeunicode]=true } - end - elseif index and not reported[index] then - report_otf("missing index %i",index) - reported[index]=true - end - end - end - end - for unicode,data in next,descriptions do - local d=data.copies - if d then - duplicates[unicode]=sortedkeys(d) - data.copies=nil - end - end - elseif properties.cidinfo then - report_otf("warning: no unicode map, used cidmap %a",properties.cidinfo.usedname) - else - report_otf("warning: non unicode map %a, only using glyph unicode data",encname or "whatever") - end - if mapdata then - mapdata.map={} - mapdata.backmap={} - end -end -actions["add duplicates"]=function(data,filename,raw) - local descriptions=data.descriptions - local resources=data.resources - local properties=data.properties - local unicodes=resources.unicodes - local indices=resources.indices - local duplicates=resources.duplicates - for unicode,d in next,duplicates do - local nofduplicates=#d - if nofduplicates>4 then - if trace_loading then - report_otf("ignoring excessive duplicates of %U (n=%s)",unicode,nofduplicates) - end - else - for i=1,nofduplicates do - local u=d[i] - if not descriptions[u] then - local description=descriptions[unicode] - local n=0 - for _,description in next,descriptions do - local kerns=description.kerns - if kerns then - for _,k in next,kerns do - local ku=k[unicode] - if ku then - k[u]=ku - n=n+1 - end - end - end - end - if u>0 then - local duplicate=table.copy(description) - duplicate.comment=format("copy of U+%05X",unicode) - descriptions[u]=duplicate - if trace_loading then - report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n) - end - end - end - end - end - end -end -actions["analyze glyphs"]=function(data,filename,raw) - local descriptions=data.descriptions - local resources=data.resources - local metadata=data.metadata - local properties=data.properties - local hasitalics=false - local widths={} - local marks={} - for unicode,description in next,descriptions do - local glyph=description.glyph - local italic=glyph.italic_correction - if not italic then - elseif italic==0 then - else - description.italic=italic - hasitalics=true - end - local width=glyph.width - widths[width]=(widths[width] or 0)+1 - local class=glyph.class - if class then - if class=="mark" then - marks[unicode]=true - end - description.class=class - end - end - properties.hasitalics=hasitalics - resources.marks=marks - local wd,most=0,1 - for k,v in next,widths do - if v>most then - wd,most=k,v - end - end - if most>1000 then - if trace_loading then - report_otf("most common width: %s (%s times), sharing (cjk font)",wd,most) - end - for unicode,description in next,descriptions 
do - if description.width==wd then - else - description.width=description.glyph.width - end - end - resources.defaultwidth=wd - else - for unicode,description in next,descriptions do - description.width=description.glyph.width - end - end -end -actions["reorganize mark classes"]=function(data,filename,raw) - local mark_classes=raw.mark_classes - if mark_classes then - local resources=data.resources - local unicodes=resources.unicodes - local markclasses={} - resources.markclasses=markclasses - for name,class in next,mark_classes do - local t={} - for s in gmatch(class,"[^ ]+") do - t[unicodes[s]]=true - end - markclasses[name]=t - end - end -end -actions["reorganize features"]=function(data,filename,raw) - local features={} - data.resources.features=features - for k,what in next,otf.glists do - local dw=raw[what] - if dw then - local f={} - features[what]=f - for i=1,#dw do - local d=dw[i] - local dfeatures=d.features - if dfeatures then - for i=1,#dfeatures do - local df=dfeatures[i] - local tag=strip(lower(df.tag)) - local ft=f[tag] - if not ft then - ft={} - f[tag]=ft - end - local dscripts=df.scripts - for i=1,#dscripts do - local d=dscripts[i] - local languages=d.langs - local script=strip(lower(d.script)) - local fts=ft[script] if not fts then fts={} ft[script]=fts end - for i=1,#languages do - fts[strip(lower(languages[i]))]=true - end - end - end - end - end - end - end -end -actions["reorganize anchor classes"]=function(data,filename,raw) - local resources=data.resources - local anchor_to_lookup={} - local lookup_to_anchor={} - resources.anchor_to_lookup=anchor_to_lookup - resources.lookup_to_anchor=lookup_to_anchor - local classes=raw.anchor_classes - if classes then - for c=1,#classes do - local class=classes[c] - local anchor=class.name - local lookups=class.lookup - if type(lookups)~="table" then - lookups={ lookups } - end - local a=anchor_to_lookup[anchor] - if not a then - a={} - anchor_to_lookup[anchor]=a - end - for l=1,#lookups do - local lookup=lookups[l] - local l=lookup_to_anchor[lookup] - if l then - l[anchor]=true - else - l={ [anchor]=true } - lookup_to_anchor[lookup]=l - end - a[lookup]=true - end - end - end -end -actions["prepare tounicode"]=function(data,filename,raw) - fonts.mappings.addtounicode(data,filename) -end -local g_directions={ - gsub_contextchain=1, - gpos_contextchain=1, - gsub_reversecontextchain=-1, - gpos_reversecontextchain=-1, -} -actions["reorganize subtables"]=function(data,filename,raw) - local resources=data.resources - local sequences={} - local lookups={} - local chainedfeatures={} - resources.sequences=sequences - resources.lookups=lookups - for _,what in next,otf.glists do - local dw=raw[what] - if dw then - for k=1,#dw do - local gk=dw[k] - local features=gk.features - local typ=gk.type - local chain=g_directions[typ] or 0 - local subtables=gk.subtables - if subtables then - local t={} - for s=1,#subtables do - t[s]=subtables[s].name - end - subtables=t - end - local flags,markclass=gk.flags,nil - if flags then - local t={ - (flags.ignorecombiningmarks and "mark") or false, - (flags.ignoreligatures and "ligature") or false, - (flags.ignorebaseglyphs and "base") or false, - flags.r2l or false, - } - markclass=flags.mark_class - if markclass then - markclass=resources.markclasses[markclass] - end - flags=t - end - local name=gk.name - if not name then - report_otf("skipping weird lookup number %s",k) - elseif features then - local f={} - local o={} - for i=1,#features do - local df=features[i] - local tag=strip(lower(df.tag)) - local 
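-- [editorial sketch, not part of the fontloader sources] "reorganize features"
-- above flattens the raw gsub/gpos lists into a nested availability table of
-- the form features[tag][script][language] = true (the real code also strips
-- and lower-cases the tags). A minimal sketch of that registration step
-- (registerfeature is an illustrative name):
local function registerfeature(features, tag, script, language)
  local scripts = features[tag]
  if not scripts then scripts = {} features[tag] = scripts end
  local languages = scripts[script]
  if not languages then languages = {} scripts[script] = languages end
  languages[language] = true
end
-- usage:
-- local features = {}
-- registerfeature(features, "liga", "latn", "dflt")
-- features.liga.latn.dflt --> true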
ft=f[tag] - if not ft then - ft={} - f[tag]=ft - o[#o+1]=tag - end - local dscripts=df.scripts - for i=1,#dscripts do - local d=dscripts[i] - local languages=d.langs - local script=strip(lower(d.script)) - local fts=ft[script] if not fts then fts={} ft[script]=fts end - for i=1,#languages do - fts[strip(lower(languages[i]))]=true - end - end - end - sequences[#sequences+1]={ - type=typ, - chain=chain, - flags=flags, - name=name, - subtables=subtables, - markclass=markclass, - features=f, - order=o, - } - else - lookups[name]={ - type=typ, - chain=chain, - flags=flags, - subtables=subtables, - markclass=markclass, - } - end - end - end - end -end -actions["prepare lookups"]=function(data,filename,raw) - local lookups=raw.lookups - if lookups then - data.lookups=lookups - end -end -local function t_uncover(splitter,cache,covers) - local result={} - for n=1,#covers do - local cover=covers[n] - local uncovered=cache[cover] - if not uncovered then - uncovered=lpegmatch(splitter,cover) - cache[cover]=uncovered - end - result[n]=uncovered - end - return result -end -local function s_uncover(splitter,cache,cover) - if cover=="" then - return nil - else - local uncovered=cache[cover] - if not uncovered then - uncovered=lpegmatch(splitter,cover) - cache[cover]=uncovered - end - return { uncovered } - end -end -local function t_hashed(t,cache) - if t then - local ht={} - for i=1,#t do - local ti=t[i] - local tih=cache[ti] - if not tih then - local tn=#ti - if tn==1 then - tih={ [ti[1]]=true } - else - tih={} - for i=1,tn do - tih[ti[i]]=true - end - end - cache[ti]=tih - end - ht[i]=tih - end - return ht - else - return nil - end -end -local function s_hashed(t,cache) - if t then - local tf=t[1] - local nf=#tf - if nf==1 then - return { [tf[1]]=true } - else - local ht={} - for i=1,nf do - ht[i]={ [tf[i]]=true } - end - return ht - end - else - return nil - end -end -local function r_uncover(splitter,cache,cover,replacements) - if cover=="" then - return nil - else - local uncovered=cover[1] - local replaced=cache[replacements] - if not replaced then - replaced=lpegmatch(splitter,replacements) - cache[replacements]=replaced - end - local nu,nr=#uncovered,#replaced - local r={} - if nu==nr then - for i=1,nu do - r[uncovered[i]]=replaced[i] - end - end - return r - end -end -actions["reorganize lookups"]=function(data,filename,raw) - if data.lookups then - local splitter=data.helpers.tounicodetable - local t_u_cache={} - local s_u_cache=t_u_cache - local t_h_cache={} - local s_h_cache=t_h_cache - local r_u_cache={} - for _,lookup in next,data.lookups do - local rules=lookup.rules - if rules then - local format=lookup.format - if format=="class" then - local before_class=lookup.before_class - if before_class then - before_class=t_uncover(splitter,t_u_cache,reversed(before_class)) - end - local current_class=lookup.current_class - if current_class then - current_class=t_uncover(splitter,t_u_cache,current_class) - end - local after_class=lookup.after_class - if after_class then - after_class=t_uncover(splitter,t_u_cache,after_class) - end - for i=1,#rules do - local rule=rules[i] - local class=rule.class - local before=class.before - if before then - for i=1,#before do - before[i]=before_class[before[i]] or {} - end - rule.before=t_hashed(before,t_h_cache) - end - local current=class.current - local lookups=rule.lookups - if current then - for i=1,#current do - current[i]=current_class[current[i]] or {} - if lookups and not lookups[i] then - lookups[i]="" - end - end - 
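-- [editorial sketch, not part of the fontloader sources] The t_uncover and
-- t_hashed helpers above split space-separated coverage strings once, cache
-- the result per string, and turn the arrays into hash sets so contextual
-- rules can test membership cheaply. The real code splits with an lpeg pattern
-- that also maps glyph names to unicodes; this standalone sketch only shows
-- the split-and-cache idea (cover_to_set is an illustrative name):
local cover_cache = {}
local function cover_to_set(cover)
  local set = cover_cache[cover]
  if not set then
    set = {}
    for name in cover:gmatch("%S+") do
      set[name] = true                 -- one entry per listed glyph
    end
    cover_cache[cover] = set
  end
  return set
end
-- usage: cover_to_set("f f_i fl").f_i --> true (the same table is reused for
-- later calls with an identical cover string)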
rule.current=t_hashed(current,t_h_cache) - end - local after=class.after - if after then - for i=1,#after do - after[i]=after_class[after[i]] or {} - end - rule.after=t_hashed(after,t_h_cache) - end - rule.class=nil - end - lookup.before_class=nil - lookup.current_class=nil - lookup.after_class=nil - lookup.format="coverage" - elseif format=="coverage" then - for i=1,#rules do - local rule=rules[i] - local coverage=rule.coverage - if coverage then - local before=coverage.before - if before then - before=t_uncover(splitter,t_u_cache,reversed(before)) - rule.before=t_hashed(before,t_h_cache) - end - local current=coverage.current - if current then - current=t_uncover(splitter,t_u_cache,current) - local lookups=rule.lookups - if lookups then - for i=1,#current do - if not lookups[i] then - lookups[i]="" - end - end - end - rule.current=t_hashed(current,t_h_cache) - end - local after=coverage.after - if after then - after=t_uncover(splitter,t_u_cache,after) - rule.after=t_hashed(after,t_h_cache) - end - rule.coverage=nil - end - end - elseif format=="reversecoverage" then - for i=1,#rules do - local rule=rules[i] - local reversecoverage=rule.reversecoverage - if reversecoverage then - local before=reversecoverage.before - if before then - before=t_uncover(splitter,t_u_cache,reversed(before)) - rule.before=t_hashed(before,t_h_cache) - end - local current=reversecoverage.current - if current then - current=t_uncover(splitter,t_u_cache,current) - rule.current=t_hashed(current,t_h_cache) - end - local after=reversecoverage.after - if after then - after=t_uncover(splitter,t_u_cache,after) - rule.after=t_hashed(after,t_h_cache) - end - local replacements=reversecoverage.replacements - if replacements then - rule.replacements=r_uncover(splitter,r_u_cache,current,replacements) - end - rule.reversecoverage=nil - end - end - elseif format=="glyphs" then - for i=1,#rules do - local rule=rules[i] - local glyphs=rule.glyphs - if glyphs then - local fore=glyphs.fore - if fore and fore~="" then - fore=s_uncover(splitter,s_u_cache,fore) - rule.after=s_hashed(fore,s_h_cache) - end - local back=glyphs.back - if back then - back=s_uncover(splitter,s_u_cache,back) - rule.before=s_hashed(back,s_h_cache) - end - local names=glyphs.names - if names then - names=s_uncover(splitter,s_u_cache,names) - rule.current=s_hashed(names,s_h_cache) - end - rule.glyphs=nil - local lookups=rule.lookups - if lookups then - for i=1,#names do - if not lookups[i] then - lookups[i]="" - end - end - end - end - end - end - end - end - end -end -local function check_variants(unicode,the_variants,splitter,unicodes) - local variants=the_variants.variants - if variants then - local glyphs=lpegmatch(splitter,variants) - local done={ [unicode]=true } - local n=0 - for i=1,#glyphs do - local g=glyphs[i] - if done[g] then - if i>1 then - report_otf("skipping cyclic reference %U in math variant %U",g,unicode) - end - else - if n==0 then - n=1 - variants={ g } - else - n=n+1 - variants[n]=g - end - done[g]=true - end - end - if n==0 then - variants=nil - end - end - local parts=the_variants.parts - if parts then - local p=#parts - if p>0 then - for i=1,p do - local pi=parts[i] - pi.glyph=unicodes[pi.component] or 0 - pi.component=nil - end - else - parts=nil - end - end - local italic_correction=the_variants.italic_correction - if italic_correction and italic_correction==0 then - italic_correction=nil - end - return variants,parts,italic_correction -end -actions["analyze math"]=function(data,filename,raw) - if raw.math then - 
data.metadata.math=raw.math - local unicodes=data.resources.unicodes - local splitter=data.helpers.tounicodetable - for unicode,description in next,data.descriptions do - local glyph=description.glyph - local mathkerns=glyph.mathkern - local horiz_variants=glyph.horiz_variants - local vert_variants=glyph.vert_variants - local top_accent=glyph.top_accent - if mathkerns or horiz_variants or vert_variants or top_accent then - local math={} - if top_accent then - math.top_accent=top_accent - end - if mathkerns then - for k,v in next,mathkerns do - if not next(v) then - mathkerns[k]=nil - else - for k,v in next,v do - if v==0 then - k[v]=nil - end - end - end - end - math.kerns=mathkerns - end - if horiz_variants then - math.horiz_variants,math.horiz_parts,math.horiz_italic_correction=check_variants(unicode,horiz_variants,splitter,unicodes) - end - if vert_variants then - math.vert_variants,math.vert_parts,math.vert_italic_correction=check_variants(unicode,vert_variants,splitter,unicodes) - end - local italic_correction=description.italic - if italic_correction and italic_correction~=0 then - math.italic_correction=italic_correction - end - description.math=math - end - end - end -end -actions["reorganize glyph kerns"]=function(data,filename,raw) - local descriptions=data.descriptions - local resources=data.resources - local unicodes=resources.unicodes - for unicode,description in next,descriptions do - local kerns=description.glyph.kerns - if kerns then - local newkerns={} - for k,kern in next,kerns do - local name=kern.char - local offset=kern.off - local lookup=kern.lookup - if name and offset and lookup then - local unicode=unicodes[name] - if unicode then - if type(lookup)=="table" then - for l=1,#lookup do - local lookup=lookup[l] - local lookupkerns=newkerns[lookup] - if lookupkerns then - lookupkerns[unicode]=offset - else - newkerns[lookup]={ [unicode]=offset } - end - end - else - local lookupkerns=newkerns[lookup] - if lookupkerns then - lookupkerns[unicode]=offset - else - newkerns[lookup]={ [unicode]=offset } - end - end - elseif trace_loading then - report_otf("problems with unicode %a of kern %a of glyph %U",name,k,unicode) - end - end - end - description.kerns=newkerns - end - end -end -actions["merge kern classes"]=function(data,filename,raw) - local gposlist=raw.gpos - if gposlist then - local descriptions=data.descriptions - local resources=data.resources - local unicodes=resources.unicodes - local splitter=data.helpers.tounicodetable - local ignored=0 - local blocked=0 - for gp=1,#gposlist do - local gpos=gposlist[gp] - local subtables=gpos.subtables - if subtables then - local first_done={} - local split={} - for s=1,#subtables do - local subtable=subtables[s] - local kernclass=subtable.kernclass - local lookup=subtable.lookup or subtable.name - if kernclass then - if #kernclass>0 then - kernclass=kernclass[1] - lookup=type(kernclass.lookup)=="string" and kernclass.lookup or lookup - report_otf("fixing kernclass table of lookup %a",lookup) - end - local firsts=kernclass.firsts - local seconds=kernclass.seconds - local offsets=kernclass.offsets - for n,s in next,firsts do - split[s]=split[s] or lpegmatch(splitter,s) - end - local maxseconds=0 - for n,s in next,seconds do - if n>maxseconds then - maxseconds=n - end - split[s]=split[s] or lpegmatch(splitter,s) - end - for fk=1,#firsts do - local fv=firsts[fk] - local splt=split[fv] - if splt then - local extrakerns={} - local baseoffset=(fk-1)*maxseconds - for sk=2,maxseconds do - local sv=seconds[sk] - local splt=split[sv] - 
if splt then - local offset=offsets[baseoffset+sk] - if offset then - for i=1,#splt do - extrakerns[splt[i]]=offset - end - end - end - end - for i=1,#splt do - local first_unicode=splt[i] - if first_done[first_unicode] then - report_otf("lookup %a: ignoring further kerns of %C",lookup,first_unicode) - blocked=blocked+1 - else - first_done[first_unicode]=true - local description=descriptions[first_unicode] - if description then - local kerns=description.kerns - if not kerns then - kerns={} - description.kerns=kerns - end - local lookupkerns=kerns[lookup] - if not lookupkerns then - lookupkerns={} - kerns[lookup]=lookupkerns - end - if overloadkerns then - for second_unicode,kern in next,extrakerns do - lookupkerns[second_unicode]=kern - end - else - for second_unicode,kern in next,extrakerns do - local k=lookupkerns[second_unicode] - if not k then - lookupkerns[second_unicode]=kern - elseif k~=kern then - if trace_loading then - report_otf("lookup %a: ignoring overload of kern between %C and %C, rejecting %a, keeping %a",lookup,first_unicode,second_unicode,k,kern) - end - ignored=ignored+1 - end - end - end - elseif trace_loading then - report_otf("no glyph data for %U",first_unicode) - end - end - end - end - end - subtable.kernclass={} - end - end - end - end - if ignored>0 then - report_otf("%s kern overloads ignored",ignored) - end - if blocked>0 then - report_otf("%s succesive kerns blocked",blocked) - end - end -end -actions["check glyphs"]=function(data,filename,raw) - for unicode,description in next,data.descriptions do - description.glyph=nil - end -end -local valid=(R("\x00\x7E")-S("(){}[]<>%/ \n\r\f\v"))^0*P(-1) -local function valid_ps_name(str) - return str and str~="" and #str<64 and lpegmatch(valid,str) and true or false -end -actions["check metadata"]=function(data,filename,raw) - local metadata=data.metadata - for _,k in next,mainfields do - if valid_fields[k] then - local v=raw[k] - if not metadata[k] then - metadata[k]=v - end - end - end - local ttftables=metadata.ttf_tables - if ttftables then - for i=1,#ttftables do - ttftables[i].data="deleted" - end - end - if metadata.validation_state and table.contains(metadata.validation_state,"bad_ps_fontname") then - local function valid(what) - local names=raw.names - for i=1,#names do - local list=names[i] - local names=list.names - if names then - local name=names[what] - if name and valid_ps_name(name) then - return name - end - end - end - end - local function check(what) - local oldname=metadata[what] - if valid_ps_name(oldname) then - report_otf("ignoring warning %a because %s %a is proper ASCII","bad_ps_fontname",what,oldname) - else - local newname=valid(what) - if not newname then - newname=formatters["bad-%s-%s"](what,file.nameonly(filename)) - end - local warning=formatters["overloading %s from invalid ASCII name %a to %a"](what,oldname,newname) - data.warnings[#data.warnings+1]=warning - report_otf(warning) - metadata[what]=newname - end - end - check("fontname") - check("fullname") - end -end -actions["cleanup tables"]=function(data,filename,raw) - local duplicates=data.resources.duplicates - if duplicates then - for k,v in next,duplicates do - if #v==1 then - duplicates[k]=v[1] - end - end - end - data.resources.indices=nil - data.resources.unicodes=nil - data.helpers=nil -end -actions["reorganize glyph lookups"]=function(data,filename,raw) - local resources=data.resources - local unicodes=resources.unicodes - local descriptions=data.descriptions - local splitter=data.helpers.tounicodelist - local 
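-- [editorial sketch, not part of the fontloader sources] In "merge kern
-- classes" above the class kerns arrive as a flat offsets array that encodes a
-- (first class x second class) matrix; the value for a pair sits at index
-- (firstclass - 1) * maxseconds + secondclass. A standalone sketch of that
-- indexing (classkern is an illustrative name):
local function classkern(offsets, maxseconds, firstclass, secondclass)
  return offsets[(firstclass - 1) * maxseconds + secondclass]
end
-- e.g. with maxseconds = 3, the kern of first class 2 against second class 3
-- is offsets[(2 - 1) * 3 + 3] = offsets[6].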
lookuptypes=resources.lookuptypes - for unicode,description in next,descriptions do - local lookups=description.glyph.lookups - if lookups then - for tag,lookuplist in next,lookups do - for l=1,#lookuplist do - local lookup=lookuplist[l] - local specification=lookup.specification - local lookuptype=lookup.type - local lt=lookuptypes[tag] - if not lt then - lookuptypes[tag]=lookuptype - elseif lt~=lookuptype then - report_otf("conflicting lookuptypes, %a points to %a and %a",tag,lt,lookuptype) - end - if lookuptype=="ligature" then - lookuplist[l]={ lpegmatch(splitter,specification.components) } - elseif lookuptype=="alternate" then - lookuplist[l]={ lpegmatch(splitter,specification.components) } - elseif lookuptype=="substitution" then - lookuplist[l]=unicodes[specification.variant] - elseif lookuptype=="multiple" then - lookuplist[l]={ lpegmatch(splitter,specification.components) } - elseif lookuptype=="position" then - lookuplist[l]={ - specification.x or 0, - specification.y or 0, - specification.h or 0, - specification.v or 0 - } - elseif lookuptype=="pair" then - local one=specification.offsets[1] - local two=specification.offsets[2] - local paired=unicodes[specification.paired] - if one then - if two then - lookuplist[l]={ paired,{ one.x or 0,one.y or 0,one.h or 0,one.v or 0 },{ two.x or 0,two.y or 0,two.h or 0,two.v or 0 } } - else - lookuplist[l]={ paired,{ one.x or 0,one.y or 0,one.h or 0,one.v or 0 } } - end - else - if two then - lookuplist[l]={ paired,{},{ two.x or 0,two.y or 0,two.h or 0,two.v or 0} } - else - lookuplist[l]={ paired } - end - end - end - end - end - local slookups,mlookups - for tag,lookuplist in next,lookups do - if #lookuplist==1 then - if slookups then - slookups[tag]=lookuplist[1] - else - slookups={ [tag]=lookuplist[1] } - end - else - if mlookups then - mlookups[tag]=lookuplist - else - mlookups={ [tag]=lookuplist } - end - end - end - if slookups then - description.slookups=slookups - end - if mlookups then - description.mlookups=mlookups - end - end - end -end -actions["reorganize glyph anchors"]=function(data,filename,raw) - local descriptions=data.descriptions - for unicode,description in next,descriptions do - local anchors=description.glyph.anchors - if anchors then - for class,data in next,anchors do - if class=="baselig" then - for tag,specification in next,data do - for i=1,#specification do - local si=specification[i] - specification[i]={ si.x or 0,si.y or 0 } - end - end - else - for tag,specification in next,data do - data[tag]={ specification.x or 0,specification.y or 0 } - end - end - end - description.anchors=anchors - end - end -end -local bogusname=(P("uni")+P("u"))*R("AF","09")^4+(P("index")+P("glyph")+S("Ii")*P("dentity")*P(".")^0)*R("09")^1 -local uselessname=(1-bogusname)^0*bogusname -actions["purge names"]=function(data,filename,raw) - if purge_names then - local n=0 - for u,d in next,data.descriptions do - if lpegmatch(uselessname,d.name) then - n=n+1 - d.name=nil - end - end - if n>0 then - report_otf("%s bogus names removed",n) - end - end -end -actions["compact lookups"]=function(data,filename,raw) - if not compact_lookups then - report_otf("not compacting") - return - end - local last=0 - local tags=table.setmetatableindex({}, - function(t,k) - last=last+1 - t[k]=last - return last - end - ) - local descriptions=data.descriptions - local resources=data.resources - for u,d in next,descriptions do - local slookups=d.slookups - if type(slookups)=="table" then - local s={} - for k,v in next,slookups do - s[tags[k]]=v - end - 
d.slookups=s - end - local mlookups=d.mlookups - if type(mlookups)=="table" then - local m={} - for k,v in next,mlookups do - m[tags[k]]=v - end - d.mlookups=m - end - local kerns=d.kerns - if type(kerns)=="table" then - local t={} - for k,v in next,kerns do - t[tags[k]]=v - end - d.kerns=t - end - end - local lookups=data.lookups - if lookups then - local l={} - for k,v in next,lookups do - local rules=v.rules - if rules then - for i=1,#rules do - local l=rules[i].lookups - if type(l)=="table" then - for i=1,#l do - l[i]=tags[l[i]] - end - end - end - end - l[tags[k]]=v - end - data.lookups=l - end - local lookups=resources.lookups - if lookups then - local l={} - for k,v in next,lookups do - local s=v.subtables - if type(s)=="table" then - for i=1,#s do - s[i]=tags[s[i]] - end - end - l[tags[k]]=v - end - resources.lookups=l - end - local sequences=resources.sequences - if sequences then - for i=1,#sequences do - local s=sequences[i] - local n=s.name - if n then - s.name=tags[n] - end - local t=s.subtables - if type(t)=="table" then - for i=1,#t do - t[i]=tags[t[i]] - end - end - end - end - local lookuptypes=resources.lookuptypes - if lookuptypes then - local l={} - for k,v in next,lookuptypes do - l[tags[k]]=v - end - resources.lookuptypes=l - end - local anchor_to_lookup=resources.anchor_to_lookup - if anchor_to_lookup then - for anchor,lookups in next,anchor_to_lookup do - local l={} - for lookup,value in next,lookups do - l[tags[lookup]]=value - end - anchor_to_lookup[anchor]=l - end - end - local lookup_to_anchor=resources.lookup_to_anchor - if lookup_to_anchor then - local l={} - for lookup,value in next,lookup_to_anchor do - l[tags[lookup]]=value - end - resources.lookup_to_anchor=l - end - tags=table.swapped(tags) - report_otf("%s lookup tags compacted",#tags) - resources.lookuptags=tags -end -function otf.setfeatures(tfmdata,features) - local okay=constructors.initializefeatures("otf",tfmdata,features,trace_features,report_otf) - if okay then - return constructors.collectprocessors("otf",tfmdata,features,trace_features,report_otf) - else - return {} - end -end -local function copytotfm(data,cache_id) - if data then - local metadata=data.metadata - local warnings=data.warnings - local resources=data.resources - local properties=derivetable(data.properties) - local descriptions=derivetable(data.descriptions) - local goodies=derivetable(data.goodies) - local characters={} - local parameters={} - local mathparameters={} - local pfminfo=metadata.pfminfo or {} - local resources=data.resources - local unicodes=resources.unicodes - local spaceunits=500 - local spacer="space" - local designsize=metadata.designsize or metadata.design_size or 100 - local mathspecs=metadata.math - if designsize==0 then - designsize=100 - end - if mathspecs then - for name,value in next,mathspecs do - mathparameters[name]=value - end - end - for unicode,_ in next,data.descriptions do - characters[unicode]={} - end - if mathspecs then - for unicode,character in next,characters do - local d=descriptions[unicode] - local m=d.math - if m then - local variants=m.horiz_variants - local parts=m.horiz_parts - if variants then - local c=character - for i=1,#variants do - local un=variants[i] - c.next=un - c=characters[un] - end - c.horiz_variants=parts - elseif parts then - character.horiz_variants=parts - end - local variants=m.vert_variants - local parts=m.vert_parts - if variants then - local c=character - for i=1,#variants do - local un=variants[i] - c.next=un - c=characters[un] - end - c.vert_variants=parts - 
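-- [editorial sketch, not part of the fontloader sources] "compact lookups"
-- above replaces long lookup-name strings by small integers: a table whose
-- __index handler hands out the next number on first access, so every name is
-- numbered lazily and consistently, and the inverted table is kept as the
-- lookuptags map. The real code uses ConTeXt's table.setmetatableindex; a
-- plain-Lua equivalent of the idiom:
local last = 0
local tags = setmetatable({}, { __index = function(t, k)
  last = last + 1
  t[k] = last                -- remember the number assigned to this name
  return last
end })
-- usage: tags["some lookup"] --> 1, tags["another"] --> 2, and
-- tags["some lookup"] --> 1 again; swapping keys and values afterwards
-- recovers the number -> name map.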
elseif parts then - character.vert_variants=parts - end - local italic_correction=m.vert_italic_correction - if italic_correction then - character.vert_italic_correction=italic_correction - end - local top_accent=m.top_accent - if top_accent then - character.top_accent=top_accent - end - local kerns=m.kerns - if kerns then - character.mathkerns=kerns - end - end - end - end - local filename=constructors.checkedfilename(resources) - local fontname=metadata.fontname - local fullname=metadata.fullname or fontname - local psname=fontname or fullname - local units=metadata.units_per_em or 1000 - if units==0 then - units=1000 - metadata.units_per_em=1000 - report_otf("changing %a units to %a",0,units) - end - local monospaced=metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose.proportion=="Monospaced") - local charwidth=pfminfo.avgwidth - local charxheight=pfminfo.os2_xheight and pfminfo.os2_xheight>0 and pfminfo.os2_xheight - local italicangle=metadata.italicangle - properties.monospaced=monospaced - parameters.italicangle=italicangle - parameters.charwidth=charwidth - parameters.charxheight=charxheight - local space=0x0020 - local emdash=0x2014 - if monospaced then - if descriptions[space] then - spaceunits,spacer=descriptions[space].width,"space" - end - if not spaceunits and descriptions[emdash] then - spaceunits,spacer=descriptions[emdash].width,"emdash" - end - if not spaceunits and charwidth then - spaceunits,spacer=charwidth,"charwidth" - end - else - if descriptions[space] then - spaceunits,spacer=descriptions[space].width,"space" - end - if not spaceunits and descriptions[emdash] then - spaceunits,spacer=descriptions[emdash].width/2,"emdash/2" - end - if not spaceunits and charwidth then - spaceunits,spacer=charwidth,"charwidth" - end - end - spaceunits=tonumber(spaceunits) or 500 - parameters.slant=0 - parameters.space=spaceunits - parameters.space_stretch=units/2 - parameters.space_shrink=1*units/3 - parameters.x_height=2*units/5 - parameters.quad=units - if spaceunits<2*units/5 then - end - if italicangle and italicangle~=0 then - parameters.italicangle=italicangle - parameters.italicfactor=math.cos(math.rad(90+italicangle)) - parameters.slant=- math.tan(italicangle*math.pi/180) - end - if monospaced then - parameters.space_stretch=0 - parameters.space_shrink=0 - elseif syncspace then - parameters.space_stretch=spaceunits/2 - parameters.space_shrink=spaceunits/3 - end - parameters.extra_space=parameters.space_shrink - if charxheight then - parameters.x_height=charxheight - else - local x=0x0078 - if x then - local x=descriptions[x] - if x then - parameters.x_height=x.height - end - end - end - parameters.designsize=(designsize/10)*65536 - parameters.ascender=abs(metadata.ascent or 0) - parameters.descender=abs(metadata.descent or 0) - parameters.units=units - properties.space=spacer - properties.encodingbytes=2 - properties.format=data.format or otf_format(filename) or formats.otf - properties.noglyphnames=true - properties.filename=filename - properties.fontname=fontname - properties.fullname=fullname - properties.psname=psname - properties.name=filename or fullname - if warnings and #warnings>0 then - report_otf("warnings for font: %s",filename) - report_otf() - for i=1,#warnings do - report_otf(" %s",warnings[i]) - end - report_otf() - end - return { - characters=characters, - descriptions=descriptions, - parameters=parameters, - mathparameters=mathparameters, - resources=resources, - properties=properties, - goodies=goodies, - warnings=warnings, - } - end -end -local 
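-- [editorial sketch, not part of the fontloader sources] copytotfm above
-- derives the TFM slant and an italic correction factor from the font's
-- italic angle (given in degrees). Standalone version of those two formulas
-- (italicparameters is an illustrative name):
local function italicparameters(italicangle)
  return {
    slant        = -math.tan(italicangle * math.pi / 180),
    italicfactor = math.cos(math.rad(90 + italicangle)),
  }
end
-- e.g. for an italic angle of -12 degrees the slant comes out positive
-- (about 0.2126), since right-leaning fonts carry a negative italic angle.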
function otftotfm(specification) - local cache_id=specification.hash - local tfmdata=containers.read(constructors.cache,cache_id) - if not tfmdata then - local name=specification.name - local sub=specification.sub - local filename=specification.filename - local features=specification.features.normal - local rawdata=otf.load(filename,sub,features and features.featurefile) - if rawdata and next(rawdata) then - local descriptions=rawdata.descriptions - local duplicates=rawdata.resources.duplicates - if duplicates then - local nofduplicates,nofduplicated=0,0 - for parent,list in next,duplicates do - if type(list)=="table" then - local n=#list - for i=1,n do - local unicode=list[i] - if not descriptions[unicode] then - descriptions[unicode]=descriptions[parent] - nofduplicated=nofduplicated+1 - end - end - nofduplicates=nofduplicates+n - else - if not descriptions[list] then - descriptions[list]=descriptions[parent] - nofduplicated=nofduplicated+1 - end - nofduplicates=nofduplicates+1 - end - end - if trace_otf and nofduplicated~=nofduplicates then - report_otf("%i extra duplicates copied out of %i",nofduplicated,nofduplicates) - end - end - rawdata.lookuphash={} - tfmdata=copytotfm(rawdata,cache_id) - if tfmdata and next(tfmdata) then - local features=constructors.checkedfeatures("otf",features) - local shared=tfmdata.shared - if not shared then - shared={} - tfmdata.shared=shared - end - shared.rawdata=rawdata - shared.dynamics={} - tfmdata.changed={} - shared.features=features - shared.processes=otf.setfeatures(tfmdata,features) - end - end - containers.write(constructors.cache,cache_id,tfmdata) - end - return tfmdata -end -local function read_from_otf(specification) - local tfmdata=otftotfm(specification) - if tfmdata then - tfmdata.properties.name=specification.name - tfmdata.properties.sub=specification.sub - tfmdata=constructors.scale(tfmdata,specification) - local allfeatures=tfmdata.shared.features or specification.features.normal - constructors.applymanipulators("otf",tfmdata,allfeatures,trace_features,report_otf) - constructors.setname(tfmdata,specification) - fonts.loggers.register(tfmdata,file.suffix(specification.filename),specification) - end - return tfmdata -end -local function checkmathsize(tfmdata,mathsize) - local mathdata=tfmdata.shared.rawdata.metadata.math - local mathsize=tonumber(mathsize) - if mathdata then - local parameters=tfmdata.parameters - parameters.scriptpercentage=mathdata.ScriptPercentScaleDown - parameters.scriptscriptpercentage=mathdata.ScriptScriptPercentScaleDown - parameters.mathsize=mathsize - end -end -registerotffeature { - name="mathsize", - description="apply mathsize specified in the font", - initializers={ - base=checkmathsize, - node=checkmathsize, - } -} -function otf.collectlookups(rawdata,kind,script,language) - local sequences=rawdata.resources.sequences - if sequences then - local featuremap,featurelist={},{} - for s=1,#sequences do - local sequence=sequences[s] - local features=sequence.features - features=features and features[kind] - features=features and (features[script] or features[default] or features[wildcard]) - features=features and (features[language] or features[default] or features[wildcard]) - if features then - local subtables=sequence.subtables - if subtables then - for s=1,#subtables do - local ss=subtables[s] - if not featuremap[s] then - featuremap[ss]=true - featurelist[#featurelist+1]=ss - end - end - end - end - end - if #featurelist>0 then - return featuremap,featurelist - end - end - return nil,nil -end -local 
function check_otf(forced,specification,suffix) - local name=specification.name - if forced then - name=specification.forcedname - end - local fullname=findbinfile(name,suffix) or "" - if fullname=="" then - fullname=fonts.names.getfilename(name,suffix) or "" - end - if fullname~="" and not fonts.names.ignoredfile(fullname) then - specification.filename=fullname - return read_from_otf(specification) - end -end -local function opentypereader(specification,suffix) - local forced=specification.forced or "" - if formats[forced] then - return check_otf(true,specification,forced) - else - return check_otf(false,specification,suffix) - end -end -readers.opentype=opentypereader -function readers.otf (specification) return opentypereader(specification,"otf") end -function readers.ttf (specification) return opentypereader(specification,"ttf") end -function readers.ttc (specification) return opentypereader(specification,"ttf") end -function readers.dfont(specification) return opentypereader(specification,"ttf") end -function otf.scriptandlanguage(tfmdata,attr) - local properties=tfmdata.properties - return properties.script or "dflt",properties.language or "dflt" -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-otb']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local concat=table.concat -local format,gmatch,gsub,find,match,lower,strip=string.format,string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip -local type,next,tonumber,tostring,rawget=type,next,tonumber,tostring,rawget -local lpegmatch=lpeg.match -local utfchar=utf.char -local trace_baseinit=false trackers.register("otf.baseinit",function(v) trace_baseinit=v end) -local trace_singles=false trackers.register("otf.singles",function(v) trace_singles=v end) -local trace_multiples=false trackers.register("otf.multiples",function(v) trace_multiples=v end) -local trace_alternatives=false trackers.register("otf.alternatives",function(v) trace_alternatives=v end) -local trace_ligatures=false trackers.register("otf.ligatures",function(v) trace_ligatures=v end) -local trace_ligatures_detail=false trackers.register("otf.ligatures.detail",function(v) trace_ligatures_detail=v end) -local trace_kerns=false trackers.register("otf.kerns",function(v) trace_kerns=v end) -local trace_preparing=false trackers.register("otf.preparing",function(v) trace_preparing=v end) -local report_prepare=logs.reporter("fonts","otf prepare") -local fonts=fonts -local otf=fonts.handlers.otf -local otffeatures=otf.features -local registerotffeature=otffeatures.register -otf.defaultbasealternate="none" -local wildcard="*" -local default="dflt" -local formatters=string.formatters -local f_unicode=formatters["%U"] -local f_uniname=formatters["%U (%s)"] -local f_unilist=formatters["% t (% t)"] -local function gref(descriptions,n) - if type(n)=="number" then - local name=descriptions[n].name - if name then - return f_uniname(n,name) - else - return f_unicode(n) - end - elseif n then - local num,nam,j={},{},0 - for i=1,#n do - local ni=n[i] - if tonumber(ni) then - j=j+1 - local di=descriptions[ni] - num[j]=f_unicode(ni) - nam[j]=di and di.name or "-" - end - end - return f_unilist(num,nam) - else - return "" - end -end -local function cref(feature,lookuptags,lookupname) - if lookupname then - return 
formatters["feature %a, lookup %a"](feature,lookuptags[lookupname]) - else - return formatters["feature %a"](feature) - end -end -local function report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,comment) - report_prepare("%s: base alternate %s => %s (%S => %S)", - cref(feature,lookuptags,lookupname), - gref(descriptions,unicode), - replacement and gref(descriptions,replacement), - value, - comment) -end -local function report_substitution(feature,lookuptags,lookupname,descriptions,unicode,substitution) - report_prepare("%s: base substitution %s => %S", - cref(feature,lookuptags,lookupname), - gref(descriptions,unicode), - gref(descriptions,substitution)) -end -local function report_ligature(feature,lookuptags,lookupname,descriptions,unicode,ligature) - report_prepare("%s: base ligature %s => %S", - cref(feature,lookuptags,lookupname), - gref(descriptions,ligature), - gref(descriptions,unicode)) -end -local function report_kern(feature,lookuptags,lookupname,descriptions,unicode,otherunicode,value) - report_prepare("%s: base kern %s + %s => %S", - cref(feature,lookuptags,lookupname), - gref(descriptions,unicode), - gref(descriptions,otherunicode), - value) -end -local basemethods={} -local basemethod="" -local function applybasemethod(what,...) - local m=basemethods[basemethod][what] - if m then - return m(...) - end -end -local basehash,basehashes,applied={},1,{} -local function registerbasehash(tfmdata) - local properties=tfmdata.properties - local hash=concat(applied," ") - local base=basehash[hash] - if not base then - basehashes=basehashes+1 - base=basehashes - basehash[hash]=base - end - properties.basehash=base - properties.fullname=properties.fullname.."-"..base - applied={} -end -local function registerbasefeature(feature,value) - applied[#applied+1]=feature.."="..tostring(value) -end -local trace=false -local function finalize_ligatures(tfmdata,ligatures) - local nofligatures=#ligatures - if nofligatures>0 then - local characters=tfmdata.characters - local descriptions=tfmdata.descriptions - local resources=tfmdata.resources - local unicodes=resources.unicodes - local private=resources.private - local alldone=false - while not alldone do - local done=0 - for i=1,nofligatures do - local ligature=ligatures[i] - if ligature then - local unicode,lookupdata=ligature[1],ligature[2] - if trace_ligatures_detail then - report_prepare("building % a into %a",lookupdata,unicode) - end - local size=#lookupdata - local firstcode=lookupdata[1] - local firstdata=characters[firstcode] - local okay=false - if firstdata then - local firstname="ctx_"..firstcode - for i=1,size-1 do - local firstdata=characters[firstcode] - if not firstdata then - firstcode=private - if trace_ligatures_detail then - report_prepare("defining %a as %a",firstname,firstcode) - end - unicodes[firstname]=firstcode - firstdata={ intermediate=true,ligatures={} } - characters[firstcode]=firstdata - descriptions[firstcode]={ name=firstname } - private=private+1 - end - local target - local secondcode=lookupdata[i+1] - local secondname=firstname.."_"..secondcode - if i==size-1 then - target=unicode - if not rawget(unicodes,secondname) then - unicodes[secondname]=unicode - end - okay=true - else - target=rawget(unicodes,secondname) - if not target then - break - end - end - if trace_ligatures_detail then - report_prepare("codes (%a,%a) + (%a,%a) -> %a",firstname,firstcode,secondname,secondcode,target) - end - local firstligs=firstdata.ligatures - if firstligs then - firstligs[secondcode]={ 
char=target } - else - firstdata.ligatures={ [secondcode]={ char=target } } - end - firstcode=target - firstname=secondname - end - elseif trace_ligatures_detail then - report_prepare("no glyph (%a,%a) for building %a",firstname,firstcode,target) - end - if okay then - ligatures[i]=false - done=done+1 - end - end - end - alldone=done==0 - end - if trace_ligatures_detail then - for k,v in table.sortedhash(characters) do - if v.ligatures then - table.print(v,k) - end - end - end - resources.private=private - return true - end -end -local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist) - local characters=tfmdata.characters - local descriptions=tfmdata.descriptions - local resources=tfmdata.resources - local properties=tfmdata.properties - local changed=tfmdata.changed - local lookuphash=resources.lookuphash - local lookuptypes=resources.lookuptypes - local lookuptags=resources.lookuptags - local ligatures={} - local alternate=tonumber(value) or true and 1 - local defaultalt=otf.defaultbasealternate - local trace_singles=trace_baseinit and trace_singles - local trace_alternatives=trace_baseinit and trace_alternatives - local trace_ligatures=trace_baseinit and trace_ligatures - local actions={ - substitution=function(lookupdata,lookuptags,lookupname,description,unicode) - if trace_singles then - report_substitution(feature,lookuptags,lookupname,descriptions,unicode,lookupdata) - end - changed[unicode]=lookupdata - end, - alternate=function(lookupdata,lookuptags,lookupname,description,unicode) - local replacement=lookupdata[alternate] - if replacement then - changed[unicode]=replacement - if trace_alternatives then - report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"normal") - end - elseif defaultalt=="first" then - replacement=lookupdata[1] - changed[unicode]=replacement - if trace_alternatives then - report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt) - end - elseif defaultalt=="last" then - replacement=lookupdata[#data] - if trace_alternatives then - report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt) - end - else - if trace_alternatives then - report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"unknown") - end - end - end, - ligature=function(lookupdata,lookuptags,lookupname,description,unicode) - if trace_ligatures then - report_ligature(feature,lookuptags,lookupname,descriptions,unicode,lookupdata) - end - ligatures[#ligatures+1]={ unicode,lookupdata } - end, - } - for unicode,character in next,characters do - local description=descriptions[unicode] - local lookups=description.slookups - if lookups then - for l=1,#lookuplist do - local lookupname=lookuplist[l] - local lookupdata=lookups[lookupname] - if lookupdata then - local lookuptype=lookuptypes[lookupname] - local action=actions[lookuptype] - if action then - action(lookupdata,lookuptags,lookupname,description,unicode) - end - end - end - end - local lookups=description.mlookups - if lookups then - for l=1,#lookuplist do - local lookupname=lookuplist[l] - local lookuplist=lookups[lookupname] - if lookuplist then - local lookuptype=lookuptypes[lookupname] - local action=actions[lookuptype] - if action then - for i=1,#lookuplist do - action(lookuplist[i],lookuptags,lookupname,description,unicode) - end - end - end - end - end - end - properties.hasligatures=finalize_ligatures(tfmdata,ligatures) -end -local function 
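-- [editorial sketch, not part of the fontloader sources] finalize_ligatures
-- above stores base-mode ligatures on the first component as
-- characters[first].ligatures[second] = { char = result }; ligatures with
-- more than two components are chained through intermediate private glyphs.
-- The elementary step looks like this (addligature is an illustrative name):
local function addligature(characters, first, second, result)
  local c = characters[first]
  local ligatures = c.ligatures
  if ligatures then
    ligatures[second] = { char = result }
  else
    c.ligatures = { [second] = { char = result } }
  end
end
-- usage:
-- local characters = { [0x66] = {}, [0x69] = {} }        -- 'f' and 'i'
-- addligature(characters, 0x66, 0x69, 0xFB01)            -- f + i -> U+FB01
-- characters[0x66].ligatures[0x69].char --> 0xFB01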
preparepositionings(tfmdata,feature,value,validlookups,lookuplist) - local characters=tfmdata.characters - local descriptions=tfmdata.descriptions - local resources=tfmdata.resources - local properties=tfmdata.properties - local lookuptags=resources.lookuptags - local sharedkerns={} - local traceindeed=trace_baseinit and trace_kerns - local haskerns=false - for unicode,character in next,characters do - local description=descriptions[unicode] - local rawkerns=description.kerns - if rawkerns then - local s=sharedkerns[rawkerns] - if s==false then - elseif s then - character.kerns=s - else - local newkerns=character.kerns - local done=false - for l=1,#lookuplist do - local lookup=lookuplist[l] - local kerns=rawkerns[lookup] - if kerns then - for otherunicode,value in next,kerns do - if value==0 then - elseif not newkerns then - newkerns={ [otherunicode]=value } - done=true - if traceindeed then - report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,value) - end - elseif not newkerns[otherunicode] then - newkerns[otherunicode]=value - done=true - if traceindeed then - report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,value) - end - end - end - end - end - if done then - sharedkerns[rawkerns]=newkerns - character.kerns=newkerns - haskerns=true - else - sharedkerns[rawkerns]=false - end - end - end - end - properties.haskerns=haskerns -end -basemethods.independent={ - preparesubstitutions=preparesubstitutions, - preparepositionings=preparepositionings, -} -local function makefake(tfmdata,name,present) - local resources=tfmdata.resources - local private=resources.private - local character={ intermediate=true,ligatures={} } - resources.unicodes[name]=private - tfmdata.characters[private]=character - tfmdata.descriptions[private]={ name=name } - resources.private=private+1 - present[name]=private - return character -end -local function make_1(present,tree,name) - for k,v in next,tree do - if k=="ligature" then - present[name]=v - else - make_1(present,v,name.."_"..k) - end - end -end -local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,done,lookuptags,lookupname) - for k,v in next,tree do - if k=="ligature" then - local character=characters[preceding] - if not character then - if trace_baseinit then - report_prepare("weird ligature in lookup %a, current %C, preceding %C",lookuptags[lookupname],v,preceding) - end - character=makefake(tfmdata,name,present) - end - local ligatures=character.ligatures - if ligatures then - ligatures[unicode]={ char=v } - else - character.ligatures={ [unicode]={ char=v } } - end - if done then - local d=done[lookupname] - if not d then - done[lookupname]={ "dummy",v } - else - d[#d+1]=v - end - end - else - local code=present[name] or unicode - local name=name.."_"..k - make_2(present,tfmdata,characters,v,name,code,k,done,lookuptags,lookupname) - end - end -end -local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist) - local characters=tfmdata.characters - local descriptions=tfmdata.descriptions - local resources=tfmdata.resources - local changed=tfmdata.changed - local lookuphash=resources.lookuphash - local lookuptypes=resources.lookuptypes - local lookuptags=resources.lookuptags - local ligatures={} - local alternate=tonumber(value) or true and 1 - local defaultalt=otf.defaultbasealternate - local trace_singles=trace_baseinit and trace_singles - local trace_alternatives=trace_baseinit and trace_alternatives - local trace_ligatures=trace_baseinit and trace_ligatures - for 
l=1,#lookuplist do - local lookupname=lookuplist[l] - local lookupdata=lookuphash[lookupname] - local lookuptype=lookuptypes[lookupname] - for unicode,data in next,lookupdata do - if lookuptype=="substitution" then - if trace_singles then - report_substitution(feature,lookuptags,lookupname,descriptions,unicode,data) - end - changed[unicode]=data - elseif lookuptype=="alternate" then - local replacement=data[alternate] - if replacement then - changed[unicode]=replacement - if trace_alternatives then - report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"normal") - end - elseif defaultalt=="first" then - replacement=data[1] - changed[unicode]=replacement - if trace_alternatives then - report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt) - end - elseif defaultalt=="last" then - replacement=data[#data] - if trace_alternatives then - report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt) - end - else - if trace_alternatives then - report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"unknown") - end - end - elseif lookuptype=="ligature" then - ligatures[#ligatures+1]={ unicode,data,lookupname } - if trace_ligatures then - report_ligature(feature,lookuptags,lookupname,descriptions,unicode,data) - end - end - end - end - local nofligatures=#ligatures - if nofligatures>0 then - local characters=tfmdata.characters - local present={} - local done=trace_baseinit and trace_ligatures and {} - for i=1,nofligatures do - local ligature=ligatures[i] - local unicode,tree=ligature[1],ligature[2] - make_1(present,tree,"ctx_"..unicode) - end - for i=1,nofligatures do - local ligature=ligatures[i] - local unicode,tree,lookupname=ligature[1],ligature[2],ligature[3] - make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,lookuptags,lookupname) - end - end -end -local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist) - local characters=tfmdata.characters - local descriptions=tfmdata.descriptions - local resources=tfmdata.resources - local properties=tfmdata.properties - local lookuphash=resources.lookuphash - local lookuptags=resources.lookuptags - local traceindeed=trace_baseinit and trace_kerns - for l=1,#lookuplist do - local lookupname=lookuplist[l] - local lookupdata=lookuphash[lookupname] - for unicode,data in next,lookupdata do - local character=characters[unicode] - local kerns=character.kerns - if not kerns then - kerns={} - character.kerns=kerns - end - if traceindeed then - for otherunicode,kern in next,data do - if not kerns[otherunicode] and kern~=0 then - kerns[otherunicode]=kern - report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,kern) - end - end - else - for otherunicode,kern in next,data do - if not kerns[otherunicode] and kern~=0 then - kerns[otherunicode]=kern - end - end - end - end - end -end -local function initializehashes(tfmdata) - nodeinitializers.features(tfmdata) -end -basemethods.shared={ - initializehashes=initializehashes, - preparesubstitutions=preparesubstitutions, - preparepositionings=preparepositionings, -} -basemethod="independent" -local function featuresinitializer(tfmdata,value) - if true then - local starttime=trace_preparing and os.clock() - local features=tfmdata.shared.features - local fullname=tfmdata.properties.fullname or "?" 
- if features then - applybasemethod("initializehashes",tfmdata) - local collectlookups=otf.collectlookups - local rawdata=tfmdata.shared.rawdata - local properties=tfmdata.properties - local script=properties.script - local language=properties.language - local basesubstitutions=rawdata.resources.features.gsub - local basepositionings=rawdata.resources.features.gpos - if basesubstitutions or basepositionings then - local sequences=tfmdata.resources.sequences - for s=1,#sequences do - local sequence=sequences[s] - local sfeatures=sequence.features - if sfeatures then - local order=sequence.order - if order then - for i=1,#order do - local feature=order[i] - local value=features[feature] - if value then - local validlookups,lookuplist=collectlookups(rawdata,feature,script,language) - if not validlookups then - elseif basesubstitutions and basesubstitutions[feature] then - if trace_preparing then - report_prepare("filtering base %s feature %a for %a with value %a","sub",feature,fullname,value) - end - applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist) - registerbasefeature(feature,value) - elseif basepositionings and basepositionings[feature] then - if trace_preparing then - report_prepare("filtering base %a feature %a for %a with value %a","pos",feature,fullname,value) - end - applybasemethod("preparepositionings",tfmdata,feature,value,validlookups,lookuplist) - registerbasefeature(feature,value) - end - end - end - end - end - end - end - registerbasehash(tfmdata) - end - if trace_preparing then - report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,fullname) - end - end -end -registerotffeature { - name="features", - description="features", - default=true, - initializers={ - base=featuresinitializer, - } -} -directives.register("fonts.otf.loader.basemethod",function(v) - if basemethods[v] then - basemethod=v - end -end) - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['node-inj']={ - version=1.001, - comment="companion to node-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files", -} -local next=next -local utfchar=utf.char -local trace_injections=false trackers.register("nodes.injections",function(v) trace_injections=v end) -local report_injections=logs.reporter("nodes","injections") -local attributes,nodes,node=attributes,nodes,node -fonts=fonts -local fontdata=fonts.hashes.identifiers -nodes.injections=nodes.injections or {} -local injections=nodes.injections -local nodecodes=nodes.nodecodes -local glyph_code=nodecodes.glyph -local kern_code=nodecodes.kern -local nuts=nodes.nuts -local nodepool=nuts.pool -local newkern=nodepool.kern -local tonode=nuts.tonode -local tonut=nuts.tonut -local getfield=nuts.getfield -local getnext=nuts.getnext -local getprev=nuts.getprev -local getid=nuts.getid -local getattr=nuts.getattr -local getfont=nuts.getfont -local getsubtype=nuts.getsubtype -local getchar=nuts.getchar -local setfield=nuts.setfield -local setattr=nuts.setattr -local traverse_id=nuts.traverse_id -local insert_node_before=nuts.insert_before -local insert_node_after=nuts.insert_after -local a_kernpair=attributes.private('kernpair') -local a_ligacomp=attributes.private('ligacomp') -local a_markbase=attributes.private('markbase') -local a_markmark=attributes.private('markmark') -local a_markdone=attributes.private('markdone') -local 
a_cursbase=attributes.private('cursbase') -local a_curscurs=attributes.private('curscurs') -local a_cursdone=attributes.private('cursdone') -local unsetvalue=attributes.unsetvalue -function injections.installnewkern(nk) - newkern=nk or newkern -end -local cursives={} -local marks={} -local kerns={} -function injections.reset(n) -end -function injections.setligaindex(n,index) - setattr(n,a_ligacomp,index) -end -function injections.getligaindex(n,default) - return getattr(n,a_ligacomp) or default -end -function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext) - local dx,dy=factor*(exit[1]-entry[1]),factor*(exit[2]-entry[2]) - local ws,wn=tfmstart.width,tfmnext.width - local bound=#cursives+1 - setattr(start,a_cursbase,bound) - setattr(nxt,a_curscurs,bound) - cursives[bound]={ rlmode,dx,dy,ws,wn } - return dx,dy,bound -end -function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr) - local x,y,w,h=factor*spec[1],factor*spec[2],factor*spec[3],factor*spec[4] - if x~=0 or w~=0 or y~=0 or h~=0 then - local bound=getattr(current,a_kernpair) - if bound then - local kb=kerns[bound] - kb[2],kb[3],kb[4],kb[5]=(kb[2] or 0)+x,(kb[3] or 0)+y,(kb[4] or 0)+w,(kb[5] or 0)+h - else - bound=#kerns+1 - setattr(current,a_kernpair,bound) - kerns[bound]={ rlmode,x,y,w,h,r2lflag,tfmchr.width } - end - return x,y,w,h,bound - end - return x,y,w,h -end -function injections.setkern(current,factor,rlmode,x,tfmchr) - local dx=factor*x - if dx~=0 then - local bound=#kerns+1 - setattr(current,a_kernpair,bound) - kerns[bound]={ rlmode,dx } - return dx,bound - else - return 0,0 - end -end -function injections.setmark(start,base,factor,rlmode,ba,ma) - local dx,dy=factor*(ba[1]-ma[1]),factor*(ba[2]-ma[2]) - local bound=getattr(base,a_markbase) - local index=1 - if bound then - local mb=marks[bound] - if mb then - index=#mb+1 - mb[index]={ dx,dy,rlmode } - setattr(start,a_markmark,bound) - setattr(start,a_markdone,index) - return dx,dy,bound - else - report_injections("possible problem, %U is base mark without data (id %a)",getchar(base),bound) - end - end - index=index or 1 - bound=#marks+1 - setattr(base,a_markbase,bound) - setattr(start,a_markmark,bound) - setattr(start,a_markdone,index) - marks[bound]={ [index]={ dx,dy,rlmode } } - return dx,dy,bound -end -local function dir(n) - return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset" -end -local function trace(head) - report_injections("begin run") - for n in traverse_id(glyph_code,head) do - if getsubtype(n)<256 then - local kp=getattr(n,a_kernpair) - local mb=getattr(n,a_markbase) - local mm=getattr(n,a_markmark) - local md=getattr(n,a_markdone) - local cb=getattr(n,a_cursbase) - local cc=getattr(n,a_curscurs) - local char=getchar(n) - report_injections("font %s, char %U, glyph %c",getfont(n),char,char) - if kp then - local k=kerns[kp] - if k[3] then - report_injections(" pairkern: dir %a, x %p, y %p, w %p, h %p",dir(k[1]),k[2],k[3],k[4],k[5]) - else - report_injections(" kern: dir %a, dx %p",dir(k[1]),k[2]) - end - end - if mb then - report_injections(" markbase: bound %a",mb) - end - if mm then - local m=marks[mm] - if mb then - local m=m[mb] - if m then - report_injections(" markmark: bound %a, index %a, dx %p, dy %p",mm,md,m[1],m[2]) - else - report_injections(" markmark: bound %a, missing index",mm) - end - else - m=m[1] - report_injections(" markmark: bound %a, dx %p, dy %p",mm,m and m[1],m and m[2]) - end - end - if cb then - report_injections(" cursbase: bound %a",cb) - end - if cc then - local c=cursives[cc] 
- report_injections(" curscurs: bound %a, dir %a, dx %p, dy %p",cc,dir(c[1]),c[2],c[3]) - end - end - end - report_injections("end run") -end -local function show_result(head) - local current=head - local skipping=false - while current do - local id=getid(current) - if id==glyph_code then - report_injections("char: %C, width %p, xoffset %p, yoffset %p", - getchar(current),getfield(current,"width"),getfield(current,"xoffset"),getfield(current,"yoffset")) - skipping=false - elseif id==kern_code then - report_injections("kern: %p",getfield(current,"kern")) - skipping=false - elseif not skipping then - report_injections() - skipping=true - end - current=getnext(current) - end -end -function injections.handler(head,where,keep) - head=tonut(head) - local has_marks,has_cursives,has_kerns=next(marks),next(cursives),next(kerns) - if has_marks or has_cursives then - if trace_injections then - trace(head) - end - local done,ky,rl,valid,cx,wx,mk,nofvalid=false,{},{},{},{},{},{},0 - if has_kerns then - local nf,tm=nil,nil - for n in traverse_id(glyph_code,head) do - if getsubtype(n)<256 then - nofvalid=nofvalid+1 - valid[nofvalid]=n - local f=getfont(n) - if f~=nf then - nf=f - tm=fontdata[nf].resources.marks - end - if tm then - mk[n]=tm[getchar(n)] - end - local k=getattr(n,a_kernpair) - if k then - local kk=kerns[k] - if kk then - local x,y,w,h=kk[2] or 0,kk[3] or 0,kk[4] or 0,kk[5] or 0 - local dy=y-h - if dy~=0 then - ky[n]=dy - end - if w~=0 or x~=0 then - wx[n]=kk - end - rl[n]=kk[1] - end - end - end - end - else - local nf,tm=nil,nil - for n in traverse_id(glyph_code,head) do - if getsubtype(n)<256 then - nofvalid=nofvalid+1 - valid[nofvalid]=n - local f=getfont(n) - if f~=nf then - nf=f - tm=fontdata[nf].resources.marks - end - if tm then - mk[n]=tm[getchar(n)] - end - end - end - end - if nofvalid>0 then - local cx={} - if has_kerns and next(ky) then - for n,k in next,ky do - setfield(n,"yoffset",k) - end - end - if has_cursives then - local p_cursbase,p=nil,nil - local t,d,maxt={},{},0 - for i=1,nofvalid do - local n=valid[i] - if not mk[n] then - local n_cursbase=getattr(n,a_cursbase) - if p_cursbase then - local n_curscurs=getattr(n,a_curscurs) - if p_cursbase==n_curscurs then - local c=cursives[n_curscurs] - if c then - local rlmode,dx,dy,ws,wn=c[1],c[2],c[3],c[4],c[5] - if rlmode>=0 then - dx=dx-ws - else - dx=dx+wn - end - if dx~=0 then - cx[n]=dx - rl[n]=rlmode - end - dy=-dy - maxt=maxt+1 - t[maxt]=p - d[maxt]=dy - else - maxt=0 - end - end - elseif maxt>0 then - local ny=getfield(n,"yoffset") - for i=maxt,1,-1 do - ny=ny+d[i] - local ti=t[i] - setfield(ti,"yoffset",getfield(ti,"yoffset")+ny) - end - maxt=0 - end - if not n_cursbase and maxt>0 then - local ny=getfield(n,"yoffset") - for i=maxt,1,-1 do - ny=ny+d[i] - local ti=t[i] - setfield(ti,"yoffset",ny) - end - maxt=0 - end - p_cursbase,p=n_cursbase,n - end - end - if maxt>0 then - local ny=getfield(n,"yoffset") - for i=maxt,1,-1 do - ny=ny+d[i] - local ti=t[i] - setfield(ti,"yoffset",ny) - end - maxt=0 - end - if not keep then - cursives={} - end - end - if has_marks then - for i=1,nofvalid do - local p=valid[i] - local p_markbase=getattr(p,a_markbase) - if p_markbase then - local mrks=marks[p_markbase] - local nofmarks=#mrks - for n in traverse_id(glyph_code,getnext(p)) do - local n_markmark=getattr(n,a_markmark) - if p_markbase==n_markmark then - local index=getattr(n,a_markdone) or 1 - local d=mrks[index] - if d then - local rlmode=d[3] - local k=wx[p] - local px=getfield(p,"xoffset") - local ox=0 - if k then - local x=k[2] - 
local w=k[4] - if w then - if rlmode and rlmode>=0 then - ox=px-getfield(p,"width")+d[1]-(w-x) - else - ox=px-d[1]-x - end - else - if rlmode and rlmode>=0 then - ox=px-getfield(p,"width")+d[1] - else - ox=px-d[1]-x - end - end - else - local wp=getfield(p,"width") - local wn=getfield(n,"width") - if rlmode and rlmode>=0 then - ox=px-wp+d[1] - else - ox=px-d[1] - end - if wn~=0 then - insert_node_before(head,n,newkern(-wn/2)) - insert_node_after(head,n,newkern(-wn/2)) - end - end - setfield(n,"xoffset",ox) - local py=getfield(p,"yoffset") - local oy=0 - if mk[p] then - oy=py+d[2] - else - oy=getfield(n,"yoffset")+py+d[2] - end - setfield(n,"yoffset",oy) - if nofmarks==1 then - break - else - nofmarks=nofmarks-1 - end - end - elseif not n_markmark then - break - else - end - end - end - end - if not keep then - marks={} - end - end - if next(wx) then - for n,k in next,wx do - local x=k[2] - local w=k[4] - if w then - local rl=k[1] - local wx=w-x - if rl<0 then - if wx~=0 then - insert_node_before(head,n,newkern(wx)) - end - if x~=0 then - insert_node_after (head,n,newkern(x)) - end - else - if x~=0 then - insert_node_before(head,n,newkern(x)) - end - if wx~=0 then - insert_node_after (head,n,newkern(wx)) - end - end - elseif x~=0 then - insert_node_before(head,n,newkern(x)) - end - end - end - if next(cx) then - for n,k in next,cx do - if k~=0 then - local rln=rl[n] - if rln and rln<0 then - insert_node_before(head,n,newkern(-k)) - else - insert_node_before(head,n,newkern(k)) - end - end - end - end - if not keep then - kerns={} - end - return tonode(head),true - elseif not keep then - kerns,cursives,marks={},{},{} - end - elseif has_kerns then - if trace_injections then - trace(head) - end - for n in traverse_id(glyph_code,head) do - if getsubtype(n)<256 then - local k=getattr(n,a_kernpair) - if k then - local kk=kerns[k] - if kk then - local rl,x,y,w=kk[1],kk[2] or 0,kk[3],kk[4] - if y and y~=0 then - setfield(n,"yoffset",y) - end - if w then - local wx=w-x - if rl<0 then - if wx~=0 then - insert_node_before(head,n,newkern(wx)) - end - if x~=0 then - insert_node_after (head,n,newkern(x)) - end - else - if x~=0 then - insert_node_before(head,n,newkern(x)) - end - if wx~=0 then - insert_node_after(head,n,newkern(wx)) - end - end - else - if x~=0 then - insert_node_before(head,n,newkern(x)) - end - end - end - end - end - end - if not keep then - kerns={} - end - return tonode(head),true - else - end - return tonode(head),false -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-otx']={ - version=1.001, - comment="companion to font-otf.lua (analysing)", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local type=type -if not trackers then trackers={ register=function() end } end -local fonts,nodes,node=fonts,nodes,node -local allocate=utilities.storage.allocate -local otf=fonts.handlers.otf -local analyzers=fonts.analyzers -local initializers=allocate() -local methods=allocate() -analyzers.initializers=initializers -analyzers.methods=methods -analyzers.useunicodemarks=false -local a_state=attributes.private('state') -local nuts=nodes.nuts -local tonut=nuts.tonut -local getfield=nuts.getfield -local getnext=nuts.getnext -local getprev=nuts.getprev -local getid=nuts.getid -local getprop=nuts.getprop -local setprop=nuts.setprop -local getfont=nuts.getfont -local getsubtype=nuts.getsubtype -local 
getchar=nuts.getchar -local traverse_id=nuts.traverse_id -local traverse_node_list=nuts.traverse -local end_of_math=nuts.end_of_math -local nodecodes=nodes.nodecodes -local glyph_code=nodecodes.glyph -local disc_code=nodecodes.disc -local math_code=nodecodes.math -local fontdata=fonts.hashes.identifiers -local categories=characters and characters.categories or {} -local otffeatures=fonts.constructors.newfeatures("otf") -local registerotffeature=otffeatures.register -local s_init=1 local s_rphf=7 -local s_medi=2 local s_half=8 -local s_fina=3 local s_pref=9 -local s_isol=4 local s_blwf=10 -local s_mark=5 local s_pstf=11 -local s_rest=6 -local states={ - init=s_init, - medi=s_medi, - fina=s_fina, - isol=s_isol, - mark=s_mark, - rest=s_rest, - rphf=s_rphf, - half=s_half, - pref=s_pref, - blwf=s_blwf, - pstf=s_pstf, -} -local features={ - init=s_init, - medi=s_medi, - fina=s_fina, - isol=s_isol, - rphf=s_rphf, - half=s_half, - pref=s_pref, - blwf=s_blwf, - pstf=s_pstf, -} -analyzers.states=states -analyzers.features=features -function analyzers.setstate(head,font) - local useunicodemarks=analyzers.useunicodemarks - local tfmdata=fontdata[font] - local descriptions=tfmdata.descriptions - local first,last,current,n,done=nil,nil,head,0,false - current=tonut(current) - while current do - local id=getid(current) - if id==glyph_code and getfont(current)==font then - done=true - local char=getchar(current) - local d=descriptions[char] - if d then - if d.class=="mark" or (useunicodemarks and categories[char]=="mn") then - done=true - setprop(current,a_state,s_mark) - elseif n==0 then - first,last,n=current,current,1 - setprop(current,a_state,s_init) - else - last,n=current,n+1 - setprop(current,a_state,s_medi) - end - else - if first and first==last then - setprop(last,a_state,s_isol) - elseif last then - setprop(last,a_state,s_fina) - end - first,last,n=nil,nil,0 - end - elseif id==disc_code then - setprop(current,a_state,s_medi) - last=current - else - if first and first==last then - setprop(last,a_state,s_isol) - elseif last then - setprop(last,a_state,s_fina) - end - first,last,n=nil,nil,0 - if id==math_code then - current=end_of_math(current) - end - end - current=getnext(current) - end - if first and first==last then - setprop(last,a_state,s_isol) - elseif last then - setprop(last,a_state,s_fina) - end - return head,done -end -local function analyzeinitializer(tfmdata,value) - local script,language=otf.scriptandlanguage(tfmdata) - local action=initializers[script] - if not action then - elseif type(action)=="function" then - return action(tfmdata,value) - else - local action=action[language] - if action then - return action(tfmdata,value) - end - end -end -local function analyzeprocessor(head,font,attr) - local tfmdata=fontdata[font] - local script,language=otf.scriptandlanguage(tfmdata,attr) - local action=methods[script] - if not action then - elseif type(action)=="function" then - return action(head,font,attr) - else - action=action[language] - if action then - return action(head,font,attr) - end - end - return head,false -end -registerotffeature { - name="analyze", - description="analysis of character classes", - default=true, - initializers={ - node=analyzeinitializer, - }, - processors={ - position=1, - node=analyzeprocessor, - } -} -methods.latn=analyzers.setstate -local tatweel=0x0640 -local zwnj=0x200C -local zwj=0x200D -local isolated={ - [0x0600]=true,[0x0601]=true,[0x0602]=true,[0x0603]=true, - [0x0604]=true, - [0x0608]=true,[0x060B]=true,[0x0621]=true,[0x0674]=true, - [0x06DD]=true, 
- [0x0856]=true,[0x0858]=true,[0x0857]=true, - [0x07FA]=true, - [zwnj]=true, - [0x08AD]=true, -} -local final={ - [0x0622]=true,[0x0623]=true,[0x0624]=true,[0x0625]=true, - [0x0627]=true,[0x0629]=true,[0x062F]=true,[0x0630]=true, - [0x0631]=true,[0x0632]=true,[0x0648]=true,[0x0671]=true, - [0x0672]=true,[0x0673]=true,[0x0675]=true,[0x0676]=true, - [0x0677]=true,[0x0688]=true,[0x0689]=true,[0x068A]=true, - [0x068B]=true,[0x068C]=true,[0x068D]=true,[0x068E]=true, - [0x068F]=true,[0x0690]=true,[0x0691]=true,[0x0692]=true, - [0x0693]=true,[0x0694]=true,[0x0695]=true,[0x0696]=true, - [0x0697]=true,[0x0698]=true,[0x0699]=true,[0x06C0]=true, - [0x06C3]=true,[0x06C4]=true,[0x06C5]=true,[0x06C6]=true, - [0x06C7]=true,[0x06C8]=true,[0x06C9]=true,[0x06CA]=true, - [0x06CB]=true,[0x06CD]=true,[0x06CF]=true,[0x06D2]=true, - [0x06D3]=true,[0x06D5]=true,[0x06EE]=true,[0x06EF]=true, - [0x0759]=true,[0x075A]=true,[0x075B]=true,[0x076B]=true, - [0x076C]=true,[0x0771]=true,[0x0773]=true,[0x0774]=true, - [0x0778]=true,[0x0779]=true, - [0x08AA]=true,[0x08AB]=true,[0x08AC]=true, - [0xFEF5]=true,[0xFEF7]=true,[0xFEF9]=true,[0xFEFB]=true, - [0x0710]=true,[0x0715]=true,[0x0716]=true,[0x0717]=true, - [0x0718]=true,[0x0719]=true,[0x0728]=true,[0x072A]=true, - [0x072C]=true,[0x071E]=true, - [0x072F]=true,[0x074D]=true, - [0x0840]=true,[0x0849]=true,[0x0854]=true,[0x0846]=true, - [0x084F]=true, - [0x08AE]=true,[0x08B1]=true,[0x08B2]=true, -} -local medial={ - [0x0626]=true,[0x0628]=true,[0x062A]=true,[0x062B]=true, - [0x062C]=true,[0x062D]=true,[0x062E]=true,[0x0633]=true, - [0x0634]=true,[0x0635]=true,[0x0636]=true,[0x0637]=true, - [0x0638]=true,[0x0639]=true,[0x063A]=true,[0x063B]=true, - [0x063C]=true,[0x063D]=true,[0x063E]=true,[0x063F]=true, - [0x0641]=true,[0x0642]=true,[0x0643]=true, - [0x0644]=true,[0x0645]=true,[0x0646]=true,[0x0647]=true, - [0x0649]=true,[0x064A]=true,[0x066E]=true,[0x066F]=true, - [0x0678]=true,[0x0679]=true,[0x067A]=true,[0x067B]=true, - [0x067C]=true,[0x067D]=true,[0x067E]=true,[0x067F]=true, - [0x0680]=true,[0x0681]=true,[0x0682]=true,[0x0683]=true, - [0x0684]=true,[0x0685]=true,[0x0686]=true,[0x0687]=true, - [0x069A]=true,[0x069B]=true,[0x069C]=true,[0x069D]=true, - [0x069E]=true,[0x069F]=true,[0x06A0]=true,[0x06A1]=true, - [0x06A2]=true,[0x06A3]=true,[0x06A4]=true,[0x06A5]=true, - [0x06A6]=true,[0x06A7]=true,[0x06A8]=true,[0x06A9]=true, - [0x06AA]=true,[0x06AB]=true,[0x06AC]=true,[0x06AD]=true, - [0x06AE]=true,[0x06AF]=true,[0x06B0]=true,[0x06B1]=true, - [0x06B2]=true,[0x06B3]=true,[0x06B4]=true,[0x06B5]=true, - [0x06B6]=true,[0x06B7]=true,[0x06B8]=true,[0x06B9]=true, - [0x06BA]=true,[0x06BB]=true,[0x06BC]=true,[0x06BD]=true, - [0x06BE]=true,[0x06BF]=true,[0x06C1]=true,[0x06C2]=true, - [0x06CC]=true,[0x06CE]=true,[0x06D0]=true,[0x06D1]=true, - [0x06FA]=true,[0x06FB]=true,[0x06FC]=true,[0x06FF]=true, - [0x0750]=true,[0x0751]=true,[0x0752]=true,[0x0753]=true, - [0x0754]=true,[0x0755]=true,[0x0756]=true,[0x0757]=true, - [0x0758]=true,[0x075C]=true,[0x075D]=true,[0x075E]=true, - [0x075F]=true,[0x0760]=true,[0x0761]=true,[0x0762]=true, - [0x0763]=true,[0x0764]=true,[0x0765]=true,[0x0766]=true, - [0x0767]=true,[0x0768]=true,[0x0769]=true,[0x076A]=true, - [0x076D]=true,[0x076E]=true,[0x076F]=true,[0x0770]=true, - [0x0772]=true,[0x0775]=true,[0x0776]=true,[0x0777]=true, - [0x077A]=true,[0x077B]=true,[0x077C]=true,[0x077D]=true, - [0x077E]=true,[0x077F]=true, - [0x08A0]=true,[0x08A2]=true,[0x08A4]=true,[0x08A5]=true, - [0x08A6]=true,[0x0620]=true,[0x08A8]=true,[0x08A9]=true, - 
[0x08A7]=true,[0x08A3]=true, - [0x0712]=true,[0x0713]=true,[0x0714]=true,[0x071A]=true, - [0x071B]=true,[0x071C]=true,[0x071D]=true,[0x071F]=true, - [0x0720]=true,[0x0721]=true,[0x0722]=true,[0x0723]=true, - [0x0724]=true,[0x0725]=true,[0x0726]=true,[0x0727]=true, - [0x0729]=true,[0x072B]=true,[0x072D]=true,[0x072E]=true, - [0x074E]=true,[0x074F]=true, - [0x0841]=true,[0x0842]=true,[0x0843]=true,[0x0844]=true, - [0x0845]=true,[0x0847]=true,[0x0848]=true,[0x0855]=true, - [0x0851]=true,[0x084E]=true,[0x084D]=true,[0x084A]=true, - [0x084B]=true,[0x084C]=true,[0x0850]=true,[0x0852]=true, - [0x0853]=true, - [0x07D7]=true,[0x07E8]=true,[0x07D9]=true,[0x07EA]=true, - [0x07CA]=true,[0x07DB]=true,[0x07CC]=true,[0x07DD]=true, - [0x07CE]=true,[0x07DF]=true,[0x07D4]=true,[0x07E5]=true, - [0x07E9]=true,[0x07E7]=true,[0x07E3]=true,[0x07E2]=true, - [0x07E0]=true,[0x07E1]=true,[0x07DE]=true,[0x07DC]=true, - [0x07D1]=true,[0x07DA]=true,[0x07D8]=true,[0x07D6]=true, - [0x07D2]=true,[0x07D0]=true,[0x07CF]=true,[0x07CD]=true, - [0x07CB]=true,[0x07D3]=true,[0x07E4]=true,[0x07D5]=true, - [0x07E6]=true, - [tatweel]=true,[zwj]=true, - [0x08A1]=true,[0x08AF]=true,[0x08B0]=true, -} -local arab_warned={} -local function warning(current,what) - local char=getchar(current) - if not arab_warned[char] then - log.report("analyze","arab: character %C has no %a class",char,what) - arab_warned[char]=true - end -end -local function finish(first,last) - if last then - if first==last then - local fc=getchar(first) - if medial[fc] or final[fc] then - setprop(first,a_state,s_isol) - else - warning(first,"isol") - setprop(first,a_state,s_error) - end - else - local lc=getchar(last) - if medial[lc] or final[lc] then - setprop(last,a_state,s_fina) - else - warning(last,"fina") - setprop(last,a_state,s_error) - end - end - first,last=nil,nil - elseif first then - local fc=getchar(first) - if medial[fc] or final[fc] then - setprop(first,a_state,s_isol) - else - warning(first,"isol") - setprop(first,a_state,s_error) - end - first=nil - end - return first,last -end -function methods.arab(head,font,attr) - local useunicodemarks=analyzers.useunicodemarks - local tfmdata=fontdata[font] - local marks=tfmdata.resources.marks - local first,last,current,done=nil,nil,head,false - current=tonut(current) - while current do - local id=getid(current) - if id==glyph_code and getfont(current)==font and getsubtype(current)<256 and not getprop(current,a_state) then - done=true - local char=getchar(current) - if marks[char] or (useunicodemarks and categories[char]=="mn") then - setprop(current,a_state,s_mark) - elseif isolated[char] then - first,last=finish(first,last) - setprop(current,a_state,s_isol) - first,last=nil,nil - elseif not first then - if medial[char] then - setprop(current,a_state,s_init) - first,last=first or current,current - elseif final[char] then - setprop(current,a_state,s_isol) - first,last=nil,nil - else - first,last=finish(first,last) - end - elseif medial[char] then - first,last=first or current,current - setprop(current,a_state,s_medi) - elseif final[char] then - if getprop(last,a_state)~=s_init then - setprop(last,a_state,s_medi) - end - setprop(current,a_state,s_fina) - first,last=nil,nil - elseif char>=0x0600 and char<=0x06FF then - setprop(current,a_state,s_rest) - first,last=finish(first,last) - else - first,last=finish(first,last) - end - else - if first or last then - first,last=finish(first,last) - end - if id==math_code then - current=end_of_math(current) - end - end - current=getnext(current) - end - if first or last 
then - finish(first,last) - end - return head,done -end -methods.syrc=methods.arab -methods.mand=methods.arab -methods.nko=methods.arab -directives.register("otf.analyze.useunicodemarks",function(v) - analyzers.useunicodemarks=v -end) - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-otn']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files", -} -local concat,insert,remove=table.concat,table.insert,table.remove -local gmatch,gsub,find,match,lower,strip=string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip -local type,next,tonumber,tostring=type,next,tonumber,tostring -local lpegmatch=lpeg.match -local random=math.random -local formatters=string.formatters -local logs,trackers,nodes,attributes=logs,trackers,nodes,attributes -local registertracker=trackers.register -local fonts=fonts -local otf=fonts.handlers.otf -local trace_lookups=false registertracker("otf.lookups",function(v) trace_lookups=v end) -local trace_singles=false registertracker("otf.singles",function(v) trace_singles=v end) -local trace_multiples=false registertracker("otf.multiples",function(v) trace_multiples=v end) -local trace_alternatives=false registertracker("otf.alternatives",function(v) trace_alternatives=v end) -local trace_ligatures=false registertracker("otf.ligatures",function(v) trace_ligatures=v end) -local trace_contexts=false registertracker("otf.contexts",function(v) trace_contexts=v end) -local trace_marks=false registertracker("otf.marks",function(v) trace_marks=v end) -local trace_kerns=false registertracker("otf.kerns",function(v) trace_kerns=v end) -local trace_cursive=false registertracker("otf.cursive",function(v) trace_cursive=v end) -local trace_preparing=false registertracker("otf.preparing",function(v) trace_preparing=v end) -local trace_bugs=false registertracker("otf.bugs",function(v) trace_bugs=v end) -local trace_details=false registertracker("otf.details",function(v) trace_details=v end) -local trace_applied=false registertracker("otf.applied",function(v) trace_applied=v end) -local trace_steps=false registertracker("otf.steps",function(v) trace_steps=v end) -local trace_skips=false registertracker("otf.skips",function(v) trace_skips=v end) -local trace_directions=false registertracker("otf.directions",function(v) trace_directions=v end) -local report_direct=logs.reporter("fonts","otf direct") -local report_subchain=logs.reporter("fonts","otf subchain") -local report_chain=logs.reporter("fonts","otf chain") -local report_process=logs.reporter("fonts","otf process") -local report_prepare=logs.reporter("fonts","otf prepare") -local report_warning=logs.reporter("fonts","otf warning") -registertracker("otf.verbose_chain",function(v) otf.setcontextchain(v and "verbose") end) -registertracker("otf.normal_chain",function(v) otf.setcontextchain(v and "normal") end) -registertracker("otf.replacements","otf.singles,otf.multiples,otf.alternatives,otf.ligatures") -registertracker("otf.positions","otf.marks,otf.kerns,otf.cursive") -registertracker("otf.actions","otf.replacements,otf.positions") -registertracker("otf.injections","nodes.injections") -registertracker("*otf.sample","otf.steps,otf.actions,otf.analyzing") -local nuts=nodes.nuts -local tonode=nuts.tonode -local tonut=nuts.tonut -local getfield=nuts.getfield -local setfield=nuts.setfield 
-local getnext=nuts.getnext -local getprev=nuts.getprev -local getid=nuts.getid -local getattr=nuts.getattr -local setattr=nuts.setattr -local getprop=nuts.getprop -local setprop=nuts.setprop -local getfont=nuts.getfont -local getsubtype=nuts.getsubtype -local getchar=nuts.getchar -local insert_node_after=nuts.insert_after -local delete_node=nuts.delete -local copy_node=nuts.copy -local find_node_tail=nuts.tail -local flush_node_list=nuts.flush_list -local end_of_math=nuts.end_of_math -local setmetatableindex=table.setmetatableindex -local zwnj=0x200C -local zwj=0x200D -local wildcard="*" -local default="dflt" -local nodecodes=nodes.nodecodes -local whatcodes=nodes.whatcodes -local glyphcodes=nodes.glyphcodes -local disccodes=nodes.disccodes -local glyph_code=nodecodes.glyph -local glue_code=nodecodes.glue -local disc_code=nodecodes.disc -local whatsit_code=nodecodes.whatsit -local math_code=nodecodes.math -local dir_code=whatcodes.dir -local localpar_code=whatcodes.localpar -local discretionary_code=disccodes.discretionary -local ligature_code=glyphcodes.ligature -local privateattribute=attributes.private -local a_state=privateattribute('state') -local a_cursbase=privateattribute('cursbase') -local injections=nodes.injections -local setmark=injections.setmark -local setcursive=injections.setcursive -local setkern=injections.setkern -local setpair=injections.setpair -local resetinjection=injections.reset -local setligaindex=injections.setligaindex -local getligaindex=injections.getligaindex -local cursonce=true -local fonthashes=fonts.hashes -local fontdata=fonthashes.identifiers -local otffeatures=fonts.constructors.newfeatures("otf") -local registerotffeature=otffeatures.register -local onetimemessage=fonts.loggers.onetimemessage or function() end -otf.defaultnodealternate="none" -local tfmdata=false -local characters=false -local descriptions=false -local resources=false -local marks=false -local currentfont=false -local lookuptable=false -local anchorlookups=false -local lookuptypes=false -local lookuptags=false -local handlers={} -local rlmode=0 -local featurevalue=false -local checkstep=(nodes and nodes.tracers and nodes.tracers.steppers.check) or function() end -local registerstep=(nodes and nodes.tracers and nodes.tracers.steppers.register) or function() end -local registermessage=(nodes and nodes.tracers and nodes.tracers.steppers.message) or function() end -local function logprocess(...) - if trace_steps then - registermessage(...) - end - report_direct(...) -end -local function logwarning(...) - report_direct(...) 
-end -local f_unicode=formatters["%U"] -local f_uniname=formatters["%U (%s)"] -local f_unilist=formatters["% t (% t)"] -local function gref(n) - if type(n)=="number" then - local description=descriptions[n] - local name=description and description.name - if name then - return f_uniname(n,name) - else - return f_unicode(n) - end - elseif n then - local num,nam={},{} - for i=1,#n do - local ni=n[i] - if tonumber(ni) then - local di=descriptions[ni] - num[i]=f_unicode(ni) - nam[i]=di and di.name or "-" - end - end - return f_unilist(num,nam) - else - return "" - end -end -local function cref(kind,chainname,chainlookupname,lookupname,index) - if index then - return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookuptags[lookupname],index) - elseif lookupname then - return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookuptags[lookupname]) - elseif chainlookupname then - return formatters["feature %a, chain %a, sub %a"](kind,lookuptags[chainname],lookuptags[chainlookupname]) - elseif chainname then - return formatters["feature %a, chain %a"](kind,lookuptags[chainname]) - else - return formatters["feature %a"](kind) - end -end -local function pref(kind,lookupname) - return formatters["feature %a, lookup %a"](kind,lookuptags[lookupname]) -end -local function copy_glyph(g) - local components=getfield(g,"components") - if components then - setfield(g,"components",nil) - local n=copy_node(g) - setfield(g,"components",components) - return n - else - return copy_node(g) - end -end -local function markstoligature(kind,lookupname,head,start,stop,char) - if start==stop and getchar(start)==char then - return head,start - else - local prev=getprev(start) - local next=getnext(stop) - setfield(start,"prev",nil) - setfield(stop,"next",nil) - local base=copy_glyph(start) - if head==start then - head=base - end - resetinjection(base) - setfield(base,"char",char) - setfield(base,"subtype",ligature_code) - setfield(base,"components",start) - if prev then - setfield(prev,"next",base) - end - if next then - setfield(next,"prev",base) - end - setfield(base,"next",next) - setfield(base,"prev",prev) - return head,base - end -end -local function getcomponentindex(start) - if getid(start)~=glyph_code then - return 0 - elseif getsubtype(start)==ligature_code then - local i=0 - local components=getfield(start,"components") - while components do - i=i+getcomponentindex(components) - components=getnext(components) - end - return i - elseif not marks[getchar(start)] then - return 1 - else - return 0 - end -end -local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound) - if start==stop and getchar(start)==char then - resetinjection(start) - setfield(start,"char",char) - return head,start - end - local prev=getprev(start) - local next=getnext(stop) - setfield(start,"prev",nil) - setfield(stop,"next",nil) - local base=copy_glyph(start) - if start==head then - head=base - end - resetinjection(base) - setfield(base,"char",char) - setfield(base,"subtype",ligature_code) - setfield(base,"components",start) - if prev then - setfield(prev,"next",base) - end - if next then - setfield(next,"prev",base) - end - setfield(base,"next",next) - setfield(base,"prev",prev) - if not discfound then - local deletemarks=markflag~="mark" - local components=start - local baseindex=0 - local componentindex=0 - local head=base - local current=base - while start do - local char=getchar(start) - if not marks[char] then - 
baseindex=baseindex+componentindex - componentindex=getcomponentindex(start) - elseif not deletemarks then - setligaindex(start,baseindex+getligaindex(start,componentindex)) - if trace_marks then - logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),getligaindex(start)) - end - head,current=insert_node_after(head,current,copy_node(start)) - elseif trace_marks then - logwarning("%s: delete mark %s",pref(kind,lookupname),gref(char)) - end - start=getnext(start) - end - local start=getnext(current) - while start and getid(start)==glyph_code do - local char=getchar(start) - if marks[char] then - setligaindex(start,baseindex+getligaindex(start,componentindex)) - if trace_marks then - logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),getligaindex(start)) - end - else - break - end - start=getnext(start) - end - end - return head,base -end -function handlers.gsub_single(head,start,kind,lookupname,replacement) - if trace_singles then - logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(getchar(start)),gref(replacement)) - end - resetinjection(start) - setfield(start,"char",replacement) - return head,start,true -end -local function get_alternative_glyph(start,alternatives,value,trace_alternatives) - local n=#alternatives - if value=="random" then - local r=random(1,n) - return alternatives[r],trace_alternatives and formatters["value %a, taking %a"](value,r) - elseif value=="first" then - return alternatives[1],trace_alternatives and formatters["value %a, taking %a"](value,1) - elseif value=="last" then - return alternatives[n],trace_alternatives and formatters["value %a, taking %a"](value,n) - else - value=tonumber(value) - if type(value)~="number" then - return alternatives[1],trace_alternatives and formatters["invalid value %s, taking %a"](value,1) - elseif value>n then - local defaultalt=otf.defaultnodealternate - if defaultalt=="first" then - return alternatives[n],trace_alternatives and formatters["invalid value %s, taking %a"](value,1) - elseif defaultalt=="last" then - return alternatives[1],trace_alternatives and formatters["invalid value %s, taking %a"](value,n) - else - return false,trace_alternatives and formatters["invalid value %a, %s"](value,"out of range") - end - elseif value==0 then - return getchar(start),trace_alternatives and formatters["invalid value %a, %s"](value,"no change") - elseif value<1 then - return alternatives[1],trace_alternatives and formatters["invalid value %a, taking %a"](value,1) - else - return alternatives[value],trace_alternatives and formatters["value %a, taking %a"](value,value) - end - end -end -local function multiple_glyphs(head,start,multiple,ignoremarks) - local nofmultiples=#multiple - if nofmultiples>0 then - resetinjection(start) - setfield(start,"char",multiple[1]) - if nofmultiples>1 then - local sn=getnext(start) - for k=2,nofmultiples do - local n=copy_node(start) - resetinjection(n) - setfield(n,"char",multiple[k]) - setfield(n,"next",sn) - setfield(n,"prev",start) - if sn then - setfield(sn,"prev",n) - end - setfield(start,"next",n) - start=n - end - end - return head,start,true - else - if trace_multiples then - logprocess("no multiple for %s",gref(getchar(start))) - end - return head,start,false - end -end -function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence) - local value=featurevalue==true and tfmdata.shared.features[kind] or featurevalue - local choice,comment=get_alternative_glyph(start,alternative,value,trace_alternatives) - if choice 
then - if trace_alternatives then - logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(getchar(start)),choice,gref(choice),comment) - end - resetinjection(start) - setfield(start,"char",choice) - else - if trace_alternatives then - logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(getchar(start)),comment) - end - end - return head,start,true -end -function handlers.gsub_multiple(head,start,kind,lookupname,multiple,sequence) - if trace_multiples then - logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(getchar(start)),gref(multiple)) - end - return multiple_glyphs(head,start,multiple,sequence.flags[1]) -end -function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence) - local s,stop,discfound=getnext(start),nil,false - local startchar=getchar(start) - if marks[startchar] then - while s do - local id=getid(s) - if id==glyph_code and getfont(s)==currentfont and getsubtype(s)<256 then - local lg=ligature[getchar(s)] - if lg then - stop=s - ligature=lg - s=getnext(s) - else - break - end - else - break - end - end - if stop then - local lig=ligature.ligature - if lig then - if trace_ligatures then - local stopchar=getchar(stop) - head,start=markstoligature(kind,lookupname,head,start,stop,lig) - logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start))) - else - head,start=markstoligature(kind,lookupname,head,start,stop,lig) - end - return head,start,true - else - end - end - else - local skipmark=sequence.flags[1] - while s do - local id=getid(s) - if id==glyph_code and getsubtype(s)<256 then - if getfont(s)==currentfont then - local char=getchar(s) - if skipmark and marks[char] then - s=getnext(s) - else - local lg=ligature[char] - if lg then - stop=s - ligature=lg - s=getnext(s) - else - break - end - end - else - break - end - elseif id==disc_code then - discfound=true - s=getnext(s) - else - break - end - end - local lig=ligature.ligature - if lig then - if stop then - if trace_ligatures then - local stopchar=getchar(stop) - head,start=toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound) - logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start))) - else - head,start=toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound) - end - else - resetinjection(start) - setfield(start,"char",lig) - if trace_ligatures then - logprocess("%s: replacing %s by (no real) ligature %s case 3",pref(kind,lookupname),gref(startchar),gref(lig)) - end - end - return head,start,true - else - end - end - return head,start,false -end -function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence) - local markchar=getchar(start) - if marks[markchar] then - local base=getprev(start) - if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then - local basechar=getchar(base) - if marks[basechar] then - while true do - base=getprev(base) - if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then - basechar=getchar(base) - if not marks[basechar] then - break - end - else - if trace_bugs then - logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar)) - end - return head,start,false - end - end - end - local baseanchors=descriptions[basechar] - if baseanchors then - baseanchors=baseanchors.anchors - end - if baseanchors then - local 
baseanchors=baseanchors['basechar'] - if baseanchors then - local al=anchorlookups[lookupname] - for anchor,ba in next,baseanchors do - if al[anchor] then - local ma=markanchors[anchor] - if ma then - local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) - if trace_marks then - logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)", - pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) - end - return head,start,true - end - end - end - if trace_bugs then - logwarning("%s, no matching anchors for mark %s and base %s",pref(kind,lookupname),gref(markchar),gref(basechar)) - end - end - elseif trace_bugs then - onetimemessage(currentfont,basechar,"no base anchors",report_fonts) - end - elseif trace_bugs then - logwarning("%s: prev node is no char",pref(kind,lookupname)) - end - elseif trace_bugs then - logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar)) - end - return head,start,false -end -function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence) - local markchar=getchar(start) - if marks[markchar] then - local base=getprev(start) - if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then - local basechar=getchar(base) - if marks[basechar] then - while true do - base=getprev(base) - if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then - basechar=getchar(base) - if not marks[basechar] then - break - end - else - if trace_bugs then - logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar)) - end - return head,start,false - end - end - end - local index=getligaindex(start) - local baseanchors=descriptions[basechar] - if baseanchors then - baseanchors=baseanchors.anchors - if baseanchors then - local baseanchors=baseanchors['baselig'] - if baseanchors then - local al=anchorlookups[lookupname] - for anchor,ba in next,baseanchors do - if al[anchor] then - local ma=markanchors[anchor] - if ma then - ba=ba[index] - if ba then - local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) - if trace_marks then - logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)", - pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy) - end - return head,start,true - else - if trace_bugs then - logwarning("%s: no matching anchors for mark %s and baselig %s with index %a",pref(kind,lookupname),gref(markchar),gref(basechar),index) - end - end - end - end - end - if trace_bugs then - logwarning("%s: no matching anchors for mark %s and baselig %s",pref(kind,lookupname),gref(markchar),gref(basechar)) - end - end - end - elseif trace_bugs then - onetimemessage(currentfont,basechar,"no base anchors",report_fonts) - end - elseif trace_bugs then - logwarning("%s: prev node is no char",pref(kind,lookupname)) - end - elseif trace_bugs then - logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar)) - end - return head,start,false -end -function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence) - local markchar=getchar(start) - if marks[markchar] then - local base=getprev(start) - local slc=getligaindex(start) - if slc then - while base do - local blc=getligaindex(base) - if blc and blc~=slc then - base=getprev(base) - else - break - end - end - end - if base and getid(base)==glyph_code and getfont(base)==currentfont and 
getsubtype(base)<256 then - local basechar=getchar(base) - local baseanchors=descriptions[basechar] - if baseanchors then - baseanchors=baseanchors.anchors - if baseanchors then - baseanchors=baseanchors['basemark'] - if baseanchors then - local al=anchorlookups[lookupname] - for anchor,ba in next,baseanchors do - if al[anchor] then - local ma=markanchors[anchor] - if ma then - local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) - if trace_marks then - logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)", - pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) - end - return head,start,true - end - end - end - if trace_bugs then - logwarning("%s: no matching anchors for mark %s and basemark %s",pref(kind,lookupname),gref(markchar),gref(basechar)) - end - end - end - elseif trace_bugs then - onetimemessage(currentfont,basechar,"no base anchors",report_fonts) - end - elseif trace_bugs then - logwarning("%s: prev node is no mark",pref(kind,lookupname)) - end - elseif trace_bugs then - logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar)) - end - return head,start,false -end -function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence) - local alreadydone=cursonce and getprop(start,a_cursbase) - if not alreadydone then - local done=false - local startchar=getchar(start) - if marks[startchar] then - if trace_cursive then - logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar)) - end - else - local nxt=getnext(start) - while not done and nxt and getid(nxt)==glyph_code and getfont(nxt)==currentfont and getsubtype(nxt)<256 do - local nextchar=getchar(nxt) - if marks[nextchar] then - nxt=getnext(nxt) - else - local entryanchors=descriptions[nextchar] - if entryanchors then - entryanchors=entryanchors.anchors - if entryanchors then - entryanchors=entryanchors['centry'] - if entryanchors then - local al=anchorlookups[lookupname] - for anchor,entry in next,entryanchors do - if al[anchor] then - local exit=exitanchors[anchor] - if exit then - local dx,dy,bound=setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar]) - if trace_cursive then - logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode) - end - done=true - break - end - end - end - end - end - elseif trace_bugs then - onetimemessage(currentfont,startchar,"no entry anchors",report_fonts) - end - break - end - end - end - return head,start,done - else - if trace_cursive and trace_details then - logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone) - end - return head,start,false - end -end -function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence) - local startchar=getchar(start) - local dx,dy,w,h=setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar]) - if trace_kerns then - logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h) - end - return head,start,false -end -function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence) - local snext=getnext(start) - if not snext then - return head,start,false - else - local prev,done=start,false - local factor=tfmdata.parameters.factor - local lookuptype=lookuptypes[lookupname] - while snext and 
getid(snext)==glyph_code and getfont(snext)==currentfont and getsubtype(snext)<256 do - local nextchar=getchar(snext) - local krn=kerns[nextchar] - if not krn and marks[nextchar] then - prev=snext - snext=getnext(snext) - else - if not krn then - elseif type(krn)=="table" then - if lookuptype=="pair" then - local a,b=krn[2],krn[3] - if a and #a>0 then - local startchar=getchar(start) - local x,y,w,h=setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar]) - if trace_kerns then - logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h) - end - end - if b and #b>0 then - local startchar=getchar(start) - local x,y,w,h=setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar]) - if trace_kerns then - logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h) - end - end - else - report_process("%s: check this out (old kern stuff)",pref(kind,lookupname)) - end - done=true - elseif krn~=0 then - local k=setkern(snext,factor,rlmode,krn) - if trace_kerns then - logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(getchar(prev)),gref(nextchar)) - end - done=true - end - break - end - end - return head,start,done - end -end -local chainmores={} -local chainprocs={} -local function logprocess(...) - if trace_steps then - registermessage(...) - end - report_subchain(...) -end -local logwarning=report_subchain -local function logprocess(...) - if trace_steps then - registermessage(...) - end - report_chain(...) -end -local logwarning=report_chain -function chainprocs.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname) - logwarning("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname)) - return head,start,false -end -function chainmores.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname,n) - logprocess("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname)) - return head,start,false -end -function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements) - local char=getchar(start) - local replacement=replacements[char] - if replacement then - if trace_singles then - logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement)) - end - resetinjection(start) - setfield(start,"char",replacement) - return head,start,true - else - return head,start,false - end -end -function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex) - local current=start - local subtables=currentlookup.subtables - if #subtables>1 then - logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," ")) - end - while current do - if getid(current)==glyph_code then - local currentchar=getchar(current) - local lookupname=subtables[1] - local replacement=lookuphash[lookupname] - if not replacement then - if trace_bugs then - logwarning("%s: no single hits",cref(kind,chainname,chainlookupname,lookupname,chainindex)) - end - else - replacement=replacement[currentchar] - if not replacement or replacement=="" then - if trace_bugs then - logwarning("%s: no single for %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar)) - end - else - if trace_singles then - logprocess("%s: 
replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement)) - end - resetinjection(current) - setfield(current,"char",replacement) - end - end - return head,start,true - elseif current==stop then - break - else - current=getnext(current) - end - end - return head,start,false -end -chainmores.gsub_single=chainprocs.gsub_single -function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - local startchar=getchar(start) - local subtables=currentlookup.subtables - local lookupname=subtables[1] - local replacements=lookuphash[lookupname] - if not replacements then - if trace_bugs then - logwarning("%s: no multiple hits",cref(kind,chainname,chainlookupname,lookupname)) - end - else - replacements=replacements[startchar] - if not replacements or replacement=="" then - if trace_bugs then - logwarning("%s: no multiple for %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar)) - end - else - if trace_multiples then - logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements)) - end - return multiple_glyphs(head,start,replacements,currentlookup.flags[1]) - end - end - return head,start,false -end -chainmores.gsub_multiple=chainprocs.gsub_multiple -function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - local current=start - local subtables=currentlookup.subtables - local value=featurevalue==true and tfmdata.shared.features[kind] or featurevalue - while current do - if getid(current)==glyph_code then - local currentchar=getchar(current) - local lookupname=subtables[1] - local alternatives=lookuphash[lookupname] - if not alternatives then - if trace_bugs then - logwarning("%s: no alternative hit",cref(kind,chainname,chainlookupname,lookupname)) - end - else - alternatives=alternatives[currentchar] - if alternatives then - local choice,comment=get_alternative_glyph(current,alternatives,value,trace_alternatives) - if choice then - if trace_alternatives then - logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(char),choice,gref(choice),comment) - end - resetinjection(start) - setfield(start,"char",choice) - else - if trace_alternatives then - logwarning("%s: no variant %a for %s, %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(char),comment) - end - end - elseif trace_bugs then - logwarning("%s: no alternative for %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar),comment) - end - end - return head,start,true - elseif current==stop then - break - else - current=getnext(current) - end - end - return head,start,false -end -chainmores.gsub_alternate=chainprocs.gsub_alternate -function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex) - local startchar=getchar(start) - local subtables=currentlookup.subtables - local lookupname=subtables[1] - local ligatures=lookuphash[lookupname] - if not ligatures then - if trace_bugs then - logwarning("%s: no ligature hits",cref(kind,chainname,chainlookupname,lookupname,chainindex)) - end - else - ligatures=ligatures[startchar] - if not ligatures then - if trace_bugs then - logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar)) - end - else - local 
s=getnext(start) - local discfound=false - local last=stop - local nofreplacements=0 - local skipmark=currentlookup.flags[1] - while s do - local id=getid(s) - if id==disc_code then - s=getnext(s) - discfound=true - else - local schar=getchar(s) - if skipmark and marks[schar] then - s=getnext(s) - else - local lg=ligatures[schar] - if lg then - ligatures,last,nofreplacements=lg,s,nofreplacements+1 - if s==stop then - break - else - s=getnext(s) - end - else - break - end - end - end - end - local l2=ligatures.ligature - if l2 then - if chainindex then - stop=last - end - if trace_ligatures then - if start==stop then - logprocess("%s: replacing character %s by ligature %s case 3",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2)) - else - logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop)),gref(l2)) - end - end - head,start=toligature(kind,lookupname,head,start,stop,l2,currentlookup.flags[1],discfound) - return head,start,true,nofreplacements - elseif trace_bugs then - if start==stop then - logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar)) - else - logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop))) - end - end - end - end - return head,start,false,0 -end -chainmores.gsub_ligature=chainprocs.gsub_ligature -function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - local markchar=getchar(start) - if marks[markchar] then - local subtables=currentlookup.subtables - local lookupname=subtables[1] - local markanchors=lookuphash[lookupname] - if markanchors then - markanchors=markanchors[markchar] - end - if markanchors then - local base=getprev(start) - if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then - local basechar=getchar(base) - if marks[basechar] then - while true do - base=getprev(base) - if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then - basechar=getchar(base) - if not marks[basechar] then - break - end - else - if trace_bugs then - logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar)) - end - return head,start,false - end - end - end - local baseanchors=descriptions[basechar].anchors - if baseanchors then - local baseanchors=baseanchors['basechar'] - if baseanchors then - local al=anchorlookups[lookupname] - for anchor,ba in next,baseanchors do - if al[anchor] then - local ma=markanchors[anchor] - if ma then - local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) - if trace_marks then - logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)", - cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) - end - return head,start,true - end - end - end - if trace_bugs then - logwarning("%s, no matching anchors for mark %s and base %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar)) - end - end - end - elseif trace_bugs then - logwarning("%s: prev node is no char",cref(kind,chainname,chainlookupname,lookupname)) - end - elseif trace_bugs then - logwarning("%s: mark %s has no 
anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar)) - end - elseif trace_bugs then - logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar)) - end - return head,start,false -end -function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - local markchar=getchar(start) - if marks[markchar] then - local subtables=currentlookup.subtables - local lookupname=subtables[1] - local markanchors=lookuphash[lookupname] - if markanchors then - markanchors=markanchors[markchar] - end - if markanchors then - local base=getprev(start) - if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then - local basechar=getchar(base) - if marks[basechar] then - while true do - base=getprev(base) - if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then - basechar=getchar(base) - if not marks[basechar] then - break - end - else - if trace_bugs then - logwarning("%s: no base for mark %s",cref(kind,chainname,chainlookupname,lookupname),markchar) - end - return head,start,false - end - end - end - local index=getligaindex(start) - local baseanchors=descriptions[basechar].anchors - if baseanchors then - local baseanchors=baseanchors['baselig'] - if baseanchors then - local al=anchorlookups[lookupname] - for anchor,ba in next,baseanchors do - if al[anchor] then - local ma=markanchors[anchor] - if ma then - ba=ba[index] - if ba then - local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) - if trace_marks then - logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)", - cref(kind,chainname,chainlookupname,lookupname),anchor,a or bound,gref(markchar),gref(basechar),index,dx,dy) - end - return head,start,true - end - end - end - end - if trace_bugs then - logwarning("%s: no matching anchors for mark %s and baselig %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar)) - end - end - end - elseif trace_bugs then - logwarning("feature %s, lookup %s: prev node is no char",kind,lookupname) - end - elseif trace_bugs then - logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar)) - end - elseif trace_bugs then - logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar)) - end - return head,start,false -end -function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - local markchar=getchar(start) - if marks[markchar] then - local subtables=currentlookup.subtables - local lookupname=subtables[1] - local markanchors=lookuphash[lookupname] - if markanchors then - markanchors=markanchors[markchar] - end - if markanchors then - local base=getprev(start) - local slc=getligaindex(start) - if slc then - while base do - local blc=getligaindex(base) - if blc and blc~=slc then - base=getprev(base) - else - break - end - end - end - if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then - local basechar=getchar(base) - local baseanchors=descriptions[basechar].anchors - if baseanchors then - baseanchors=baseanchors['basemark'] - if baseanchors then - local al=anchorlookups[lookupname] - for anchor,ba in next,baseanchors do - if al[anchor] then - local ma=markanchors[anchor] - if ma then - local 
dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) - if trace_marks then - logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)", - cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) - end - return head,start,true - end - end - end - if trace_bugs then - logwarning("%s: no matching anchors for mark %s and basemark %s",gref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar)) - end - end - end - elseif trace_bugs then - logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname)) - end - elseif trace_bugs then - logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar)) - end - elseif trace_bugs then - logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar)) - end - return head,start,false -end -function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - local alreadydone=cursonce and getprop(start,a_cursbase) - if not alreadydone then - local startchar=getchar(start) - local subtables=currentlookup.subtables - local lookupname=subtables[1] - local exitanchors=lookuphash[lookupname] - if exitanchors then - exitanchors=exitanchors[startchar] - end - if exitanchors then - local done=false - if marks[startchar] then - if trace_cursive then - logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar)) - end - else - local nxt=getnext(start) - while not done and nxt and getid(nxt)==glyph_code and getfont(nxt)==currentfont and getsubtype(nxt)<256 do - local nextchar=getchar(nxt) - if marks[nextchar] then - nxt=getnext(nxt) - else - local entryanchors=descriptions[nextchar] - if entryanchors then - entryanchors=entryanchors.anchors - if entryanchors then - entryanchors=entryanchors['centry'] - if entryanchors then - local al=anchorlookups[lookupname] - for anchor,entry in next,entryanchors do - if al[anchor] then - local exit=exitanchors[anchor] - if exit then - local dx,dy,bound=setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar]) - if trace_cursive then - logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode) - end - done=true - break - end - end - end - end - end - elseif trace_bugs then - onetimemessage(currentfont,startchar,"no entry anchors",report_fonts) - end - break - end - end - end - return head,start,done - else - if trace_cursive and trace_details then - logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone) - end - return head,start,false - end - end - return head,start,false -end -function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence) - local startchar=getchar(start) - local subtables=currentlookup.subtables - local lookupname=subtables[1] - local kerns=lookuphash[lookupname] - if kerns then - kerns=kerns[startchar] - if kerns then - local dx,dy,w,h=setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar]) - if trace_kerns then - logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h) - end - end - end - return head,start,false -end 
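The chainprocs handlers above (gsub_single, gsub_multiple, gpos_mark2base, gpos_single and friends) all follow one access pattern: take the first subtable name, look it up in lookuphash, index the result by the current glyph's character, and apply whatever that yields. For orientation only, here is a minimal standalone Lua sketch of that pattern over a plain array of character codes; the lookup name and the mapping are hypothetical, and the real handlers work on node lists filled by prepare_lookups further down in this file.

  -- sketch only: "s_demo" and the f -> F mapping are made-up stand-ins for a
  -- real single-substitution lookup; real entries come from prepare_lookups
  local lookuphash = {
    s_demo = { [0x0066] = 0x0046 },   -- gsub_single style: char -> replacement
  }

  -- apply a gsub_single style lookup to a plain array of character codes
  local function apply_single(chars, lookupname)
    local map = lookuphash[lookupname]
    if not map then return chars, false end
    local done = false
    for i = 1, #chars do
      local replacement = map[chars[i]]
      if replacement then
        chars[i] = replacement
        done = true
      end
    end
    return chars, done
  end

  local line = { 0x0066, 0x0069 }     -- "fi"
  local _, done = apply_single(line, "s_demo")
  print(done, string.format("U+%04X U+%04X", line[1], line[2]))  -- true  U+0046 U+0069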
-chainmores.gpos_single=chainprocs.gpos_single -function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence) - local snext=getnext(start) - if snext then - local startchar=getchar(start) - local subtables=currentlookup.subtables - local lookupname=subtables[1] - local kerns=lookuphash[lookupname] - if kerns then - kerns=kerns[startchar] - if kerns then - local lookuptype=lookuptypes[lookupname] - local prev,done=start,false - local factor=tfmdata.parameters.factor - while snext and getid(snext)==glyph_code and getfont(snext)==currentfont and getsubtype(snext)<256 do - local nextchar=getchar(snext) - local krn=kerns[nextchar] - if not krn and marks[nextchar] then - prev=snext - snext=getnext(snext) - else - if not krn then - elseif type(krn)=="table" then - if lookuptype=="pair" then - local a,b=krn[2],krn[3] - if a and #a>0 then - local startchar=getchar(start) - local x,y,w,h=setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar]) - if trace_kerns then - logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h) - end - end - if b and #b>0 then - local startchar=getchar(start) - local x,y,w,h=setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar]) - if trace_kerns then - logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h) - end - end - else - report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname)) - local a,b=krn[2],krn[6] - if a and a~=0 then - local k=setkern(snext,factor,rlmode,a) - if trace_kerns then - logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar)) - end - end - if b and b~=0 then - logwarning("%s: ignoring second kern xoff %s",cref(kind,chainname,chainlookupname),b*factor) - end - end - done=true - elseif krn~=0 then - local k=setkern(snext,factor,rlmode,krn) - if trace_kerns then - logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar)) - end - done=true - end - break - end - end - return head,start,done - end - end - end - return head,start,false -end -chainmores.gpos_pair=chainprocs.gpos_pair -local function show_skip(kind,chainname,char,ck,class) - if ck[9] then - logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a, %a => %a",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10]) - else - logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a",cref(kind,chainname),gref(char),class,ck[1],ck[2]) - end -end -local quit_on_no_replacement=true -directives.register("otf.chain.quitonnoreplacement",function(value) - quit_on_no_replacement=value -end) -local function normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash) - local flags=sequence.flags - local done=false - local skipmark=flags[1] - local skipligature=flags[2] - local skipbase=flags[3] - local someskip=skipmark or skipligature or skipbase - local markclass=sequence.markclass - local skipped=false - for k=1,#contexts do - local match=true - local current=start - local last=start - local ck=contexts[k] - local seq=ck[3] - local s=#seq - if s==1 then - match=getid(current)==glyph_code and getfont(current)==currentfont and getsubtype(current)<256 and 
seq[1][getchar(current)] - else - local f,l=ck[4],ck[5] - if f==1 and f==l then - else - if f==l then - else - local n=f+1 - last=getnext(last) - while n<=l do - if last then - local id=getid(last) - if id==glyph_code then - if getfont(last)==currentfont and getsubtype(last)<256 then - local char=getchar(last) - local ccd=descriptions[char] - if ccd then - local class=ccd.class - if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then - skipped=true - if trace_skips then - show_skip(kind,chainname,char,ck,class) - end - last=getnext(last) - elseif seq[n][char] then - if n1 then - local prev=getprev(start) - if prev then - local n=f-1 - while n>=1 do - if prev then - local id=getid(prev) - if id==glyph_code then - if getfont(prev)==currentfont and getsubtype(prev)<256 then - local char=getchar(prev) - local ccd=descriptions[char] - if ccd then - local class=ccd.class - if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then - skipped=true - if trace_skips then - show_skip(kind,chainname,char,ck,class) - end - elseif seq[n][char] then - n=n -1 - else - match=false - break - end - else - match=false - break - end - else - match=false - break - end - elseif id==disc_code then - elseif seq[n][32] then - n=n -1 - else - match=false - break - end - prev=getprev(prev) - elseif seq[n][32] then - n=n -1 - else - match=false - break - end - end - elseif f==2 then - match=seq[1][32] - else - for n=f-1,1 do - if not seq[n][32] then - match=false - break - end - end - end - end - if match and s>l then - local current=last and getnext(last) - if current then - local n=l+1 - while n<=s do - if current then - local id=getid(current) - if id==glyph_code then - if getfont(current)==currentfont and getsubtype(current)<256 then - local char=getchar(current) - local ccd=descriptions[char] - if ccd then - local class=ccd.class - if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then - skipped=true - if trace_skips then - show_skip(kind,chainname,char,ck,class) - end - elseif seq[n][char] then - n=n+1 - else - match=false - break - end - else - match=false - break - end - else - match=false - break - end - elseif id==disc_code then - elseif seq[n][32] then - n=n+1 - else - match=false - break - end - current=getnext(current) - elseif seq[n][32] then - n=n+1 - else - match=false - break - end - end - elseif s-l==1 then - match=seq[s][32] - else - for n=l+1,s do - if not seq[n][32] then - match=false - break - end - end - end - end - end - if match then - if trace_contexts then - local rule,lookuptype,f,l=ck[1],ck[2],ck[4],ck[5] - local char=getchar(start) - if ck[9] then - logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a, %a => %a", - cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10]) - else - logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a", - cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype) - end - end - local chainlookups=ck[6] - if chainlookups then - local nofchainlookups=#chainlookups - if nofchainlookups==1 then - local chainlookupname=chainlookups[1] - local chainlookup=lookuptable[chainlookupname] - if chainlookup then - local cp=chainprocs[chainlookup.type] - if cp then - local ok - head,start,ok=cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence) - if ok then - 
done=true - end - else - logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type) - end - else - logprocess("%s is not yet supported",cref(kind,chainname,chainlookupname)) - end - else - local i=1 - while true do - if skipped then - while true do - local char=getchar(start) - local ccd=descriptions[char] - if ccd then - local class=ccd.class - if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then - start=getnext(start) - else - break - end - else - break - end - end - end - local chainlookupname=chainlookups[i] - local chainlookup=lookuptable[chainlookupname] - if not chainlookup then - i=i+1 - else - local cp=chainmores[chainlookup.type] - if not cp then - logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type) - i=i+1 - else - local ok,n - head,start,ok,n=cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence) - if ok then - done=true - i=i+(n or 1) - else - i=i+1 - end - end - end - if i>nofchainlookups then - break - elseif start then - start=getnext(start) - else - end - end - end - else - local replacements=ck[7] - if replacements then - head,start,done=chainprocs.reversesub(head,start,last,kind,chainname,ck,lookuphash,replacements) - else - done=quit_on_no_replacement - if trace_contexts then - logprocess("%s: skipping match",cref(kind,chainname)) - end - end - end - end - end - return head,start,done -end -local verbose_handle_contextchain=function(font,...) - logwarning("no verbose handler installed, reverting to 'normal'") - otf.setcontextchain() - return normal_handle_contextchain(...) -end -otf.chainhandlers={ - normal=normal_handle_contextchain, - verbose=verbose_handle_contextchain, -} -function otf.setcontextchain(method) - if not method or method=="normal" or not otf.chainhandlers[method] then - if handlers.contextchain then - logwarning("installing normal contextchain handler") - end - handlers.contextchain=normal_handle_contextchain - else - logwarning("installing contextchain handler %a",method) - local handler=otf.chainhandlers[method] - handlers.contextchain=function(...) - return handler(currentfont,...) - end - end - handlers.gsub_context=handlers.contextchain - handlers.gsub_contextchain=handlers.contextchain - handlers.gsub_reversecontextchain=handlers.contextchain - handlers.gpos_contextchain=handlers.contextchain - handlers.gpos_context=handlers.contextchain -end -otf.setcontextchain() -local missing={} -local function logprocess(...) - if trace_steps then - registermessage(...) - end - report_process(...) 
-end -local logwarning=report_process -local function report_missing_cache(typ,lookup) - local f=missing[currentfont] if not f then f={} missing[currentfont]=f end - local t=f[typ] if not t then t={} f[typ]=t end - if not t[lookup] then - t[lookup]=true - logwarning("missing cache for lookup %a, type %a, font %a, name %a",lookup,typ,currentfont,tfmdata.properties.fullname) - end -end -local resolved={} -local lookuphashes={} -setmetatableindex(lookuphashes,function(t,font) - local lookuphash=fontdata[font].resources.lookuphash - if not lookuphash or not next(lookuphash) then - lookuphash=false - end - t[font]=lookuphash - return lookuphash -end) -local autofeatures=fonts.analyzers.features -local function initialize(sequence,script,language,enabled) - local features=sequence.features - if features then - local order=sequence.order - if order then - for i=1,#order do - local kind=order[i] - local valid=enabled[kind] - if valid then - local scripts=features[kind] - local languages=scripts[script] or scripts[wildcard] - if languages and (languages[language] or languages[wildcard]) then - return { valid,autofeatures[kind] or false,sequence.chain or 0,kind,sequence } - end - end - end - else - end - end - return false -end -function otf.dataset(tfmdata,font) - local shared=tfmdata.shared - local properties=tfmdata.properties - local language=properties.language or "dflt" - local script=properties.script or "dflt" - local enabled=shared.features - local res=resolved[font] - if not res then - res={} - resolved[font]=res - end - local rs=res[script] - if not rs then - rs={} - res[script]=rs - end - local rl=rs[language] - if not rl then - rl={ - } - rs[language]=rl - local sequences=tfmdata.resources.sequences - for s=1,#sequences do - local v=enabled and initialize(sequences[s],script,language,enabled) - if v then - rl[#rl+1]=v - end - end - end - return rl -end -local function featuresprocessor(head,font,attr) - local lookuphash=lookuphashes[font] - if not lookuphash then - return head,false - end - head=tonut(head) - if trace_steps then - checkstep(head) - end - tfmdata=fontdata[font] - descriptions=tfmdata.descriptions - characters=tfmdata.characters - resources=tfmdata.resources - marks=resources.marks - anchorlookups=resources.lookup_to_anchor - lookuptable=resources.lookups - lookuptypes=resources.lookuptypes - lookuptags=resources.lookuptags - currentfont=font - rlmode=0 - local sequences=resources.sequences - local done=false - local datasets=otf.dataset(tfmdata,font,attr) - local dirstack={} - for s=1,#datasets do - local dataset=datasets[s] - featurevalue=dataset[1] - local sequence=dataset[5] - local rlparmode=0 - local topstack=0 - local success=false - local attribute=dataset[2] - local chain=dataset[3] - local typ=sequence.type - local subtables=sequence.subtables - if chain<0 then - local handler=handlers[typ] - local start=find_node_tail(head) - while start do - local id=getid(start) - if id==glyph_code then - if getfont(start)==font and getsubtype(start)<256 then - local a=getattr(start,0) - if a then - a=a==attr - else - a=true - end - if a then - for i=1,#subtables do - local lookupname=subtables[i] - local lookupcache=lookuphash[lookupname] - if lookupcache then - local lookupmatch=lookupcache[getchar(start)] - if lookupmatch then - head,start,success=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) - if success then - break - end - end - else - report_missing_cache(typ,lookupname) - end - end - if start then start=getprev(start) end - else - 
start=getprev(start) - end - else - start=getprev(start) - end - else - start=getprev(start) - end - end - else - local handler=handlers[typ] - local ns=#subtables - local start=head - rlmode=0 - if ns==1 then - local lookupname=subtables[1] - local lookupcache=lookuphash[lookupname] - if not lookupcache then - report_missing_cache(typ,lookupname) - else - local function subrun(start) - local head=start - local done=false - while start do - local id=getid(start) - if id==glyph_code and getfont(start)==font and getsubtype(start)<256 then - local a=getattr(start,0) - if a then - a=(a==attr) and (not attribute or getprop(start,a_state)==attribute) - else - a=not attribute or getprop(start,a_state)==attribute - end - if a then - local lookupmatch=lookupcache[getchar(start)] - if lookupmatch then - local ok - head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) - if ok then - done=true - end - end - if start then start=getnext(start) end - else - start=getnext(start) - end - else - start=getnext(start) - end - end - if done then - success=true - return head - end - end - local function kerndisc(disc) - local prev=getprev(disc) - local next=getnext(disc) - if prev and next then - setfield(prev,"next",next) - local a=getattr(prev,0) - if a then - a=(a==attr) and (not attribute or getprop(prev,a_state)==attribute) - else - a=not attribute or getprop(prev,a_state)==attribute - end - if a then - local lookupmatch=lookupcache[getchar(prev)] - if lookupmatch then - local h,d,ok=handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) - if ok then - done=true - success=true - end - end - end - setfield(prev,"next",disc) - end - return next - end - while start do - local id=getid(start) - if id==glyph_code then - if getfont(start)==font and getsubtype(start)<256 then - local a=getattr(start,0) - if a then - a=(a==attr) and (not attribute or getprop(start,a_state)==attribute) - else - a=not attribute or getprop(start,a_state)==attribute - end - if a then - local lookupmatch=lookupcache[getchar(start)] - if lookupmatch then - local ok - head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) - if ok then - success=true - end - end - if start then start=getnext(start) end - else - start=getnext(start) - end - else - start=getnext(start) - end - elseif id==disc_code then - if getsubtype(start)==discretionary_code then - local pre=getfield(start,"pre") - if pre then - local new=subrun(pre) - if new then setfield(start,"pre",new) end - end - local post=getfield(start,"post") - if post then - local new=subrun(post) - if new then setfield(start,"post",new) end - end - local replace=getfield(start,"replace") - if replace then - local new=subrun(replace) - if new then setfield(start,"replace",new) end - end -elseif typ=="gpos_single" or typ=="gpos_pair" then - kerndisc(start) - end - start=getnext(start) - elseif id==whatsit_code then - local subtype=getsubtype(start) - if subtype==dir_code then - local dir=getfield(start,"dir") - if dir=="+TRT" or dir=="+TLT" then - topstack=topstack+1 - dirstack[topstack]=dir - elseif dir=="-TRT" or dir=="-TLT" then - topstack=topstack-1 - end - local newdir=dirstack[topstack] - if newdir=="+TRT" then - rlmode=-1 - elseif newdir=="+TLT" then - rlmode=1 - else - rlmode=rlparmode - end - if trace_directions then - report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir) - end - elseif subtype==localpar_code then - local 
dir=getfield(start,"dir") - if dir=="TRT" then - rlparmode=-1 - elseif dir=="TLT" then - rlparmode=1 - else - rlparmode=0 - end - rlmode=rlparmode - if trace_directions then - report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode) - end - end - start=getnext(start) - elseif id==math_code then - start=getnext(end_of_math(start)) - else - start=getnext(start) - end - end - end - else - local function subrun(start) - local head=start - local done=false - while start do - local id=getid(start) - if id==glyph_code and getfont(start)==font and getsubtype(start)<256 then - local a=getattr(start,0) - if a then - a=(a==attr) and (not attribute or getprop(start,a_state)==attribute) - else - a=not attribute or getprop(start,a_state)==attribute - end - if a then - for i=1,ns do - local lookupname=subtables[i] - local lookupcache=lookuphash[lookupname] - if lookupcache then - local lookupmatch=lookupcache[getchar(start)] - if lookupmatch then - local ok - head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) - if ok then - done=true - break - elseif not start then - break - end - end - else - report_missing_cache(typ,lookupname) - end - end - if start then start=getnext(start) end - else - start=getnext(start) - end - else - start=getnext(start) - end - end - if done then - success=true - return head - end - end - local function kerndisc(disc) - local prev=getprev(disc) - local next=getnext(disc) - if prev and next then - setfield(prev,"next",next) - local a=getattr(prev,0) - if a then - a=(a==attr) and (not attribute or getprop(prev,a_state)==attribute) - else - a=not attribute or getprop(prev,a_state)==attribute - end - if a then - for i=1,ns do - local lookupname=subtables[i] - local lookupcache=lookuphash[lookupname] - if lookupcache then - local lookupmatch=lookupcache[getchar(prev)] - if lookupmatch then - local h,d,ok=handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) - if ok then - done=true - break - end - end - else - report_missing_cache(typ,lookupname) - end - end - end - setfield(prev,"next",disc) - end - return next - end - while start do - local id=getid(start) - if id==glyph_code then - if getfont(start)==font and getsubtype(start)<256 then - local a=getattr(start,0) - if a then - a=(a==attr) and (not attribute or getprop(start,a_state)==attribute) - else - a=not attribute or getprop(start,a_state)==attribute - end - if a then - for i=1,ns do - local lookupname=subtables[i] - local lookupcache=lookuphash[lookupname] - if lookupcache then - local lookupmatch=lookupcache[getchar(start)] - if lookupmatch then - local ok - head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) - if ok then - success=true - break - elseif not start then - break - end - end - else - report_missing_cache(typ,lookupname) - end - end - if start then start=getnext(start) end - else - start=getnext(start) - end - else - start=getnext(start) - end - elseif id==disc_code then - if getsubtype(start)==discretionary_code then - local pre=getfield(start,"pre") - if pre then - local new=subrun(pre) - if new then setfield(start,"pre",new) end - end - local post=getfield(start,"post") - if post then - local new=subrun(post) - if new then setfield(start,"post",new) end - end - local replace=getfield(start,"replace") - if replace then - local new=subrun(replace) - if new then setfield(start,"replace",new) end - end -elseif typ=="gpos_single" or typ=="gpos_pair" then - kerndisc(start) - end - 
start=getnext(start) - elseif id==whatsit_code then - local subtype=getsubtype(start) - if subtype==dir_code then - local dir=getfield(start,"dir") - if dir=="+TRT" or dir=="+TLT" then - topstack=topstack+1 - dirstack[topstack]=dir - elseif dir=="-TRT" or dir=="-TLT" then - topstack=topstack-1 - end - local newdir=dirstack[topstack] - if newdir=="+TRT" then - rlmode=-1 - elseif newdir=="+TLT" then - rlmode=1 - else - rlmode=rlparmode - end - if trace_directions then - report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir) - end - elseif subtype==localpar_code then - local dir=getfield(start,"dir") - if dir=="TRT" then - rlparmode=-1 - elseif dir=="TLT" then - rlparmode=1 - else - rlparmode=0 - end - rlmode=rlparmode - if trace_directions then - report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode) - end - end - start=getnext(start) - elseif id==math_code then - start=getnext(end_of_math(start)) - else - start=getnext(start) - end - end - end - end - if success then - done=true - end - if trace_steps then - registerstep(head) - end - end - head=tonode(head) - return head,done -end -local function generic(lookupdata,lookupname,unicode,lookuphash) - local target=lookuphash[lookupname] - if target then - target[unicode]=lookupdata - else - lookuphash[lookupname]={ [unicode]=lookupdata } - end -end -local action={ - substitution=generic, - multiple=generic, - alternate=generic, - position=generic, - ligature=function(lookupdata,lookupname,unicode,lookuphash) - local target=lookuphash[lookupname] - if not target then - target={} - lookuphash[lookupname]=target - end - for i=1,#lookupdata do - local li=lookupdata[i] - local tu=target[li] - if not tu then - tu={} - target[li]=tu - end - target=tu - end - target.ligature=unicode - end, - pair=function(lookupdata,lookupname,unicode,lookuphash) - local target=lookuphash[lookupname] - if not target then - target={} - lookuphash[lookupname]=target - end - local others=target[unicode] - local paired=lookupdata[1] - if others then - others[paired]=lookupdata - else - others={ [paired]=lookupdata } - target[unicode]=others - end - end, -} -local function prepare_lookups(tfmdata) - local rawdata=tfmdata.shared.rawdata - local resources=rawdata.resources - local lookuphash=resources.lookuphash - local anchor_to_lookup=resources.anchor_to_lookup - local lookup_to_anchor=resources.lookup_to_anchor - local lookuptypes=resources.lookuptypes - local characters=tfmdata.characters - local descriptions=tfmdata.descriptions - for unicode,character in next,characters do - local description=descriptions[unicode] - if description then - local lookups=description.slookups - if lookups then - for lookupname,lookupdata in next,lookups do - action[lookuptypes[lookupname]](lookupdata,lookupname,unicode,lookuphash) - end - end - local lookups=description.mlookups - if lookups then - for lookupname,lookuplist in next,lookups do - local lookuptype=lookuptypes[lookupname] - for l=1,#lookuplist do - local lookupdata=lookuplist[l] - action[lookuptype](lookupdata,lookupname,unicode,lookuphash) - end - end - end - local list=description.kerns - if list then - for lookup,krn in next,list do - local target=lookuphash[lookup] - if target then - target[unicode]=krn - else - lookuphash[lookup]={ [unicode]=krn } - end - end - end - local list=description.anchors - if list then - for typ,anchors in next,list do - if typ=="mark" or typ=="cexit" then - for name,anchor in 
next,anchors do - local lookups=anchor_to_lookup[name] - if lookups then - for lookup,_ in next,lookups do - local target=lookuphash[lookup] - if target then - target[unicode]=anchors - else - lookuphash[lookup]={ [unicode]=anchors } - end - end - end - end - end - end - end - end - end -end -local function split(replacement,original) - local result={} - for i=1,#replacement do - result[original[i]]=replacement[i] - end - return result -end -local valid={ - coverage={ chainsub=true,chainpos=true,contextsub=true }, - reversecoverage={ reversesub=true }, - glyphs={ chainsub=true,chainpos=true }, -} -local function prepare_contextchains(tfmdata) - local rawdata=tfmdata.shared.rawdata - local resources=rawdata.resources - local lookuphash=resources.lookuphash - local lookuptags=resources.lookuptags - local lookups=rawdata.lookups - if lookups then - for lookupname,lookupdata in next,rawdata.lookups do - local lookuptype=lookupdata.type - if lookuptype then - local rules=lookupdata.rules - if rules then - local format=lookupdata.format - local validformat=valid[format] - if not validformat then - report_prepare("unsupported format %a",format) - elseif not validformat[lookuptype] then - report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookuptags[lookupname]) - else - local contexts=lookuphash[lookupname] - if not contexts then - contexts={} - lookuphash[lookupname]=contexts - end - local t,nt={},0 - for nofrules=1,#rules do - local rule=rules[nofrules] - local current=rule.current - local before=rule.before - local after=rule.after - local replacements=rule.replacements - local sequence={} - local nofsequences=0 - if before then - for n=1,#before do - nofsequences=nofsequences+1 - sequence[nofsequences]=before[n] - end - end - local start=nofsequences+1 - for n=1,#current do - nofsequences=nofsequences+1 - sequence[nofsequences]=current[n] - end - local stop=nofsequences - if after then - for n=1,#after do - nofsequences=nofsequences+1 - sequence[nofsequences]=after[n] - end - end - if sequence[1] then - nt=nt+1 - t[nt]={ nofrules,lookuptype,sequence,start,stop,rule.lookups,replacements } - for unic,_ in next,sequence[start] do - local cu=contexts[unic] - if not cu then - contexts[unic]=t - end - end - end - end - end - else - end - else - report_prepare("missing lookuptype for lookupname %a",lookuptags[lookupname]) - end - end - end -end -local function featuresinitializer(tfmdata,value) - if true then - local rawdata=tfmdata.shared.rawdata - local properties=rawdata.properties - if not properties.initialized then - local starttime=trace_preparing and os.clock() - local resources=rawdata.resources - resources.lookuphash=resources.lookuphash or {} - prepare_contextchains(tfmdata) - prepare_lookups(tfmdata) - properties.initialized=true - if trace_preparing then - report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,tfmdata.properties.fullname) - end - end - end -end -registerotffeature { - name="features", - description="features", - default=true, - initializers={ - position=1, - node=featuresinitializer, - }, - processors={ - node=featuresprocessor, - } -} -otf.handlers=handlers - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-otp']={ - version=1.001, - comment="companion to font-otf.lua (packing)", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} 
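The font-otp closure opened just above packs the loaded font data: recurring sub-tables (bounding boxes, kerns, anchors, sequence flags and the like) are serialized to a string key, stored once, and, roughly speaking, replaced by small integer indices into data.tables, which unpackdata resolves again at load time. A minimal sketch of that share-by-content idea follows; the crude serializer and the pack/unpack names here are simplified stand-ins for the tabstr_*/pack_* helpers defined below, not the module's API.

  -- sketch of packing by content: identical tables are stored once in a
  -- shared pool and referenced by index (simplified stand-in, not font-otp)
  local pool, index, count = {}, {}, {}

  local function tabstr(t)                       -- crude content key
    local s = {}
    for k, v in pairs(t) do s[#s+1] = tostring(k) .. "=" .. tostring(v) end
    table.sort(s)
    return table.concat(s, ",")
  end

  local function pack(t)
    local tag = tabstr(t)
    local n = index[tag]
    if n then count[n] = count[n] + 1 return n end
    n = #pool + 1
    pool[n], index[tag], count[n] = t, n, 1
    return n
  end

  local function unpack_(n) return pool[n] end

  -- two glyphs with the same bounding box end up referencing one table
  local a = pack { 0, -10, 500, 700 }
  local b = pack { 0, -10, 500, 700 }
  print(a == b, count[a])            --> true  2
  print(unpack_(a)[3])               --> 500

Sharing pays off because the same kern value, bounding box or flag set tends to recur across many glyphs, which is what the pack-quality counts reported by success() in the code below are meant to show.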
-local next,type=next,type -local sort,concat=table.sort,table.concat -local sortedhash=table.sortedhash -local trace_packing=false trackers.register("otf.packing",function(v) trace_packing=v end) -local trace_loading=false trackers.register("otf.loading",function(v) trace_loading=v end) -local report_otf=logs.reporter("fonts","otf loading") -fonts=fonts or {} -local handlers=fonts.handlers or {} -fonts.handlers=handlers -local otf=handlers.otf or {} -handlers.otf=otf -local enhancers=otf.enhancers or {} -otf.enhancers=enhancers -local glists=otf.glists or { "gsub","gpos" } -otf.glists=glists -local criterium=1 -local threshold=0 -local function tabstr_normal(t) - local s={} - local n=0 - for k,v in next,t do - n=n+1 - if type(v)=="table" then - s[n]=k..">"..tabstr_normal(v) - elseif v==true then - s[n]=k.."+" - elseif v then - s[n]=k.."="..v - else - s[n]=k.."-" - end - end - if n==0 then - return "" - elseif n==1 then - return s[1] - else - sort(s) - return concat(s,",") - end -end -local function tabstr_flat(t) - local s={} - local n=0 - for k,v in next,t do - n=n+1 - s[n]=k.."="..v - end - if n==0 then - return "" - elseif n==1 then - return s[1] - else - sort(s) - return concat(s,",") - end -end -local function tabstr_mixed(t) - local s={} - local n=#t - if n==0 then - return "" - elseif n==1 then - local k=t[1] - if k==true then - return "++" - elseif k==false then - return "--" - else - return tostring(k) - end - else - for i=1,n do - local k=t[i] - if k==true then - s[i]="++" - elseif k==false then - s[i]="--" - else - s[i]=k - end - end - return concat(s,",") - end -end -local function tabstr_boolean(t) - local s={} - local n=0 - for k,v in next,t do - n=n+1 - if v then - s[n]=k.."+" - else - s[n]=k.."-" - end - end - if n==0 then - return "" - elseif n==1 then - return s[1] - else - sort(s) - return concat(s,",") - end -end -local function packdata(data) - if data then - local h,t,c={},{},{} - local hh,tt,cc={},{},{} - local nt,ntt=0,0 - local function pack_normal(v) - local tag=tabstr_normal(v) - local ht=h[tag] - if ht then - c[ht]=c[ht]+1 - return ht - else - nt=nt+1 - t[nt]=v - h[tag]=nt - c[nt]=1 - return nt - end - end - local function pack_flat(v) - local tag=tabstr_flat(v) - local ht=h[tag] - if ht then - c[ht]=c[ht]+1 - return ht - else - nt=nt+1 - t[nt]=v - h[tag]=nt - c[nt]=1 - return nt - end - end - local function pack_boolean(v) - local tag=tabstr_boolean(v) - local ht=h[tag] - if ht then - c[ht]=c[ht]+1 - return ht - else - nt=nt+1 - t[nt]=v - h[tag]=nt - c[nt]=1 - return nt - end - end - local function pack_indexed(v) - local tag=concat(v," ") - local ht=h[tag] - if ht then - c[ht]=c[ht]+1 - return ht - else - nt=nt+1 - t[nt]=v - h[tag]=nt - c[nt]=1 - return nt - end - end - local function pack_mixed(v) - local tag=tabstr_mixed(v) - local ht=h[tag] - if ht then - c[ht]=c[ht]+1 - return ht - else - nt=nt+1 - t[nt]=v - h[tag]=nt - c[nt]=1 - return nt - end - end - local function pack_final(v) - if c[v]<=criterium then - return t[v] - else - local hv=hh[v] - if hv then - return hv - else - ntt=ntt+1 - tt[ntt]=t[v] - hh[v]=ntt - cc[ntt]=c[v] - return ntt - end - end - end - local function success(stage,pass) - if nt==0 then - if trace_loading or trace_packing then - report_otf("pack quality: nothing to pack") - end - return false - elseif nt>=threshold then - local one,two,rest=0,0,0 - if pass==1 then - for k,v in next,c do - if v==1 then - one=one+1 - elseif v==2 then - two=two+1 - else - rest=rest+1 - end - end - else - for k,v in next,cc do - if v>20 then - 
rest=rest+1 - elseif v>10 then - two=two+1 - else - one=one+1 - end - end - data.tables=tt - end - if trace_loading or trace_packing then - report_otf("pack quality: stage %s, pass %s, %s packed, 1-10:%s, 11-20:%s, rest:%s (criterium: %s)",stage,pass,one+two+rest,one,two,rest,criterium) - end - return true - else - if trace_loading or trace_packing then - report_otf("pack quality: stage %s, pass %s, %s packed, aborting pack (threshold: %s)",stage,pass,nt,threshold) - end - return false - end - end - local function packers(pass) - if pass==1 then - return pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed - else - return pack_final,pack_final,pack_final,pack_final,pack_final - end - end - local resources=data.resources - local lookuptypes=resources.lookuptypes - for pass=1,2 do - if trace_packing then - report_otf("start packing: stage 1, pass %s",pass) - end - local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass) - for unicode,description in next,data.descriptions do - local boundingbox=description.boundingbox - if boundingbox then - description.boundingbox=pack_indexed(boundingbox) - end - local slookups=description.slookups - if slookups then - for tag,slookup in next,slookups do - local what=lookuptypes[tag] - if what=="pair" then - local t=slookup[2] if t then slookup[2]=pack_indexed(t) end - local t=slookup[3] if t then slookup[3]=pack_indexed(t) end - elseif what~="substitution" then - slookups[tag]=pack_indexed(slookup) - end - end - end - local mlookups=description.mlookups - if mlookups then - for tag,mlookup in next,mlookups do - local what=lookuptypes[tag] - if what=="pair" then - for i=1,#mlookup do - local lookup=mlookup[i] - local t=lookup[2] if t then lookup[2]=pack_indexed(t) end - local t=lookup[3] if t then lookup[3]=pack_indexed(t) end - end - elseif what~="substitution" then - for i=1,#mlookup do - mlookup[i]=pack_indexed(mlookup[i]) - end - end - end - end - local kerns=description.kerns - if kerns then - for tag,kern in next,kerns do - kerns[tag]=pack_flat(kern) - end - end - local math=description.math - if math then - local kerns=math.kerns - if kerns then - for tag,kern in next,kerns do - kerns[tag]=pack_normal(kern) - end - end - end - local anchors=description.anchors - if anchors then - for what,anchor in next,anchors do - if what=="baselig" then - for _,a in next,anchor do - for k=1,#a do - a[k]=pack_indexed(a[k]) - end - end - else - for k,v in next,anchor do - anchor[k]=pack_indexed(v) - end - end - end - end - local altuni=description.altuni - if altuni then - for i=1,#altuni do - altuni[i]=pack_flat(altuni[i]) - end - end - end - local lookups=data.lookups - if lookups then - for _,lookup in next,lookups do - local rules=lookup.rules - if rules then - for i=1,#rules do - local rule=rules[i] - local r=rule.before if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end - local r=rule.after if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end - local r=rule.current if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end - local r=rule.replacements if r then rule.replacements=pack_flat (r) end - local r=rule.lookups if r then rule.lookups=pack_indexed(r) end - end - end - end - end - local anchor_to_lookup=resources.anchor_to_lookup - if anchor_to_lookup then - for anchor,lookup in next,anchor_to_lookup do - anchor_to_lookup[anchor]=pack_normal(lookup) - end - end - local lookup_to_anchor=resources.lookup_to_anchor - if lookup_to_anchor then - for lookup,anchor in next,lookup_to_anchor do - 
lookup_to_anchor[lookup]=pack_normal(anchor) - end - end - local sequences=resources.sequences - if sequences then - for feature,sequence in next,sequences do - local flags=sequence.flags - if flags then - sequence.flags=pack_normal(flags) - end - local subtables=sequence.subtables - if subtables then - sequence.subtables=pack_normal(subtables) - end - local features=sequence.features - if features then - for script,feature in next,features do - features[script]=pack_normal(feature) - end - end - local order=sequence.order - if order then - sequence.order=pack_indexed(order) - end - local markclass=sequence.markclass - if markclass then - sequence.markclass=pack_boolean(markclass) - end - end - end - local lookups=resources.lookups - if lookups then - for name,lookup in next,lookups do - local flags=lookup.flags - if flags then - lookup.flags=pack_normal(flags) - end - local subtables=lookup.subtables - if subtables then - lookup.subtables=pack_normal(subtables) - end - end - end - local features=resources.features - if features then - for _,what in next,glists do - local list=features[what] - if list then - for feature,spec in next,list do - list[feature]=pack_normal(spec) - end - end - end - end - if not success(1,pass) then - return - end - end - if nt>0 then - for pass=1,2 do - if trace_packing then - report_otf("start packing: stage 2, pass %s",pass) - end - local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass) - for unicode,description in next,data.descriptions do - local kerns=description.kerns - if kerns then - description.kerns=pack_normal(kerns) - end - local math=description.math - if math then - local kerns=math.kerns - if kerns then - math.kerns=pack_normal(kerns) - end - end - local anchors=description.anchors - if anchors then - description.anchors=pack_normal(anchors) - end - local mlookups=description.mlookups - if mlookups then - for tag,mlookup in next,mlookups do - mlookups[tag]=pack_normal(mlookup) - end - end - local altuni=description.altuni - if altuni then - description.altuni=pack_normal(altuni) - end - end - local lookups=data.lookups - if lookups then - for _,lookup in next,lookups do - local rules=lookup.rules - if rules then - for i=1,#rules do - local rule=rules[i] - local r=rule.before if r then rule.before=pack_normal(r) end - local r=rule.after if r then rule.after=pack_normal(r) end - local r=rule.current if r then rule.current=pack_normal(r) end - end - end - end - end - local sequences=resources.sequences - if sequences then - for feature,sequence in next,sequences do - sequence.features=pack_normal(sequence.features) - end - end - if not success(2,pass) then - end - end - for pass=1,2 do - local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass) - for unicode,description in next,data.descriptions do - local slookups=description.slookups - if slookups then - description.slookups=pack_normal(slookups) - end - local mlookups=description.mlookups - if mlookups then - description.mlookups=pack_normal(mlookups) - end - end - end - end - end -end -local unpacked_mt={ - __index=function(t,k) - t[k]=false - return k - end -} -local function unpackdata(data) - if data then - local tables=data.tables - if tables then - local resources=data.resources - local lookuptypes=resources.lookuptypes - local unpacked={} - setmetatable(unpacked,unpacked_mt) - for unicode,description in next,data.descriptions do - local tv=tables[description.boundingbox] - if tv then - description.boundingbox=tv - end - local 
slookups=description.slookups - if slookups then - local tv=tables[slookups] - if tv then - description.slookups=tv - slookups=unpacked[tv] - end - if slookups then - for tag,lookup in next,slookups do - local what=lookuptypes[tag] - if what=="pair" then - local tv=tables[lookup[2]] - if tv then - lookup[2]=tv - end - local tv=tables[lookup[3]] - if tv then - lookup[3]=tv - end - elseif what~="substitution" then - local tv=tables[lookup] - if tv then - slookups[tag]=tv - end - end - end - end - end - local mlookups=description.mlookups - if mlookups then - local tv=tables[mlookups] - if tv then - description.mlookups=tv - mlookups=unpacked[tv] - end - if mlookups then - for tag,list in next,mlookups do - local tv=tables[list] - if tv then - mlookups[tag]=tv - list=unpacked[tv] - end - if list then - local what=lookuptypes[tag] - if what=="pair" then - for i=1,#list do - local lookup=list[i] - local tv=tables[lookup[2]] - if tv then - lookup[2]=tv - end - local tv=tables[lookup[3]] - if tv then - lookup[3]=tv - end - end - elseif what~="substitution" then - for i=1,#list do - local tv=tables[list[i]] - if tv then - list[i]=tv - end - end - end - end - end - end - end - local kerns=description.kerns - if kerns then - local tm=tables[kerns] - if tm then - description.kerns=tm - kerns=unpacked[tm] - end - if kerns then - for k,kern in next,kerns do - local tv=tables[kern] - if tv then - kerns[k]=tv - end - end - end - end - local math=description.math - if math then - local kerns=math.kerns - if kerns then - local tm=tables[kerns] - if tm then - math.kerns=tm - kerns=unpacked[tm] - end - if kerns then - for k,kern in next,kerns do - local tv=tables[kern] - if tv then - kerns[k]=tv - end - end - end - end - end - local anchors=description.anchors - if anchors then - local ta=tables[anchors] - if ta then - description.anchors=ta - anchors=unpacked[ta] - end - if anchors then - for tag,anchor in next,anchors do - if tag=="baselig" then - for _,list in next,anchor do - for i=1,#list do - local tv=tables[list[i]] - if tv then - list[i]=tv - end - end - end - else - for a,data in next,anchor do - local tv=tables[data] - if tv then - anchor[a]=tv - end - end - end - end - end - end - local altuni=description.altuni - if altuni then - local altuni=tables[altuni] - if altuni then - description.altuni=altuni - for i=1,#altuni do - local tv=tables[altuni[i]] - if tv then - altuni[i]=tv - end - end - end - end - end - local lookups=data.lookups - if lookups then - for _,lookup in next,lookups do - local rules=lookup.rules - if rules then - for i=1,#rules do - local rule=rules[i] - local before=rule.before - if before then - local tv=tables[before] - if tv then - rule.before=tv - before=unpacked[tv] - end - if before then - for i=1,#before do - local tv=tables[before[i]] - if tv then - before[i]=tv - end - end - end - end - local after=rule.after - if after then - local tv=tables[after] - if tv then - rule.after=tv - after=unpacked[tv] - end - if after then - for i=1,#after do - local tv=tables[after[i]] - if tv then - after[i]=tv - end - end - end - end - local current=rule.current - if current then - local tv=tables[current] - if tv then - rule.current=tv - current=unpacked[tv] - end - if current then - for i=1,#current do - local tv=tables[current[i]] - if tv then - current[i]=tv - end - end - end - end - local replacements=rule.replacements - if replacements then - local tv=tables[replacements] - if tv then - rule.replacements=tv - end - end - local lookups=rule.lookups - if lookups then - local 
tv=tables[lookups] - if tv then - rule.lookups=tv - end - end - end - end - end - end - local anchor_to_lookup=resources.anchor_to_lookup - if anchor_to_lookup then - for anchor,lookup in next,anchor_to_lookup do - local tv=tables[lookup] - if tv then - anchor_to_lookup[anchor]=tv - end - end - end - local lookup_to_anchor=resources.lookup_to_anchor - if lookup_to_anchor then - for lookup,anchor in next,lookup_to_anchor do - local tv=tables[anchor] - if tv then - lookup_to_anchor[lookup]=tv - end - end - end - local ls=resources.sequences - if ls then - for _,feature in next,ls do - local flags=feature.flags - if flags then - local tv=tables[flags] - if tv then - feature.flags=tv - end - end - local subtables=feature.subtables - if subtables then - local tv=tables[subtables] - if tv then - feature.subtables=tv - end - end - local features=feature.features - if features then - local tv=tables[features] - if tv then - feature.features=tv - features=unpacked[tv] - end - if features then - for script,data in next,features do - local tv=tables[data] - if tv then - features[script]=tv - end - end - end - end - local order=feature.order - if order then - local tv=tables[order] - if tv then - feature.order=tv - end - end - local markclass=feature.markclass - if markclass then - local tv=tables[markclass] - if tv then - feature.markclass=tv - end - end - end - end - local lookups=resources.lookups - if lookups then - for _,lookup in next,lookups do - local flags=lookup.flags - if flags then - local tv=tables[flags] - if tv then - lookup.flags=tv - end - end - local subtables=lookup.subtables - if subtables then - local tv=tables[subtables] - if tv then - lookup.subtables=tv - end - end - end - end - local features=resources.features - if features then - for _,what in next,glists do - local feature=features[what] - if feature then - for tag,spec in next,feature do - local tv=tables[spec] - if tv then - feature[tag]=tv - end - end - end - end - end - data.tables=nil - end - end -end -if otf.enhancers.register then - otf.enhancers.register("pack",packdata) - otf.enhancers.register("unpack",unpackdata) -end -otf.enhancers.unpack=unpackdata -otf.enhancers.pack=packdata - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['luatex-fonts-lua']={ - version=1.001, - comment="companion to luatex-*.tex", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end -local fonts=fonts -fonts.formats.lua="lua" -function fonts.readers.lua(specification) - local fullname=specification.filename or "" - if fullname=="" then - local forced=specification.forced or "" - if forced~="" then - fullname=specification.name.."."..forced - else - fullname=specification.name - end - end - local fullname=resolvers.findfile(fullname) or "" - if fullname~="" then - local loader=loadfile(fullname) - loader=loader and loader() - return loader and loader(specification) - end -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-def']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local 
format,gmatch,match,find,lower,gsub=string.format,string.gmatch,string.match,string.find,string.lower,string.gsub -local tostring,next=tostring,next -local lpegmatch=lpeg.match -local suffixonly,removesuffix=file.suffix,file.removesuffix -local allocate=utilities.storage.allocate -local trace_defining=false trackers .register("fonts.defining",function(v) trace_defining=v end) -local directive_embedall=false directives.register("fonts.embedall",function(v) directive_embedall=v end) -trackers.register("fonts.loading","fonts.defining","otf.loading","afm.loading","tfm.loading") -trackers.register("fonts.all","fonts.*","otf.*","afm.*","tfm.*") -local report_defining=logs.reporter("fonts","defining") -local fonts=fonts -local fontdata=fonts.hashes.identifiers -local readers=fonts.readers -local definers=fonts.definers -local specifiers=fonts.specifiers -local constructors=fonts.constructors -local fontgoodies=fonts.goodies -readers.sequence=allocate { 'otf','ttf','afm','tfm','lua' } -local variants=allocate() -specifiers.variants=variants -definers.methods=definers.methods or {} -local internalized=allocate() -local lastdefined=nil -local loadedfonts=constructors.loadedfonts -local designsizes=constructors.designsizes -local resolvefile=fontgoodies and fontgoodies.filenames and fontgoodies.filenames.resolve or function(s) return s end -local splitter,splitspecifiers=nil,"" -local P,C,S,Cc=lpeg.P,lpeg.C,lpeg.S,lpeg.Cc -local left=P("(") -local right=P(")") -local colon=P(":") -local space=P(" ") -definers.defaultlookup="file" -local prefixpattern=P(false) -local function addspecifier(symbol) - splitspecifiers=splitspecifiers..symbol - local method=S(splitspecifiers) - local lookup=C(prefixpattern)*colon - local sub=left*C(P(1-left-right-method)^1)*right - local specification=C(method)*C(P(1)^1) - local name=C((1-sub-specification)^1) - splitter=P((lookup+Cc(""))*name*(sub+Cc(""))*(specification+Cc(""))) -end -local function addlookup(str,default) - prefixpattern=prefixpattern+P(str) -end -definers.addlookup=addlookup -addlookup("file") -addlookup("name") -addlookup("spec") -local function getspecification(str) - return lpegmatch(splitter,str or "") -end -definers.getspecification=getspecification -function definers.registersplit(symbol,action,verbosename) - addspecifier(symbol) - variants[symbol]=action - if verbosename then - variants[verbosename]=action - end -end -local function makespecification(specification,lookup,name,sub,method,detail,size) - size=size or 655360 - if not lookup or lookup=="" then - lookup=definers.defaultlookup - end - if trace_defining then - report_defining("specification %a, lookup %a, name %a, sub %a, method %a, detail %a", - specification,lookup,name,sub,method,detail) - end - local t={ - lookup=lookup, - specification=specification, - size=size, - name=name, - sub=sub, - method=method, - detail=detail, - resolved="", - forced="", - features={}, - } - return t -end -definers.makespecification=makespecification -function definers.analyze(specification,size) - local lookup,name,sub,method,detail=getspecification(specification or "") - return makespecification(specification,lookup,name,sub,method,detail,size) -end -definers.resolvers=definers.resolvers or {} -local resolvers=definers.resolvers -function resolvers.file(specification) - local name=resolvefile(specification.name) - local suffix=lower(suffixonly(name)) - if fonts.formats[suffix] then - specification.forced=suffix - specification.forcedname=name - specification.name=removesuffix(name) - else - 
specification.name=name - end -end -function resolvers.name(specification) - local resolve=fonts.names.resolve - if resolve then - local resolved,sub=resolve(specification.name,specification.sub,specification) - if resolved then - specification.resolved=resolved - specification.sub=sub - local suffix=lower(suffixonly(resolved)) - if fonts.formats[suffix] then - specification.forced=suffix - specification.forcedname=resolved - specification.name=removesuffix(resolved) - else - specification.name=resolved - end - end - else - resolvers.file(specification) - end -end -function resolvers.spec(specification) - local resolvespec=fonts.names.resolvespec - if resolvespec then - local resolved,sub=resolvespec(specification.name,specification.sub,specification) - if resolved then - specification.resolved=resolved - specification.sub=sub - specification.forced=lower(suffixonly(resolved)) - specification.forcedname=resolved - specification.name=removesuffix(resolved) - end - else - resolvers.name(specification) - end -end -function definers.resolve(specification) - if not specification.resolved or specification.resolved=="" then - local r=resolvers[specification.lookup] - if r then - r(specification) - end - end - if specification.forced=="" then - specification.forced=nil - specification.forcedname=nil - end - specification.hash=lower(specification.name..' @ '..constructors.hashfeatures(specification)) - if specification.sub and specification.sub~="" then - specification.hash=specification.sub..' @ '..specification.hash - end - return specification -end -function definers.applypostprocessors(tfmdata) - local postprocessors=tfmdata.postprocessors - if postprocessors then - local properties=tfmdata.properties - for i=1,#postprocessors do - local extrahash=postprocessors[i](tfmdata) - if type(extrahash)=="string" and extrahash~="" then - extrahash=gsub(lower(extrahash),"[^a-z]","-") - properties.fullname=format("%s-%s",properties.fullname,extrahash) - end - end - end - return tfmdata -end -local function checkembedding(tfmdata) - local properties=tfmdata.properties - local embedding - if directive_embedall then - embedding="full" - elseif properties and properties.filename and constructors.dontembed[properties.filename] then - embedding="no" - else - embedding="subset" - end - if properties then - properties.embedding=embedding - else - tfmdata.properties={ embedding=embedding } - end - tfmdata.embedding=embedding -end -function definers.loadfont(specification) - local hash=constructors.hashinstance(specification) - local tfmdata=loadedfonts[hash] - if not tfmdata then - local forced=specification.forced or "" - if forced~="" then - local reader=readers[lower(forced)] - tfmdata=reader and reader(specification) - if not tfmdata then - report_defining("forced type %a of %a not found",forced,specification.name) - end - else - local sequence=readers.sequence - for s=1,#sequence do - local reader=sequence[s] - if readers[reader] then - if trace_defining then - report_defining("trying (reader sequence driven) type %a for %a with file %a",reader,specification.name,specification.filename) - end - tfmdata=readers[reader](specification) - if tfmdata then - break - else - specification.filename=nil - end - end - end - end - if tfmdata then - tfmdata=definers.applypostprocessors(tfmdata) - checkembedding(tfmdata) - loadedfonts[hash]=tfmdata - designsizes[specification.hash]=tfmdata.parameters.designsize - end - end - if not tfmdata then - report_defining("font with asked name %a is not found using lookup 
%a",specification.name,specification.lookup) - end - return tfmdata -end -function constructors.checkvirtualids() -end -function constructors.readanddefine(name,size) - local specification=definers.analyze(name,size) - local method=specification.method - if method and variants[method] then - specification=variants[method](specification) - end - specification=definers.resolve(specification) - local hash=constructors.hashinstance(specification) - local id=definers.registered(hash) - if not id then - local tfmdata=definers.loadfont(specification) - if tfmdata then - tfmdata.properties.hash=hash - constructors.checkvirtualids(tfmdata) - id=font.define(tfmdata) - definers.register(tfmdata,id) - else - id=0 - end - end - return fontdata[id],id -end -function definers.current() - return lastdefined -end -function definers.registered(hash) - local id=internalized[hash] - return id,id and fontdata[id] -end -function definers.register(tfmdata,id) - if tfmdata and id then - local hash=tfmdata.properties.hash - if not hash then - report_defining("registering font, id %a, name %a, invalid hash",id,tfmdata.properties.filename or "?") - elseif not internalized[hash] then - internalized[hash]=id - if trace_defining then - report_defining("registering font, id %s, hash %a",id,hash) - end - fontdata[id]=tfmdata - end - end -end -function definers.read(specification,size,id) - statistics.starttiming(fonts) - if type(specification)=="string" then - specification=definers.analyze(specification,size) - end - local method=specification.method - if method and variants[method] then - specification=variants[method](specification) - end - specification=definers.resolve(specification) - local hash=constructors.hashinstance(specification) - local tfmdata=definers.registered(hash) - if tfmdata then - if trace_defining then - report_defining("already hashed: %s",hash) - end - else - tfmdata=definers.loadfont(specification) - if tfmdata then - if trace_defining then - report_defining("loaded and hashed: %s",hash) - end - tfmdata.properties.hash=hash - if id then - definers.register(tfmdata,id) - end - else - if trace_defining then - report_defining("not loaded and hashed: %s",hash) - end - end - end - lastdefined=tfmdata or id - if not tfmdata then - report_defining("unknown font %a, loading aborted",specification.name) - elseif trace_defining and type(tfmdata)=="table" then - local properties=tfmdata.properties or {} - local parameters=tfmdata.parameters or {} - report_defining("using %a font with id %a, name %a, size %a, bytes %a, encoding %a, fullname %a, filename %a", - properties.format or "unknown",id,properties.name,parameters.size,properties.encodingbytes, - properties.encodingname,properties.fullname,file.basename(properties.filename)) - end - statistics.stoptiming(fonts) - return tfmdata -end -function font.getfont(id) - return fontdata[id] -end -callbacks.register('define_font',definers.read,"definition of fonts (tfmdata preparation)") - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['luatex-font-def']={ - version=1.001, - comment="companion to luatex-*.tex", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end -local fonts=fonts -fonts.constructors.namemode="specification" -function fonts.definers.getspecification(str) - return 
"",str,"",":",str -end -local list={} -local function issome () list.lookup='name' end -local function isfile () list.lookup='file' end -local function isname () list.lookup='name' end -local function thename(s) list.name=s end -local function issub (v) list.sub=v end -local function iscrap (s) list.crap=string.lower(s) end -local function iskey (k,v) list[k]=v end -local function istrue (s) list[s]=true end -local function isfalse(s) list[s]=false end -local P,S,R,C=lpeg.P,lpeg.S,lpeg.R,lpeg.C -local spaces=P(" ")^0 -local namespec=(1-S("/:("))^0 -local crapspec=spaces*P("/")*(((1-P(":"))^0)/iscrap)*spaces -local filename_1=P("file:")/isfile*(namespec/thename) -local filename_2=P("[")*P(true)/isname*(((1-P("]"))^0)/thename)*P("]") -local fontname_1=P("name:")/isname*(namespec/thename) -local fontname_2=P(true)/issome*(namespec/thename) -local sometext=(R("az","AZ","09")+S("+-."))^1 -local truevalue=P("+")*spaces*(sometext/istrue) -local falsevalue=P("-")*spaces*(sometext/isfalse) -local keyvalue=(C(sometext)*spaces*P("=")*spaces*C(sometext))/iskey -local somevalue=sometext/istrue -local subvalue=P("(")*(C(P(1-S("()"))^1)/issub)*P(")") -local option=spaces*(keyvalue+falsevalue+truevalue+somevalue)*spaces -local options=P(":")*spaces*(P(";")^0*option)^0 -local pattern=(filename_1+filename_2+fontname_1+fontname_2)*subvalue^0*crapspec^0*options^0 -local function colonized(specification) - list={} - lpeg.match(pattern,specification.specification) - list.crap=nil - if list.name then - specification.name=list.name - list.name=nil - end - if list.lookup then - specification.lookup=list.lookup - list.lookup=nil - end - if list.sub then - specification.sub=list.sub - list.sub=nil - end - specification.features.normal=fonts.handlers.otf.features.normalize(list) - return specification -end -fonts.definers.registersplit(":",colonized,"cryptic") -fonts.definers.registersplit("",colonized,"more cryptic") -function fonts.definers.applypostprocessors(tfmdata) - local postprocessors=tfmdata.postprocessors - if postprocessors then - for i=1,#postprocessors do - local extrahash=postprocessors[i](tfmdata) - if type(extrahash)=="string" and extrahash~="" then - extrahash=string.gsub(lower(extrahash),"[^a-z]","-") - tfmdata.properties.fullname=format("%s-%s",tfmdata.properties.fullname,extrahash) - end - end - end - return tfmdata -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['luatex-fonts-ext']={ - version=1.001, - comment="companion to luatex-*.tex", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end -local fonts=fonts -local otffeatures=fonts.constructors.newfeatures("otf") -local function initializeitlc(tfmdata,value) - if value then - local parameters=tfmdata.parameters - local italicangle=parameters.italicangle - if italicangle and italicangle~=0 then - local properties=tfmdata.properties - local factor=tonumber(value) or 1 - properties.hasitalics=true - properties.autoitalicamount=factor*(parameters.uwidth or 40)/2 - end - end -end -otffeatures.register { - name="itlc", - description="italic correction", - initializers={ - base=initializeitlc, - node=initializeitlc, - } -} -local function initializeslant(tfmdata,value) - value=tonumber(value) - if not value then - value=0 - elseif value>1 then - value=1 - elseif value<-1 then 
- value=-1 - end - tfmdata.parameters.slantfactor=value -end -otffeatures.register { - name="slant", - description="slant glyphs", - initializers={ - base=initializeslant, - node=initializeslant, - } -} -local function initializeextend(tfmdata,value) - value=tonumber(value) - if not value then - value=0 - elseif value>10 then - value=10 - elseif value<-10 then - value=-10 - end - tfmdata.parameters.extendfactor=value -end -otffeatures.register { - name="extend", - description="scale glyphs horizontally", - initializers={ - base=initializeextend, - node=initializeextend, - } -} -fonts.protrusions=fonts.protrusions or {} -fonts.protrusions.setups=fonts.protrusions.setups or {} -local setups=fonts.protrusions.setups -local function initializeprotrusion(tfmdata,value) - if value then - local setup=setups[value] - if setup then - local factor,left,right=setup.factor or 1,setup.left or 1,setup.right or 1 - local emwidth=tfmdata.parameters.quad - tfmdata.parameters.protrusion={ - auto=true, - } - for i,chr in next,tfmdata.characters do - local v,pl,pr=setup[i],nil,nil - if v then - pl,pr=v[1],v[2] - end - if pl and pl~=0 then chr.left_protruding=left*pl*factor end - if pr and pr~=0 then chr.right_protruding=right*pr*factor end - end - end - end -end -otffeatures.register { - name="protrusion", - description="shift characters into the left and or right margin", - initializers={ - base=initializeprotrusion, - node=initializeprotrusion, - } -} -fonts.expansions=fonts.expansions or {} -fonts.expansions.setups=fonts.expansions.setups or {} -local setups=fonts.expansions.setups -local function initializeexpansion(tfmdata,value) - if value then - local setup=setups[value] - if setup then - local factor=setup.factor or 1 - tfmdata.parameters.expansion={ - stretch=10*(setup.stretch or 0), - shrink=10*(setup.shrink or 0), - step=10*(setup.step or 0), - auto=true, - } - for i,chr in next,tfmdata.characters do - local v=setup[i] - if v and v~=0 then - chr.expansion_factor=v*factor - else - chr.expansion_factor=factor - end - end - end - end -end -otffeatures.register { - name="expansion", - description="apply hz optimization", - initializers={ - base=initializeexpansion, - node=initializeexpansion, - } -} -function fonts.loggers.onetimemessage() end -local byte=string.byte -fonts.expansions.setups['default']={ - stretch=2,shrink=2,step=.5,factor=1, - [byte('A')]=0.5,[byte('B')]=0.7,[byte('C')]=0.7,[byte('D')]=0.5,[byte('E')]=0.7, - [byte('F')]=0.7,[byte('G')]=0.5,[byte('H')]=0.7,[byte('K')]=0.7,[byte('M')]=0.7, - [byte('N')]=0.7,[byte('O')]=0.5,[byte('P')]=0.7,[byte('Q')]=0.5,[byte('R')]=0.7, - [byte('S')]=0.7,[byte('U')]=0.7,[byte('W')]=0.7,[byte('Z')]=0.7, - [byte('a')]=0.7,[byte('b')]=0.7,[byte('c')]=0.7,[byte('d')]=0.7,[byte('e')]=0.7, - [byte('g')]=0.7,[byte('h')]=0.7,[byte('k')]=0.7,[byte('m')]=0.7,[byte('n')]=0.7, - [byte('o')]=0.7,[byte('p')]=0.7,[byte('q')]=0.7,[byte('s')]=0.7,[byte('u')]=0.7, - [byte('w')]=0.7,[byte('z')]=0.7, - [byte('2')]=0.7,[byte('3')]=0.7,[byte('6')]=0.7,[byte('8')]=0.7,[byte('9')]=0.7, -} -fonts.protrusions.setups['default']={ - factor=1,left=1,right=1, - [0x002C]={ 0,1 }, - [0x002E]={ 0,1 }, - [0x003A]={ 0,1 }, - [0x003B]={ 0,1 }, - [0x002D]={ 0,1 }, - [0x2013]={ 0,0.50 }, - [0x2014]={ 0,0.33 }, - [0x3001]={ 0,1 }, - [0x3002]={ 0,1 }, - [0x060C]={ 0,1 }, - [0x061B]={ 0,1 }, - [0x06D4]={ 0,1 }, -} -fonts.handlers.otf.features.normalize=function(t) - if t.rand then - t.rand="random" - end - return t -end -function fonts.helpers.nametoslot(name) - local t=type(name) - if 
t=="string" then - local tfmdata=fonts.hashes.identifiers[currentfont()] - local shared=tfmdata and tfmdata.shared - local fntdata=shared and shared.rawdata - return fntdata and fntdata.resources.unicodes[name] - elseif t=="number" then - return n - end -end -fonts.encodings=fonts.encodings or {} -local reencodings={} -fonts.encodings.reencodings=reencodings -local function specialreencode(tfmdata,value) - local encoding=value and reencodings[value] - if encoding then - local temp={} - local char=tfmdata.characters - for k,v in next,encoding do - temp[k]=char[v] - end - for k,v in next,temp do - char[k]=temp[k] - end - return string.format("reencoded:%s",value) - end -end -local function reencode(tfmdata,value) - tfmdata.postprocessors=tfmdata.postprocessors or {} - table.insert(tfmdata.postprocessors, - function(tfmdata) - return specialreencode(tfmdata,value) - end - ) -end -otffeatures.register { - name="reencode", - description="reencode characters", - manipulators={ - base=reencode, - node=reencode, - } -} - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['luatex-fonts-cbk']={ - version=1.001, - comment="companion to luatex-*.tex", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end -local fonts=fonts -local nodes=nodes -local traverse_id=node.traverse_id -local glyph_code=nodes.nodecodes.glyph -local ligaturing=node.ligaturing -local kerning=node.kerning -function node.ligaturing() texio.write_nl("warning: node.ligaturing is already applied") end -function node.kerning () texio.write_nl("warning: node.kerning is already applied") end -function nodes.handlers.characters(head) - local fontdata=fonts.hashes.identifiers - if fontdata then - local usedfonts,basefonts,prevfont,basefont={},{},nil,nil - for n in traverse_id(glyph_code,head) do - local font=n.font - if font~=prevfont then - if basefont then - basefont[2]=n.prev - end - prevfont=font - local used=usedfonts[font] - if not used then - local tfmdata=fontdata[font] - if tfmdata then - local shared=tfmdata.shared - if shared then - local processors=shared.processes - if processors and #processors>0 then - usedfonts[font]=processors - else - basefont={ n,nil } - basefonts[#basefonts+1]=basefont - end - end - end - end - end - end - if next(usedfonts) then - for font,processors in next,usedfonts do - for i=1,#processors do - head=processors[i](head,font,0) or head - end - end - end - if #basefonts>0 then - for i=1,#basefonts do - local range=basefonts[i] - local start,stop=range[1],range[2] - if stop then - ligaturing(start,stop) - kerning(start,stop) - else - ligaturing(start) - kerning(start) - end - end - end - return head,true - else - return head,false - end -end -function nodes.simple_font_handler(head) - head=nodes.handlers.characters(head) - nodes.injections.handler(head) - nodes.handlers.protectglyphs(head) - return head -end - -end -- closure diff --git a/src/luaotfload-fonts-cbk.lua b/src/luaotfload-fonts-cbk.lua deleted file mode 100644 index 9db94f6..0000000 --- a/src/luaotfload-fonts-cbk.lua +++ /dev/null @@ -1,68 +0,0 @@ -if not modules then modules = { } end modules ['luatex-fonts-cbk'] = { - version = 1.001, - comment = "companion to luatex-*.tex", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development 
Team", - license = "see context related readme files" -} - -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end - -local fonts = fonts -local nodes = nodes - --- Fonts: (might move to node-gef.lua) - -local traverse_id = node.traverse_id -local glyph_code = nodes.nodecodes.glyph - -function nodes.handlers.characters(head) - local fontdata = fonts.hashes.identifiers - if fontdata then - local usedfonts, done, prevfont = { }, false, nil - for n in traverse_id(glyph_code,head) do - local font = n.font - if font ~= prevfont then - prevfont = font - local used = usedfonts[font] - if not used then - local tfmdata = fontdata[font] -- - if tfmdata then - local shared = tfmdata.shared -- we need to check shared, only when same features - if shared then - local processors = shared.processes - if processors and #processors > 0 then - usedfonts[font] = processors - done = true - end - end - end - end - end - end - if done then - for font, processors in next, usedfonts do - for i=1,#processors do - local h, d = processors[i](head,font,0) - head, done = h or head, done or d - end - end - end - return head, true - else - return head, false - end -end - -function nodes.simple_font_handler(head) --- lang.hyphenate(head) - head = nodes.handlers.characters(head) - nodes.injections.handler(head) - nodes.handlers.protectglyphs(head) - head = node.ligaturing(head) - head = node.kerning(head) - return head -end diff --git a/src/luaotfload-fonts-def.lua b/src/luaotfload-fonts-def.lua deleted file mode 100644 index 0c2f0db..0000000 --- a/src/luaotfload-fonts-def.lua +++ /dev/null @@ -1,97 +0,0 @@ -if not modules then modules = { } end modules ['luatex-font-def'] = { - version = 1.001, - comment = "companion to luatex-*.tex", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end - -local fonts = fonts - --- A bit of tuning for definitions. - -fonts.constructors.namemode = "specification" -- somehow latex needs this (changed name!) => will change into an overload - --- tricky: we sort of bypass the parser and directly feed all into --- the sub parser - -function fonts.definers.getspecification(str) - return "", str, "", ":", str -end - --- the generic name parser (different from context!) - -local list = { } - -local function issome () list.lookup = 'name' end -- xetex mode prefers name (not in context!) 
-local function isfile () list.lookup = 'file' end -local function isname () list.lookup = 'name' end -local function thename(s) list.name = s end -local function issub (v) list.sub = v end -local function iscrap (s) list.crap = string.lower(s) end -local function iskey (k,v) list[k] = v end -local function istrue (s) list[s] = true end -local function isfalse(s) list[s] = false end - -local P, S, R, C = lpeg.P, lpeg.S, lpeg.R, lpeg.C - -local spaces = P(" ")^0 -local namespec = (1-S("/:("))^0 -- was: (1-S("/: ("))^0 -local crapspec = spaces * P("/") * (((1-P(":"))^0)/iscrap) * spaces -local filename_1 = P("file:")/isfile * (namespec/thename) -local filename_2 = P("[") * P(true)/isname * (((1-P("]"))^0)/thename) * P("]") -local fontname_1 = P("name:")/isname * (namespec/thename) -local fontname_2 = P(true)/issome * (namespec/thename) -local sometext = (R("az","AZ","09") + S("+-."))^1 -local truevalue = P("+") * spaces * (sometext/istrue) -local falsevalue = P("-") * spaces * (sometext/isfalse) -local keyvalue = (C(sometext) * spaces * P("=") * spaces * C(sometext))/iskey -local somevalue = sometext/istrue -local subvalue = P("(") * (C(P(1-S("()"))^1)/issub) * P(")") -- for Kim -local option = spaces * (keyvalue + falsevalue + truevalue + somevalue) * spaces -local options = P(":") * spaces * (P(";")^0 * option)^0 - -local pattern = (filename_1 + filename_2 + fontname_1 + fontname_2) * subvalue^0 * crapspec^0 * options^0 - -local function colonized(specification) -- xetex mode - list = { } - lpeg.match(pattern,specification.specification) - list.crap = nil -- style not supported, maybe some day - if list.name then - specification.name = list.name - list.name = nil - end - if list.lookup then - specification.lookup = list.lookup - list.lookup = nil - end - if list.sub then - specification.sub = list.sub - list.sub = nil - end - specification.features.normal = fonts.handlers.otf.features.normalize(list) - return specification -end - -fonts.definers.registersplit(":",colonized,"cryptic") -fonts.definers.registersplit("", colonized,"more cryptic") -- catches \font\text=[names] - -function fonts.definers.applypostprocessors(tfmdata) - local postprocessors = tfmdata.postprocessors - if postprocessors then - for i=1,#postprocessors do - local extrahash = postprocessors[i](tfmdata) -- after scaling etc - if type(extrahash) == "string" and extrahash ~= "" then - -- e.g. 
a reencoding needs this - extrahash = string.gsub(lower(extrahash),"[^a-z]","-") - tfmdata.properties.fullname = format("%s-%s",tfmdata.properties.fullname,extrahash) - end - end - end - return tfmdata -end diff --git a/src/luaotfload-fonts-enc.lua b/src/luaotfload-fonts-enc.lua deleted file mode 100644 index e20c3a0..0000000 --- a/src/luaotfload-fonts-enc.lua +++ /dev/null @@ -1,28 +0,0 @@ -if not modules then modules = { } end modules ['luatex-font-enc'] = { - version = 1.001, - comment = "companion to luatex-*.tex", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end - -local fonts = fonts -fonts.encodings = { } -fonts.encodings.agl = { } - -setmetatable(fonts.encodings.agl, { __index = function(t,k) - if k == "unicodes" then - texio.write(" ") - local unicodes = dofile(resolvers.findfile("font-age.lua")) - fonts.encodings.agl = { unicodes = unicodes } - return unicodes - else - return nil - end -end }) - diff --git a/src/luaotfload-fonts-ext.lua b/src/luaotfload-fonts-ext.lua deleted file mode 100644 index b60d045..0000000 --- a/src/luaotfload-fonts-ext.lua +++ /dev/null @@ -1,272 +0,0 @@ -if not modules then modules = { } end modules ['luatex-fonts-ext'] = { - version = 1.001, - comment = "companion to luatex-*.tex", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end - -local fonts = fonts -local otffeatures = fonts.constructors.newfeatures("otf") - --- A few generic extensions. 
- -local function initializeitlc(tfmdata,value) - if value then - -- the magic 40 and it formula come from Dohyun Kim but we might need another guess - local parameters = tfmdata.parameters - local italicangle = parameters.italicangle - if italicangle and italicangle ~= 0 then - local properties = tfmdata.properties - local factor = tonumber(value) or 1 - properties.hasitalics = true - properties.autoitalicamount = factor * (parameters.uwidth or 40)/2 - end - end -end - -otffeatures.register { - name = "itlc", - description = "italic correction", - initializers = { - base = initializeitlc, - node = initializeitlc, - } -} - --- slant and extend - -local function initializeslant(tfmdata,value) - value = tonumber(value) - if not value then - value = 0 - elseif value > 1 then - value = 1 - elseif value < -1 then - value = -1 - end - tfmdata.parameters.slantfactor = value -end - -otffeatures.register { - name = "slant", - description = "slant glyphs", - initializers = { - base = initializeslant, - node = initializeslant, - } -} - -local function initializeextend(tfmdata,value) - value = tonumber(value) - if not value then - value = 0 - elseif value > 10 then - value = 10 - elseif value < -10 then - value = -10 - end - tfmdata.parameters.extendfactor = value -end - -otffeatures.register { - name = "extend", - description = "scale glyphs horizontally", - initializers = { - base = initializeextend, - node = initializeextend, - } -} - --- expansion and protrusion - -fonts.protrusions = fonts.protrusions or { } -fonts.protrusions.setups = fonts.protrusions.setups or { } - -local setups = fonts.protrusions.setups - -local function initializeprotrusion(tfmdata,value) - if value then - local setup = setups[value] - if setup then - local factor, left, right = setup.factor or 1, setup.left or 1, setup.right or 1 - local emwidth = tfmdata.parameters.quad - tfmdata.parameters.protrusion = { - auto = true, - } - for i, chr in next, tfmdata.characters do - local v, pl, pr = setup[i], nil, nil - if v then - pl, pr = v[1], v[2] - end - if pl and pl ~= 0 then chr.left_protruding = left *pl*factor end - if pr and pr ~= 0 then chr.right_protruding = right*pr*factor end - end - end - end -end - -otffeatures.register { - name = "protrusion", - description = "shift characters into the left and or right margin", - initializers = { - base = initializeprotrusion, - node = initializeprotrusion, - } -} - -fonts.expansions = fonts.expansions or { } -fonts.expansions.setups = fonts.expansions.setups or { } - -local setups = fonts.expansions.setups - -local function initializeexpansion(tfmdata,value) - if value then - local setup = setups[value] - if setup then - local factor = setup.factor or 1 - tfmdata.parameters.expansion = { - stretch = 10 * (setup.stretch or 0), - shrink = 10 * (setup.shrink or 0), - step = 10 * (setup.step or 0), - auto = true, - } - for i, chr in next, tfmdata.characters do - local v = setup[i] - if v and v ~= 0 then - chr.expansion_factor = v*factor - else -- can be option - chr.expansion_factor = factor - end - end - end - end -end - -otffeatures.register { - name = "expansion", - description = "apply hz optimization", - initializers = { - base = initializeexpansion, - node = initializeexpansion, - } -} - --- left over - -function fonts.loggers.onetimemessage() end - --- example vectors - -local byte = string.byte - -fonts.expansions.setups['default'] = { - - stretch = 2, shrink = 2, step = .5, factor = 1, - - [byte('A')] = 0.5, [byte('B')] = 0.7, [byte('C')] = 0.7, [byte('D')] = 0.5, [byte('E')] 
= 0.7, - [byte('F')] = 0.7, [byte('G')] = 0.5, [byte('H')] = 0.7, [byte('K')] = 0.7, [byte('M')] = 0.7, - [byte('N')] = 0.7, [byte('O')] = 0.5, [byte('P')] = 0.7, [byte('Q')] = 0.5, [byte('R')] = 0.7, - [byte('S')] = 0.7, [byte('U')] = 0.7, [byte('W')] = 0.7, [byte('Z')] = 0.7, - [byte('a')] = 0.7, [byte('b')] = 0.7, [byte('c')] = 0.7, [byte('d')] = 0.7, [byte('e')] = 0.7, - [byte('g')] = 0.7, [byte('h')] = 0.7, [byte('k')] = 0.7, [byte('m')] = 0.7, [byte('n')] = 0.7, - [byte('o')] = 0.7, [byte('p')] = 0.7, [byte('q')] = 0.7, [byte('s')] = 0.7, [byte('u')] = 0.7, - [byte('w')] = 0.7, [byte('z')] = 0.7, - [byte('2')] = 0.7, [byte('3')] = 0.7, [byte('6')] = 0.7, [byte('8')] = 0.7, [byte('9')] = 0.7, -} - -fonts.protrusions.setups['default'] = { - - factor = 1, left = 1, right = 1, - - [0x002C] = { 0, 1 }, -- comma - [0x002E] = { 0, 1 }, -- period - [0x003A] = { 0, 1 }, -- colon - [0x003B] = { 0, 1 }, -- semicolon - [0x002D] = { 0, 1 }, -- hyphen - [0x2013] = { 0, 0.50 }, -- endash - [0x2014] = { 0, 0.33 }, -- emdash - [0x3001] = { 0, 1 }, -- ideographic comma 、 - [0x3002] = { 0, 1 }, -- ideographic full stop 。 - [0x060C] = { 0, 1 }, -- arabic comma ، - [0x061B] = { 0, 1 }, -- arabic semicolon ؛ - [0x06D4] = { 0, 1 }, -- arabic full stop ۔ - -} - --- normalizer - -fonts.handlers.otf.features.normalize = function(t) - if t.rand then - t.rand = "random" - end - return t -end - --- bonus - -function fonts.helpers.nametoslot(name) - local t = type(name) - if t == "string" then - local tfmdata = fonts.hashes.identifiers[currentfont()] - local shared = tfmdata and tfmdata.shared - local fntdata = shared and shared.rawdata - return fntdata and fntdata.resources.unicodes[name] - elseif t == "number" then - return n - end -end - --- \font\test=file:somefont:reencode=mymessup --- --- fonts.encodings.reencodings.mymessup = { --- [109] = 110, -- m --- [110] = 109, -- n --- } - -fonts.encodings = fonts.encodings or { } -local reencodings = { } -fonts.encodings.reencodings = reencodings - -local function specialreencode(tfmdata,value) - -- we forget about kerns as we assume symbols and we - -- could issue a message if ther are kerns but it's - -- a hack anyway so we odn't care too much here - local encoding = value and reencodings[value] - if encoding then - local temp = { } - local char = tfmdata.characters - for k, v in next, encoding do - temp[k] = char[v] - end - for k, v in next, temp do - char[k] = temp[k] - end - -- if we use the font otherwise luatex gets confused so - -- we return an additional hash component for fullname - return string.format("reencoded:%s",value) - end -end - -local function reencode(tfmdata,value) - tfmdata.postprocessors = tfmdata.postprocessors or { } - table.insert(tfmdata.postprocessors, - function(tfmdata) - return specialreencode(tfmdata,value) - end - ) -end - -otffeatures.register { - name = "reencode", - description = "reencode characters", - manipulators = { - base = reencode, - node = reencode, - } -} diff --git a/src/luaotfload-fonts-inj.lua b/src/luaotfload-fonts-inj.lua deleted file mode 100644 index ae48150..0000000 --- a/src/luaotfload-fonts-inj.lua +++ /dev/null @@ -1,526 +0,0 @@ -if not modules then modules = { } end modules ['node-inj'] = { - version = 1.001, - comment = "companion to node-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files", -} - --- This is very experimental (this will change when we have luatex > .50 and --- a few pending thingies 
are available. Also, Idris needs to make a few more --- test fonts. Btw, future versions of luatex will have extended glyph properties --- that can be of help. Some optimizations can go away when we have faster machines. - --- todo: make a special one for context - -local next = next -local utfchar = utf.char - -local trace_injections = false trackers.register("nodes.injections", function(v) trace_injections = v end) - -local report_injections = logs.reporter("nodes","injections") - -local attributes, nodes, node = attributes, nodes, node - -fonts = fonts -local fontdata = fonts.hashes.identifiers - -nodes.injections = nodes.injections or { } -local injections = nodes.injections - -local nodecodes = nodes.nodecodes -local glyph_code = nodecodes.glyph -local kern_code = nodecodes.kern -local nodepool = nodes.pool -local newkern = nodepool.kern - -local traverse_id = node.traverse_id -local insert_node_before = node.insert_before -local insert_node_after = node.insert_after - -local a_kernpair = attributes.private('kernpair') -local a_ligacomp = attributes.private('ligacomp') -local a_markbase = attributes.private('markbase') -local a_markmark = attributes.private('markmark') -local a_markdone = attributes.private('markdone') -local a_cursbase = attributes.private('cursbase') -local a_curscurs = attributes.private('curscurs') -local a_cursdone = attributes.private('cursdone') - --- This injector has been tested by Idris Samawi Hamid (several arabic fonts as well as --- the rather demanding Husayni font), Khaled Hosny (latin and arabic) and Kaj Eigner --- (arabic, hebrew and thai) and myself (whatever font I come across). I'm pretty sure --- that this code is not 100% okay but examples are needed to figure things out. - -function injections.installnewkern(nk) - newkern = nk or newkern -end - -local cursives = { } -local marks = { } -local kerns = { } - --- Currently we do gpos/kern in a bit inofficial way but when we have the extra fields in --- glyphnodes to manipulate ht/dp/wd explicitly I will provide an alternative; also, we --- can share tables. - --- For the moment we pass the r2l key ... volt/arabtype tests .. idris: this needs --- checking with husayni (volt and fontforge). 
- -function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext) - local dx, dy = factor*(exit[1]-entry[1]), factor*(exit[2]-entry[2]) - local ws, wn = tfmstart.width, tfmnext.width - local bound = #cursives + 1 - start[a_cursbase] = bound - nxt[a_curscurs] = bound - cursives[bound] = { rlmode, dx, dy, ws, wn } - return dx, dy, bound -end - -function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr) - local x, y, w, h = factor*spec[1], factor*spec[2], factor*spec[3], factor*spec[4] - -- dy = y - h - if x ~= 0 or w ~= 0 or y ~= 0 or h ~= 0 then - local bound = current[a_kernpair] - if bound then - local kb = kerns[bound] - -- inefficient but singles have less, but weird anyway, needs checking - kb[2], kb[3], kb[4], kb[5] = (kb[2] or 0) + x, (kb[3] or 0) + y, (kb[4] or 0)+ w, (kb[5] or 0) + h - else - bound = #kerns + 1 - current[a_kernpair] = bound - kerns[bound] = { rlmode, x, y, w, h, r2lflag, tfmchr.width } - end - return x, y, w, h, bound - end - return x, y, w, h -- no bound -end - -function injections.setkern(current,factor,rlmode,x,tfmchr) - local dx = factor*x - if dx ~= 0 then - local bound = #kerns + 1 - current[a_kernpair] = bound - kerns[bound] = { rlmode, dx } - return dx, bound - else - return 0, 0 - end -end - -function injections.setmark(start,base,factor,rlmode,ba,ma,index,baseismark) -- ba=baseanchor, ma=markanchor - local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2]) -- the index argument is no longer used but when this - local bound = base[a_markbase] -- fails again we should pass it - local index = 1 - if bound then - local mb = marks[bound] - if mb then - -- if not index then index = #mb + 1 end - index = #mb + 1 - mb[index] = { dx, dy, rlmode } - start[a_markmark] = bound - start[a_markdone] = index - return dx, dy, bound - else - report_injections("possible problem, %U is base mark without data (id %a)",base.char,bound) - end - end --- index = index or 1 - index = index or 1 - bound = #marks + 1 - base[a_markbase] = bound - start[a_markmark] = bound - start[a_markdone] = index - marks[bound] = { [index] = { dx, dy, rlmode, baseismark } } - return dx, dy, bound -end - -local function dir(n) - return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset" -end - -local function trace(head) - report_injections("begin run") - for n in traverse_id(glyph_code,head) do - if n.subtype < 256 then - local kp = n[a_kernpair] - local mb = n[a_markbase] - local mm = n[a_markmark] - local md = n[a_markdone] - local cb = n[a_cursbase] - local cc = n[a_curscurs] - local char = n.char - report_injections("font %s, char %U, glyph %c",n.font,char,char) - if kp then - local k = kerns[kp] - if k[3] then - report_injections(" pairkern: dir %a, x %p, y %p, w %p, h %p",dir(k[1]),k[2],k[3],k[4],k[5]) - else - report_injections(" kern: dir %a, dx %p",dir(k[1]),k[2]) - end - end - if mb then - report_injections(" markbase: bound %a",mb) - end - if mm then - local m = marks[mm] - if mb then - local m = m[mb] - if m then - report_injections(" markmark: bound %a, index %a, dx %p, dy %p",mm,md,m[1],m[2]) - else - report_injections(" markmark: bound %a, missing index",mm) - end - else - m = m[1] - report_injections(" markmark: bound %a, dx %p, dy %p",mm,m and m[1],m and m[2]) - end - end - if cb then - report_injections(" cursbase: bound %a",cb) - end - if cc then - local c = cursives[cc] - report_injections(" curscurs: bound %a, dir %a, dx %p, dy %p",cc,dir(c[1]),c[2],c[3]) - end - end - end - report_injections("end run") -end - --- todo: reuse 
tables (i.e. no collection), but will be extra fields anyway --- todo: check for attribute - --- We can have a fast test on a font being processed, so we can check faster for marks etc --- but I'll make a context variant anyway. - -local function show_result(head) - local current = head - local skipping = false - while current do - local id = current.id - if id == glyph_code then - report_injections("char: %C, width %p, xoffset %p, yoffset %p",current.char,current.width,current.xoffset,current.yoffset) - skipping = false - elseif id == kern_code then - report_injections("kern: %p",current.kern) - skipping = false - elseif not skipping then - report_injections() - skipping = true - end - current = current.next - end -end - -function injections.handler(head,where,keep) - local has_marks, has_cursives, has_kerns = next(marks), next(cursives), next(kerns) - if has_marks or has_cursives then - if trace_injections then - trace(head) - end - -- in the future variant we will not copy items but refs to tables - local done, ky, rl, valid, cx, wx, mk, nofvalid = false, { }, { }, { }, { }, { }, { }, 0 - if has_kerns then -- move outside loop - local nf, tm = nil, nil - for n in traverse_id(glyph_code,head) do -- only needed for relevant fonts - if n.subtype < 256 then - nofvalid = nofvalid + 1 - valid[nofvalid] = n - if n.font ~= nf then - nf = n.font - tm = fontdata[nf].resources.marks - end - if tm then - mk[n] = tm[n.char] - end - local k = n[a_kernpair] - if k then - local kk = kerns[k] - if kk then - local x, y, w, h = kk[2] or 0, kk[3] or 0, kk[4] or 0, kk[5] or 0 - local dy = y - h - if dy ~= 0 then - ky[n] = dy - end - if w ~= 0 or x ~= 0 then - wx[n] = kk - end - rl[n] = kk[1] -- could move in test - end - end - end - end - else - local nf, tm = nil, nil - for n in traverse_id(glyph_code,head) do - if n.subtype < 256 then - nofvalid = nofvalid + 1 - valid[nofvalid] = n - if n.font ~= nf then - nf = n.font - tm = fontdata[nf].resources.marks - end - if tm then - mk[n] = tm[n.char] - end - end - end - end - if nofvalid > 0 then - -- we can assume done == true because we have cursives and marks - local cx = { } - if has_kerns and next(ky) then - for n, k in next, ky do - n.yoffset = k - end - end - -- todo: reuse t and use maxt - if has_cursives then - local p_cursbase, p = nil, nil - -- since we need valid[n+1] we can also use a "while true do" - local t, d, maxt = { }, { }, 0 - for i=1,nofvalid do -- valid == glyphs - local n = valid[i] - if not mk[n] then - local n_cursbase = n[a_cursbase] - if p_cursbase then - local n_curscurs = n[a_curscurs] - if p_cursbase == n_curscurs then - local c = cursives[n_curscurs] - if c then - local rlmode, dx, dy, ws, wn = c[1], c[2], c[3], c[4], c[5] - if rlmode >= 0 then - dx = dx - ws - else - dx = dx + wn - end - if dx ~= 0 then - cx[n] = dx - rl[n] = rlmode - end - -- if rlmode and rlmode < 0 then - dy = -dy - -- end - maxt = maxt + 1 - t[maxt] = p - d[maxt] = dy - else - maxt = 0 - end - end - elseif maxt > 0 then - local ny = n.yoffset - for i=maxt,1,-1 do - ny = ny + d[i] - local ti = t[i] - ti.yoffset = ti.yoffset + ny - end - maxt = 0 - end - if not n_cursbase and maxt > 0 then - local ny = n.yoffset - for i=maxt,1,-1 do - ny = ny + d[i] - local ti = t[i] - ti.yoffset = ny - end - maxt = 0 - end - p_cursbase, p = n_cursbase, n - end - end - if maxt > 0 then - local ny = n.yoffset - for i=maxt,1,-1 do - ny = ny + d[i] - local ti = t[i] - ti.yoffset = ny - end - maxt = 0 - end - if not keep then - cursives = { } - end - end - if has_marks then - for 
i=1,nofvalid do - local p = valid[i] - local p_markbase = p[a_markbase] - if p_markbase then - local mrks = marks[p_markbase] - local nofmarks = #mrks - for n in traverse_id(glyph_code,p.next) do - local n_markmark = n[a_markmark] - if p_markbase == n_markmark then - local index = n[a_markdone] or 1 - local d = mrks[index] - if d then - local rlmode = d[3] - -- - local k = wx[p] - if k then - local x = k[2] - local w = k[4] - if w then - if rlmode and rlmode >= 0 then - -- kern(x) glyph(p) kern(w-x) mark(n) - n.xoffset = p.xoffset - p.width + d[1] - (w-x) - else - -- kern(w-x) glyph(p) kern(x) mark(n) - n.xoffset = p.xoffset - d[1] - x - end - else - if rlmode and rlmode >= 0 then - -- okay for husayni - n.xoffset = p.xoffset - p.width + d[1] - else - -- needs checking: is x ok here? - n.xoffset = p.xoffset - d[1] - x - end - end - else - if rlmode and rlmode >= 0 then - n.xoffset = p.xoffset - p.width + d[1] - else - n.xoffset = p.xoffset - d[1] - end - local w = n.width - if w ~= 0 then - insert_node_before(head,n,newkern(-w/2)) - insert_node_after(head,n,newkern(-w/2)) - end - end - -- -- - if mk[p] then - n.yoffset = p.yoffset + d[2] - else - n.yoffset = n.yoffset + p.yoffset + d[2] - end - -- - if nofmarks == 1 then - break - else - nofmarks = nofmarks - 1 - end - end - else - -- KE: there can be sequences in ligatures - end - end - end - end - if not keep then - marks = { } - end - end - -- todo : combine - if next(wx) then - for n, k in next, wx do - -- only w can be nil (kernclasses), can be sped up when w == nil - local x = k[2] - local w = k[4] - if w then - local rl = k[1] -- r2l = k[6] - local wx = w - x - if rl < 0 then -- KE: don't use r2l here - if wx ~= 0 then - insert_node_before(head,n,newkern(wx)) -- type 0/2 - end - if x ~= 0 then - insert_node_after (head,n,newkern(x)) -- type 0/2 - end - else - if x ~= 0 then - insert_node_before(head,n,newkern(x)) -- type 0/2 - end - if wx ~= 0 then - insert_node_after (head,n,newkern(wx)) -- type 0/2 - end - end - elseif x ~= 0 then - -- this needs checking for rl < 0 but it is unlikely that a r2l script - -- uses kernclasses between glyphs so we're probably safe (KE has a - -- problematic font where marks interfere with rl < 0 in the previous - -- case) - insert_node_before(head,n,newkern(x)) -- a real font kern, type 0 - end - end - end - if next(cx) then - for n, k in next, cx do - if k ~= 0 then - local rln = rl[n] - if rln and rln < 0 then - insert_node_before(head,n,newkern(-k)) -- type 0/2 - else - insert_node_before(head,n,newkern(k)) -- type 0/2 - end - end - end - end - if not keep then - kerns = { } - end - -- if trace_injections then - -- show_result(head) - -- end - return head, true - elseif not keep then - kerns, cursives, marks = { }, { }, { } - end - elseif has_kerns then - if trace_injections then - trace(head) - end - for n in traverse_id(glyph_code,head) do - if n.subtype < 256 then - local k = n[a_kernpair] - if k then - local kk = kerns[k] - if kk then - local rl, x, y, w = kk[1], kk[2] or 0, kk[3], kk[4] - if y and y ~= 0 then - n.yoffset = y -- todo: h ? - end - if w then - -- copied from above - -- local r2l = kk[6] - local wx = w - x - if rl < 0 then -- KE: don't use r2l here - if wx ~= 0 then - insert_node_before(head,n,newkern(wx)) - end - if x ~= 0 then - insert_node_after (head,n,newkern(x)) - end - else - if x ~= 0 then - insert_node_before(head,n,newkern(x)) - end - if wx ~= 0 then - insert_node_after(head,n,newkern(wx)) - end - end - else - -- simple (e.g. 
kernclass kerns) - if x ~= 0 then - insert_node_before(head,n,newkern(x)) - end - end - end - end - end - end - if not keep then - kerns = { } - end - -- if trace_injections then - -- show_result(head) - -- end - return head, true - else - -- no tracing needed - end - return head, false -end diff --git a/src/luaotfload-fonts-lua.lua b/src/luaotfload-fonts-lua.lua deleted file mode 100644 index ec3fe38..0000000 --- a/src/luaotfload-fonts-lua.lua +++ /dev/null @@ -1,33 +0,0 @@ -if not modules then modules = { } end modules ['luatex-fonts-lua'] = { - version = 1.001, - comment = "companion to luatex-*.tex", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end - -local fonts = fonts -fonts.formats.lua = "lua" - -function fonts.readers.lua(specification) - local fullname = specification.filename or "" - if fullname == "" then - local forced = specification.forced or "" - if forced ~= "" then - fullname = specification.name .. "." .. forced - else - fullname = specification.name - end - end - local fullname = resolvers.findfile(fullname) or "" - if fullname ~= "" then - local loader = loadfile(fullname) - loader = loader and loader() - return loader and loader(specification) - end -end diff --git a/src/luaotfload-fonts-otn.lua b/src/luaotfload-fonts-otn.lua deleted file mode 100644 index c57be5f..0000000 --- a/src/luaotfload-fonts-otn.lua +++ /dev/null @@ -1,2848 +0,0 @@ -if not modules then modules = { } end modules ['font-otn'] = { - version = 1.001, - comment = "companion to font-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files", -} - --- preprocessors = { "nodes" } - --- this is still somewhat preliminary and it will get better in due time; --- much functionality could only be implemented thanks to the husayni font --- of Idris Samawi Hamid to who we dedicate this module. - --- in retrospect it always looks easy but believe it or not, it took a lot --- of work to get proper open type support done: buggy fonts, fuzzy specs, --- special made testfonts, many skype sessions between taco, idris and me, --- torture tests etc etc ... unfortunately the code does not show how much --- time it took ... - --- todo: --- --- kerning is probably not yet ok for latin around dics nodes (interesting challenge) --- extension infrastructure (for usage out of context) --- sorting features according to vendors/renderers --- alternative loop quitters --- check cursive and r2l --- find out where ignore-mark-classes went --- default features (per language, script) --- handle positions (we need example fonts) --- handle gpos_single (we might want an extra width field in glyph nodes because adding kerns might interfere) --- mark (to mark) code is still not what it should be (too messy but we need some more extreem husayni tests) --- remove some optimizations (when I have a faster machine) --- --- maybe redo the lot some way (more context specific) - ---[[ldx-- -

This module is a bit more split up than I'd like, but since we also want to test with plain TeX it has to be so. This module is part of ConTeXt, and discussion about improvements and functionality mostly happens on the ConTeXt mailing list.

The specification of OpenType is kind of vague. Apart from the lack of a proper, freely available specification, there is also the problem that Microsoft and Adobe may each have their own interpretation of how, and in what order, to apply features. In general the Microsoft website has the more detailed specification and is the better reference. There is also some information in the FontForge help files.

Because so much is possible, fonts might contain bugs and/or be made to work with certain renderers. These renderers may evolve over time, which can have the side effect that fonts suddenly behave differently.

After a lot of experiments (mostly by Taco, me and Idris) we're now at yet another implementation. Of course all errors are mine, and of course the code can be improved. There are quite a few optimizations going on here and processing speed is currently acceptable. Not all functions are implemented yet, often because I lack the fonts for testing. Many scripts are not yet supported either, but I will look into them as soon as ConTeXt users ask for it.

Because different interpretations are possible, I will extend the code with more (configurable) variants. I can also add hooks for users so that they can write their own extensions; a sketch of such a hook follows below.
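For illustration only (this sketch is not part of the patch): such a user hook can be registered through the otffeatures.register API that the files in this diff already use for itlc, slant and extend. The feature name and its behaviour below are invented for the example.

local otffeatures = fonts.constructors.newfeatures("otf")

local function initializedemo(tfmdata,value)
    -- runs while the font instance is built whenever "demo=<number>" is requested
    local factor = tonumber(value) or 0
    tfmdata.parameters.slantfactor = factor -- reuses an existing parameter to keep the sketch small
end

otffeatures.register {
    name         = "demo",
    description  = "example of a user written feature hook",
    initializers = {
        base = initializedemo,
        node = initializedemo,
    },
}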

Glyphs are indexed not by Unicode but in their own way. This is because there is no relationship with Unicode at all, apart from the fact that a font might cover certain ranges of characters. One character can have multiple shapes. However, at the TeX end we use Unicode, so all extra glyphs are mapped into a private space. This is needed because we need to access them, and TeX has to include them in the output eventually. A sketch of that mapping follows below.
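A minimal sketch of that private mapping, assuming a glyph table whose entries carry a unicode field when they have one; the start of the private area and the field names are assumptions for this example, not code from the patch.

local private = 0xF0000 -- assumed start of the private use area

local function remapglyphs(glyphs)
    local unicodes = { }
    for name, glyph in next, glyphs do
        local u = glyph.unicode
        if not u or u == -1 then
            -- no real unicode: park the glyph in the private area so TeX can still address it
            u = private
            private = private + 1
        end
        unicodes[name] = u
    end
    return unicodes
end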

The raw table as it comes from FontForge gets reorganized to fit our needs. In ConTeXt that table is packed (similar tables are shared) and cached on disk so that successive runs can use the optimized table (after loading, the table is unpacked). The flattening code used later is a prelude to an even more compact table format (and as such it keeps evolving). The sketch below pictures the sharing idea.
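This is a simplified sketch, not the actual packdata (whose inverse, the unpack code, appears at the top of this excerpt): identical subtables are stored once in a pool and referenced by index, and unpacking puts the shared table back in place.

local concat = table.concat

local function tabstr(t) -- string key for a flat subtable; the real code has
    return concat(t," ") -- several such helpers for the various table shapes
end

local function pack(t,pool,index)
    local s = tabstr(t)
    local n = index[s]
    if not n then
        n = #pool + 1
        pool[n] = t
        index[s] = n
    end
    return n -- callers store this number instead of the table itself
end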

This module is sparsely documented because it is a moving target. The table format of the reader changes, and we experiment a lot with different methods for supporting features.

As with the code, we may decide to store more information in the OTF table.

Incrementing the version number will force a re-cache. We jump the number by one when there's a fix in the FontForge library, or in the code, that results in different tables. The sketch below illustrates the check.
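A hedged sketch of that rule (the names below are invented and the real check lives in the loader, not in this file): a cached table is only reused when the version recorded in it matches the version of the code that wrote it, so bumping the version invalidates every existing cache entry.

local currentversion = 2.8 -- example value only, not the real otf.version

local function cacheusable(cached)
    return cached and cached.cache_version == currentversion
end

-- usage sketch: if not cacheusable(data) then data = rebuild(fontfile) end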

---ldx]]-- - --- action handler chainproc chainmore comment --- --- gsub_single ok ok ok --- gsub_multiple ok ok not implemented yet --- gsub_alternate ok ok not implemented yet --- gsub_ligature ok ok ok --- gsub_context ok -- --- gsub_contextchain ok -- --- gsub_reversecontextchain ok -- --- chainsub -- ok --- reversesub -- ok --- gpos_mark2base ok ok --- gpos_mark2ligature ok ok --- gpos_mark2mark ok ok --- gpos_cursive ok untested --- gpos_single ok ok --- gpos_pair ok ok --- gpos_context ok -- --- gpos_contextchain ok -- --- --- todo: contextpos and contextsub and class stuff --- --- actions: --- --- handler : actions triggered by lookup --- chainproc : actions triggered by contextual lookup --- chainmore : multiple substitutions triggered by contextual lookup (e.g. fij -> f + ij) --- --- remark: the 'not implemented yet' variants will be done when we have fonts that use them --- remark: we need to check what to do with discretionaries - --- We used to have independent hashes for lookups but as the tags are unique --- we now use only one hash. If needed we can have multiple again but in that --- case I will probably prefix (i.e. rename) the lookups in the cached font file. - --- Todo: make plugin feature that operates on char/glyphnode arrays - -local concat, insert, remove = table.concat, table.insert, table.remove -local gmatch, gsub, find, match, lower, strip = string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip -local type, next, tonumber, tostring = type, next, tonumber, tostring -local lpegmatch = lpeg.match -local random = math.random -local formatters = string.formatters - -local logs, trackers, nodes, attributes = logs, trackers, nodes, attributes - -local registertracker = trackers.register - -local fonts = fonts -local otf = fonts.handlers.otf - -local trace_lookups = false registertracker("otf.lookups", function(v) trace_lookups = v end) -local trace_singles = false registertracker("otf.singles", function(v) trace_singles = v end) -local trace_multiples = false registertracker("otf.multiples", function(v) trace_multiples = v end) -local trace_alternatives = false registertracker("otf.alternatives", function(v) trace_alternatives = v end) -local trace_ligatures = false registertracker("otf.ligatures", function(v) trace_ligatures = v end) -local trace_contexts = false registertracker("otf.contexts", function(v) trace_contexts = v end) -local trace_marks = false registertracker("otf.marks", function(v) trace_marks = v end) -local trace_kerns = false registertracker("otf.kerns", function(v) trace_kerns = v end) -local trace_cursive = false registertracker("otf.cursive", function(v) trace_cursive = v end) -local trace_preparing = false registertracker("otf.preparing", function(v) trace_preparing = v end) -local trace_bugs = false registertracker("otf.bugs", function(v) trace_bugs = v end) -local trace_details = false registertracker("otf.details", function(v) trace_details = v end) -local trace_applied = false registertracker("otf.applied", function(v) trace_applied = v end) -local trace_steps = false registertracker("otf.steps", function(v) trace_steps = v end) -local trace_skips = false registertracker("otf.skips", function(v) trace_skips = v end) -local trace_directions = false registertracker("otf.directions", function(v) trace_directions = v end) - -local report_direct = logs.reporter("fonts","otf direct") -local report_subchain = logs.reporter("fonts","otf subchain") -local report_chain = logs.reporter("fonts","otf chain") -local 
report_process = logs.reporter("fonts","otf process") -local report_prepare = logs.reporter("fonts","otf prepare") -local report_warning = logs.reporter("fonts","otf warning") - -registertracker("otf.verbose_chain", function(v) otf.setcontextchain(v and "verbose") end) -registertracker("otf.normal_chain", function(v) otf.setcontextchain(v and "normal") end) - -registertracker("otf.replacements", "otf.singles,otf.multiples,otf.alternatives,otf.ligatures") -registertracker("otf.positions","otf.marks,otf.kerns,otf.cursive") -registertracker("otf.actions","otf.replacements,otf.positions") -registertracker("otf.injections","nodes.injections") - -registertracker("*otf.sample","otf.steps,otf.actions,otf.analyzing") - -local insert_node_after = node.insert_after -local delete_node = nodes.delete -local copy_node = node.copy -local find_node_tail = node.tail or node.slide -local flush_node_list = node.flush_list -local end_of_math = node.end_of_math - -local setmetatableindex = table.setmetatableindex - -local zwnj = 0x200C -local zwj = 0x200D -local wildcard = "*" -local default = "dflt" - -local nodecodes = nodes.nodecodes -local whatcodes = nodes.whatcodes -local glyphcodes = nodes.glyphcodes -local disccodes = nodes.disccodes - -local glyph_code = nodecodes.glyph -local glue_code = nodecodes.glue -local disc_code = nodecodes.disc -local whatsit_code = nodecodes.whatsit -local math_code = nodecodes.math - -local dir_code = whatcodes.dir -local localpar_code = whatcodes.localpar - -local discretionary_code = disccodes.discretionary - -local ligature_code = glyphcodes.ligature - -local privateattribute = attributes.private - --- Something is messed up: we have two mark / ligature indices, one at the injection --- end and one here ... this is bases in KE's patches but there is something fishy --- there as I'm pretty sure that for husayni we need some connection (as it's much --- more complex than an average font) but I need proper examples of all cases, not --- of only some. 
- -local a_state = privateattribute('state') -local a_markbase = privateattribute('markbase') -local a_markmark = privateattribute('markmark') -local a_markdone = privateattribute('markdone') -- assigned at the injection end -local a_cursbase = privateattribute('cursbase') -local a_curscurs = privateattribute('curscurs') -local a_cursdone = privateattribute('cursdone') -local a_kernpair = privateattribute('kernpair') -local a_ligacomp = privateattribute('ligacomp') -- assigned here (ideally it should be combined) - -local injections = nodes.injections -local setmark = injections.setmark -local setcursive = injections.setcursive -local setkern = injections.setkern -local setpair = injections.setpair - -local markonce = true -local cursonce = true -local kernonce = true - -local fonthashes = fonts.hashes -local fontdata = fonthashes.identifiers - -local otffeatures = fonts.constructors.newfeatures("otf") -local registerotffeature = otffeatures.register - -local onetimemessage = fonts.loggers.onetimemessage or function() end - -otf.defaultnodealternate = "none" -- first last - --- we share some vars here, after all, we have no nested lookups and less code - -local tfmdata = false -local characters = false -local descriptions = false -local resources = false -local marks = false -local currentfont = false -local lookuptable = false -local anchorlookups = false -local lookuptypes = false -local handlers = { } -local rlmode = 0 -local featurevalue = false - --- head is always a whatsit so we can safely assume that head is not changed - --- we use this for special testing and documentation - -local checkstep = (nodes and nodes.tracers and nodes.tracers.steppers.check) or function() end -local registerstep = (nodes and nodes.tracers and nodes.tracers.steppers.register) or function() end -local registermessage = (nodes and nodes.tracers and nodes.tracers.steppers.message) or function() end - -local function logprocess(...) - if trace_steps then - registermessage(...) - end - report_direct(...) -end - -local function logwarning(...) - report_direct(...) -end - -local f_unicode = formatters["%U"] -local f_uniname = formatters["%U (%s)"] -local f_unilist = formatters["% t (% t)"] - -local function gref(n) -- currently the same as in font-otb - if type(n) == "number" then - local description = descriptions[n] - local name = description and description.name - if name then - return f_uniname(n,name) - else - return f_unicode(n) - end - elseif n then - local num, nam = { }, { } - for i=1,#n do - local ni = n[i] - if tonumber(ni) then -- later we will start at 2 - local di = descriptions[ni] - num[i] = f_unicode(ni) - nam[i] = di and di.name or "-" - end - end - return f_unilist(num,nam) - else - return "" - end -end - -local function cref(kind,chainname,chainlookupname,lookupname,index) -- not in the mood to alias f_ - if index then - return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookupname,index) - elseif lookupname then - return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookupname) - elseif chainlookupname then - return formatters["feature %a, chain %a, sub %a"](kind,chainname,chainlookupname) - elseif chainname then - return formatters["feature %a, chain %a"](kind,chainname) - else - return formatters["feature %a"](kind) - end -end - -local function pref(kind,lookupname) - return formatters["feature %a, lookup %a"](kind,lookupname) -end - --- We can assume that languages that use marks are not hyphenated. 
We can also assume --- that at most one discretionary is present. - --- We do need components in funny kerning mode but maybe I can better reconstruct then --- as we do have the font components info available; removing components makes the --- previous code much simpler. Also, later on copying and freeing becomes easier. --- However, for arabic we need to keep them around for the sake of mark placement --- and indices. - -local function copy_glyph(g) -- next and prev are untouched ! - local components = g.components - if components then - g.components = nil - local n = copy_node(g) - g.components = components - return n - else - return copy_node(g) - end -end - --- start is a mark and we need to keep that one - -local function markstoligature(kind,lookupname,head,start,stop,char) - if start == stop and start.char == char then - return head, start - else - local prev = start.prev - local next = stop.next - start.prev = nil - stop.next = nil - local base = copy_glyph(start) - if head == start then - head = base - end - base.char = char - base.subtype = ligature_code - base.components = start - if prev then - prev.next = base - end - if next then - next.prev = base - end - base.next = next - base.prev = prev - return head, base - end -end - --- The next code is somewhat complicated by the fact that some fonts can have ligatures made --- from ligatures that themselves have marks. This was identified by Kai in for instance --- arabtype: KAF LAM SHADDA ALEF FATHA (0x0643 0x0644 0x0651 0x0627 0x064E). This becomes --- KAF LAM-ALEF with a SHADDA on the first and a FATHA op de second component. In a next --- iteration this becomes a KAF-LAM-ALEF with a SHADDA on the second and a FATHA on the --- third component. - -local function getcomponentindex(start) - if start.id ~= glyph_code then - return 0 - elseif start.subtype == ligature_code then - local i = 0 - local components = start.components - while components do - i = i + getcomponentindex(components) - components = components.next - end - return i - elseif not marks[start.char] then - return 1 - else - return 0 - end -end - --- eventually we will do positioning in an other way (needs addional w/h/d fields) - -local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound) -- brr head - if start == stop and start.char == char then - start.char = char - return head, start - end - local prev = start.prev - local next = stop.next - start.prev = nil - stop.next = nil - local base = copy_glyph(start) - if start == head then - head = base - end - base.char = char - base.subtype = ligature_code - base.components = start -- start can have components - if prev then - prev.next = base - end - if next then - next.prev = base - end - base.next = next - base.prev = prev - if not discfound then - local deletemarks = markflag ~= "mark" - local components = start - local baseindex = 0 - local componentindex = 0 - local head = base - local current = base - -- first we loop over the glyphs in start .. 
stop - while start do - local char = start.char - if not marks[char] then - baseindex = baseindex + componentindex - componentindex = getcomponentindex(start) - elseif not deletemarks then -- quite fishy - start[a_ligacomp] = baseindex + (start[a_ligacomp] or componentindex) - if trace_marks then - logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp]) - end - head, current = insert_node_after(head,current,copy_node(start)) -- unlikely that mark has components - elseif trace_marks then - logwarning("%s: delete mark %s",pref(kind,lookupname),gref(char)) - end - start = start.next - end - -- we can have one accent as part of a lookup and another following - -- local start = components -- was wrong (component scanning was introduced when more complex ligs in devanagari was added) - local start = current.next - while start and start.id == glyph_code do - local char = start.char - if marks[char] then - start[a_ligacomp] = baseindex + (start[a_ligacomp] or componentindex) - if trace_marks then - logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp]) - end - else - break - end - start = start.next - end - end - return head, base -end - -function handlers.gsub_single(head,start,kind,lookupname,replacement) - if trace_singles then - logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(start.char),gref(replacement)) - end - start.char = replacement - return head, start, true -end - -local function get_alternative_glyph(start,alternatives,value,trace_alternatives) - local n = #alternatives - if value == "random" then - local r = random(1,n) - return alternatives[r], trace_alternatives and formatters["value %a, taking %a"](value,r) - elseif value == "first" then - return alternatives[1], trace_alternatives and formatters["value %a, taking %a"](value,1) - elseif value == "last" then - return alternatives[n], trace_alternatives and formatters["value %a, taking %a"](value,n) - else - value = tonumber(value) - if type(value) ~= "number" then - return alternatives[1], trace_alternatives and formatters["invalid value %s, taking %a"](value,1) - elseif value > n then - local defaultalt = otf.defaultnodealternate - if defaultalt == "first" then - return alternatives[n], trace_alternatives and formatters["invalid value %s, taking %a"](value,1) - elseif defaultalt == "last" then - return alternatives[1], trace_alternatives and formatters["invalid value %s, taking %a"](value,n) - else - return false, trace_alternatives and formatters["invalid value %a, %s"](value,"out of range") - end - elseif value == 0 then - return start.char, trace_alternatives and formatters["invalid value %a, %s"](value,"no change") - elseif value < 1 then - return alternatives[1], trace_alternatives and formatters["invalid value %a, taking %a"](value,1) - else - return alternatives[value], trace_alternatives and formatters["value %a, taking %a"](value,value) - end - end -end - -local function multiple_glyphs(head,start,multiple,ignoremarks) - local nofmultiples = #multiple - if nofmultiples > 0 then - start.char = multiple[1] - if nofmultiples > 1 then - local sn = start.next - for k=2,nofmultiples do -- todo: use insert_node --- untested: --- --- while ignoremarks and marks[sn.char] then --- local sn = sn.next --- end - local n = copy_node(start) -- ignore components - n.char = multiple[k] - n.next = sn - n.prev = start - if sn then - sn.prev = n - end - start.next = n - start = n - end - end - return head, start, true - else - if 
trace_multiples then - logprocess("no multiple for %s",gref(start.char)) - end - return head, start, false - end -end - -function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence) - local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue - local choice, comment = get_alternative_glyph(start,alternative,value,trace_alternatives) - if choice then - if trace_alternatives then - logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(start.char),choice,gref(choice),comment) - end - start.char = choice - else - if trace_alternatives then - logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(start.char),comment) - end - end - return head, start, true -end - -function handlers.gsub_multiple(head,start,kind,lookupname,multiple,sequence) - if trace_multiples then - logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(start.char),gref(multiple)) - end - return multiple_glyphs(head,start,multiple,sequence.flags[1]) -end - -function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence) - local s, stop, discfound = start.next, nil, false - local startchar = start.char - if marks[startchar] then - while s do - local id = s.id - if id == glyph_code and s.font == currentfont and s.subtype<256 then - local lg = ligature[s.char] - if lg then - stop = s - ligature = lg - s = s.next - else - break - end - else - break - end - end - if stop then - local lig = ligature.ligature - if lig then - if trace_ligatures then - local stopchar = stop.char - head, start = markstoligature(kind,lookupname,head,start,stop,lig) - logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char)) - else - head, start = markstoligature(kind,lookupname,head,start,stop,lig) - end - return head, start, true - else - -- ok, goto next lookup - end - end - else - local skipmark = sequence.flags[1] - while s do - local id = s.id - if id == glyph_code and s.subtype<256 then - if s.font == currentfont then - local char = s.char - if skipmark and marks[char] then - s = s.next - else - local lg = ligature[char] - if lg then - stop = s - ligature = lg - s = s.next - else - break - end - end - else - break - end - elseif id == disc_code then - discfound = true - s = s.next - else - break - end - end - local lig = ligature.ligature - if lig then - if stop then - if trace_ligatures then - local stopchar = stop.char - head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound) - logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char)) - else - head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound) - end - return head, start, true - else - -- weird but happens (in some arabic font) - start.char = lig - if trace_ligatures then - logprocess("%s: replacing %s by (no real) ligature %s case 3",pref(kind,lookupname),gref(startchar),gref(lig)) - end - return head, start, true - end - else - -- weird but happens - end - end - return head, start, false -end - ---[[ldx-- -

We get hits on a mark, but we're not sure if it has to be applied, so we need to explicitly test for basechar, baselig and basemark entries.
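
As an illustration only (plain tables with made-up coordinates, not real font data): the description of a base glyph can carry several kinds of anchor sets, and a mark is only attached when the matching kind is present.

-- illustrative data, not taken from a real font
local description = {
    anchors = {
        basechar = { top = { 300, 700 } },     -- mark-to-base anchors
        baselig  = { },                        -- mark-to-ligature anchors (per component)
        basemark = { },                        -- mark-to-mark anchors
    },
}

local function baseanchors(desc, kind)         -- kind: "basechar" | "baselig" | "basemark"
    local anchors = desc and desc.anchors
    return anchors and anchors[kind]
end

local ba = baseanchors(description, "basechar")
if ba and ba.top then
    print("anchor 'top' at", ba.top[1], ba.top[2])
end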

---ldx]]-- - -function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence) - local markchar = start.char - if marks[markchar] then - local base = start.prev -- [glyph] [start=mark] - if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then - local basechar = base.char - if marks[basechar] then - while true do - base = base.prev - if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then - basechar = base.char - if not marks[basechar] then - break - end - else - if trace_bugs then - logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar)) - end - return head, start, false - end - end - end - local baseanchors = descriptions[basechar] - if baseanchors then - baseanchors = baseanchors.anchors - end - if baseanchors then - local baseanchors = baseanchors['basechar'] - if baseanchors then - local al = anchorlookups[lookupname] - for anchor,ba in next, baseanchors do - if al[anchor] then - local ma = markanchors[anchor] - if ma then - local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) - if trace_marks then - logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)", - pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) - end - return head, start, true - end - end - end - if trace_bugs then - logwarning("%s, no matching anchors for mark %s and base %s",pref(kind,lookupname),gref(markchar),gref(basechar)) - end - end - elseif trace_bugs then - -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar)) - onetimemessage(currentfont,basechar,"no base anchors",report_fonts) - end - elseif trace_bugs then - logwarning("%s: prev node is no char",pref(kind,lookupname)) - end - elseif trace_bugs then - logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar)) - end - return head, start, false -end - -function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence) - -- check chainpos variant - local markchar = start.char - if marks[markchar] then - local base = start.prev -- [glyph] [optional marks] [start=mark] - if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then - local basechar = base.char - if marks[basechar] then - while true do - base = base.prev - if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then - basechar = base.char - if not marks[basechar] then - break - end - else - if trace_bugs then - logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar)) - end - return head, start, false - end - end - end - local index = start[a_ligacomp] - local baseanchors = descriptions[basechar] - if baseanchors then - baseanchors = baseanchors.anchors - if baseanchors then - local baseanchors = baseanchors['baselig'] - if baseanchors then - local al = anchorlookups[lookupname] - for anchor, ba in next, baseanchors do - if al[anchor] then - local ma = markanchors[anchor] - if ma then - ba = ba[index] - if ba then - local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) -- index - if trace_marks then - logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)", - pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy) - end - return head, start, true - else - if trace_bugs then - logwarning("%s: no matching anchors for mark %s and baselig %s with index 
%a",pref(kind,lookupname),gref(markchar),gref(basechar),index) - end - end - end - end - end - if trace_bugs then - logwarning("%s: no matching anchors for mark %s and baselig %s",pref(kind,lookupname),gref(markchar),gref(basechar)) - end - end - end - elseif trace_bugs then - -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar)) - onetimemessage(currentfont,basechar,"no base anchors",report_fonts) - end - elseif trace_bugs then - logwarning("%s: prev node is no char",pref(kind,lookupname)) - end - elseif trace_bugs then - logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar)) - end - return head, start, false -end - -function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence) - local markchar = start.char - if marks[markchar] then - local base = start.prev -- [glyph] [basemark] [start=mark] - local slc = start[a_ligacomp] - if slc then -- a rather messy loop ... needs checking with husayni - while base do - local blc = base[a_ligacomp] - if blc and blc ~= slc then - base = base.prev - else - break - end - end - end - if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then -- subtype test can go - local basechar = base.char - local baseanchors = descriptions[basechar] - if baseanchors then - baseanchors = baseanchors.anchors - if baseanchors then - baseanchors = baseanchors['basemark'] - if baseanchors then - local al = anchorlookups[lookupname] - for anchor,ba in next, baseanchors do - if al[anchor] then - local ma = markanchors[anchor] - if ma then - local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true) - if trace_marks then - logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)", - pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) - end - return head, start, true - end - end - end - if trace_bugs then - logwarning("%s: no matching anchors for mark %s and basemark %s",pref(kind,lookupname),gref(markchar),gref(basechar)) - end - end - end - elseif trace_bugs then - -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar)) - onetimemessage(currentfont,basechar,"no base anchors",report_fonts) - end - elseif trace_bugs then - logwarning("%s: prev node is no mark",pref(kind,lookupname)) - end - elseif trace_bugs then - logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar)) - end - return head, start, false -end - -function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence) -- to be checked - local alreadydone = cursonce and start[a_cursbase] - if not alreadydone then - local done = false - local startchar = start.char - if marks[startchar] then - if trace_cursive then - logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar)) - end - else - local nxt = start.next - while not done and nxt and nxt.id == glyph_code and nxt.font == currentfont and nxt.subtype<256 do - local nextchar = nxt.char - if marks[nextchar] then - -- should not happen (maybe warning) - nxt = nxt.next - else - local entryanchors = descriptions[nextchar] - if entryanchors then - entryanchors = entryanchors.anchors - if entryanchors then - entryanchors = entryanchors['centry'] - if entryanchors then - local al = anchorlookups[lookupname] - for anchor, entry in next, entryanchors do - if al[anchor] then - local exit = exitanchors[anchor] - if exit then - local dx, dy, bound = 
setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar]) - if trace_cursive then - logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode) - end - done = true - break - end - end - end - end - end - elseif trace_bugs then - -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar)) - onetimemessage(currentfont,startchar,"no entry anchors",report_fonts) - end - break - end - end - end - return head, start, done - else - if trace_cursive and trace_details then - logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone) - end - return head, start, false - end -end - -function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence) - local startchar = start.char - local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar]) - if trace_kerns then - logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h) - end - return head, start, false -end - -function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence) - -- todo: kerns in disc nodes: pre, post, replace -> loop over disc too - -- todo: kerns in components of ligatures - local snext = start.next - if not snext then - return head, start, false - else - local prev, done = start, false - local factor = tfmdata.parameters.factor - local lookuptype = lookuptypes[lookupname] - while snext and snext.id == glyph_code and snext.font == currentfont and snext.subtype<256 do - local nextchar = snext.char - local krn = kerns[nextchar] - if not krn and marks[nextchar] then - prev = snext - snext = snext.next - else - if not krn then - -- skip - elseif type(krn) == "table" then - if lookuptype == "pair" then -- probably not needed - local a, b = krn[2], krn[3] - if a and #a > 0 then - local startchar = start.char - local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar]) - if trace_kerns then - logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h) - end - end - if b and #b > 0 then - local startchar = start.char - local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar]) - if trace_kerns then - logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h) - end - end - else -- wrong ... position has different entries - report_process("%s: check this out (old kern stuff)",pref(kind,lookupname)) - -- local a, b = krn[2], krn[6] - -- if a and a ~= 0 then - -- local k = setkern(snext,factor,rlmode,a) - -- if trace_kerns then - -- logprocess("%s: inserting first kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar)) - -- end - -- end - -- if b and b ~= 0 then - -- logwarning("%s: ignoring second kern xoff %s",pref(kind,lookupname),b*factor) - -- end - end - done = true - elseif krn ~= 0 then - local k = setkern(snext,factor,rlmode,krn) - if trace_kerns then - logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar)) - end - done = true - end - break - end - end - return head, start, done - end -end - ---[[ldx-- -

I will implement multiple chain replacements once I run into a font that uses it. It's not that complex to handle.

---ldx]]-- - -local chainmores = { } -local chainprocs = { } - -local function logprocess(...) - if trace_steps then - registermessage(...) - end - report_subchain(...) -end - -local logwarning = report_subchain - -local function logprocess(...) - if trace_steps then - registermessage(...) - end - report_chain(...) -end - -local logwarning = report_chain - --- We could share functions but that would lead to extra function calls with many --- arguments, redundant tests and confusing messages. - -function chainprocs.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname) - logwarning("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname)) - return head, start, false -end - -function chainmores.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname,n) - logprocess("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname)) - return head, start, false -end - --- The reversesub is a special case, which is why we need to store the replacements --- in a bit weird way. There is no lookup and the replacement comes from the lookup --- itself. It is meant mostly for dealing with Urdu. - -function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements) - local char = start.char - local replacement = replacements[char] - if replacement then - if trace_singles then - logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement)) - end - start.char = replacement - return head, start, true - else - return head, start, false - end -end - ---[[ldx-- -

This chain stuff is somewhat tricky since we can have a sequence of actions to be applied: single, alternate, multiple or ligature, where the ligature can be an invalid one in the sense that it replaces multiple glyphs by one, but not necessarily one that looks like the combination (i.e. it is then the counterpart of multiple). For example, the following is valid:

xxxabcdexxx [single a->A][multiple b->BCD][ligature cde->E] xxxABCDExxx

Therefore we don't really do the replacement right away here unless we have the single-lookup case. The efficiency of the replacements could be improved by deleting as little as needed, but that would also make the code even messier.
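
The example above can be mimicked with plain strings, just to show the intended net effect of the three actions; the real processing of course works on node lists, not strings.

-- plain-string mock of the example above
local s = "xxxabcdexxx"
s = s:gsub("a", "A")          -- single    : a   -> A
s = s:gsub("b", "BCD")        -- multiple  : b   -> B C D
s = s:gsub("cde", "E")        -- ligature  : cde -> E
print(s)                      -- xxxABCDExxx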

---ldx]]-- - --- local function delete_till_stop(head,start,stop,ignoremarks) -- keeps start --- local n = 1 --- if start == stop then --- -- done --- elseif ignoremarks then --- repeat -- start x x m x x stop => start m --- local next = start.next --- if not marks[next.char] then --- local components = next.components --- if components then -- probably not needed --- flush_node_list(components) --- end --- head = delete_node(head,next) --- end --- n = n + 1 --- until next == stop --- else -- start x x x stop => start --- repeat --- local next = start.next --- local components = next.components --- if components then -- probably not needed --- flush_node_list(components) --- end --- head = delete_node(head,next) --- n = n + 1 --- until next == stop --- end --- return head, n --- end - ---[[ldx-- -

Here we replace start by a single variant. First we delete the rest of the match.
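
In terms of plain tables (a mock lookuphash with a made-up lookup name, not the real data) the single case boils down to one indexed replacement:

-- mock data: a single-substitution lookup maps a codepoint to its replacement
local lookuphash = { ss01 = { [0x0061] = 0x0041 } }     -- 'a' -> 'A', made-up lookup name

local function apply_single(lookupname, char)
    local map = lookuphash[lookupname]
    local replacement = map and map[char]
    return replacement or char                           -- unchanged when there is no hit
end

print(string.format("%04X", apply_single("ss01", 0x0061)))   -- 0041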

---ldx]]-- - -function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex) - -- todo: marks ? - local current = start - local subtables = currentlookup.subtables - if #subtables > 1 then - logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," ")) - end - while current do - if current.id == glyph_code then - local currentchar = current.char - local lookupname = subtables[1] -- only 1 - local replacement = lookuphash[lookupname] - if not replacement then - if trace_bugs then - logwarning("%s: no single hits",cref(kind,chainname,chainlookupname,lookupname,chainindex)) - end - else - replacement = replacement[currentchar] - if not replacement or replacement == "" then - if trace_bugs then - logwarning("%s: no single for %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar)) - end - else - if trace_singles then - logprocess("%s: replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement)) - end - current.char = replacement - end - end - return head, start, true - elseif current == stop then - break - else - current = current.next - end - end - return head, start, false -end - -chainmores.gsub_single = chainprocs.gsub_single - ---[[ldx-- -

Here we replace start by a sequence of new glyphs. First we delete the rest of the match.
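
A toy version with an array standing in for the node list (hypothetical data): the matched slot is overwritten by the first replacement and the remaining ones are inserted after it, which is roughly what the node-based multiple handler does.

-- toy multiple substitution on an array instead of a node list
local function apply_multiple(glyphs, i, multiple)
    glyphs[i] = multiple[1]                              -- reuse the matched slot
    for k = 2, #multiple do
        table.insert(glyphs, i + k - 1, multiple[k])     -- insert the rest after it
    end
    return glyphs
end

local line = { "x", "b", "x" }
apply_multiple(line, 2, { "B", "C", "D" })               -- b -> B C D
print(table.concat(line))                                -- xBCDx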

---ldx]]-- - -function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - -- local head, n = delete_till_stop(head,start,stop) - local startchar = start.char - local subtables = currentlookup.subtables - local lookupname = subtables[1] - local replacements = lookuphash[lookupname] - if not replacements then - if trace_bugs then - logwarning("%s: no multiple hits",cref(kind,chainname,chainlookupname,lookupname)) - end - else - replacements = replacements[startchar] - if not replacements or replacement == "" then - if trace_bugs then - logwarning("%s: no multiple for %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar)) - end - else - if trace_multiples then - logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements)) - end - return multiple_glyphs(head,start,replacements,currentlookup.flags[1]) - end - end - return head, start, false -end - -chainmores.gsub_multiple = chainprocs.gsub_multiple - ---[[ldx-- -

Here we replace start by a new glyph. First we delete the rest of the match.
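
The selection logic for alternates is plain Lua and easy to mimic; this sketch only covers the regular cases (random, first, last, or an explicit index) and simply falls back to the first alternative for anything it cannot interpret.

-- simplified alternative selection; the real code handles invalid values in more detail
local function pick_alternative(alternatives, value)
    local n = #alternatives
    if value == "random" then
        return alternatives[math.random(1, n)]
    elseif value == "first" then
        return alternatives[1]
    elseif value == "last" then
        return alternatives[n]
    else
        local i = tonumber(value)
        return i and alternatives[i] or alternatives[1]
    end
end

print(pick_alternative({ "a.alt1", "a.alt2", "a.alt3" }, "last"))   -- a.alt3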

---ldx]]-- - --- char_1 mark_1 -> char_x mark_1 (ignore marks) --- char_1 mark_1 -> char_x - --- to be checked: do we always have just one glyph? --- we can also have alternates for marks --- marks come last anyway --- are there cases where we need to delete the mark - -function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - local current = start - local subtables = currentlookup.subtables - local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue - while current do - if current.id == glyph_code then -- is this check needed? - local currentchar = current.char - local lookupname = subtables[1] - local alternatives = lookuphash[lookupname] - if not alternatives then - if trace_bugs then - logwarning("%s: no alternative hit",cref(kind,chainname,chainlookupname,lookupname)) - end - else - alternatives = alternatives[currentchar] - if alternatives then - local choice, comment = get_alternative_glyph(current,alternatives,value,trace_alternatives) - if choice then - if trace_alternatives then - logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(char),choice,gref(choice),comment) - end - start.char = choice - else - if trace_alternatives then - logwarning("%s: no variant %a for %s, %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(char),comment) - end - end - elseif trace_bugs then - logwarning("%s: no alternative for %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar),comment) - end - end - return head, start, true - elseif current == stop then - break - else - current = current.next - end - end - return head, start, false -end - -chainmores.gsub_alternate = chainprocs.gsub_alternate - ---[[ldx-- -

When we replace ligatures we use a helper that handles the marks. I might change this function (move the code inline and handle the marks in a separate function). We assume rather simple ligatures (no complex disc nodes).
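
The ligature data is essentially a trie indexed by successive characters, with the replacement stored in a ligature field; a plain-table mock with made-up glyph names shows how the longest match is found.

-- mock ligature trie: f + f + i -> f_f_i, f + i -> f_i (made-up names)
local ligatures = {
    f = { i = { ligature = "f_i" },
          f = { i = { ligature = "f_f_i" } } },
}

local function longest_ligature(glyphs, i)
    local tree, last, lig = ligatures[glyphs[i]], i, nil
    while tree do
        if tree.ligature then lig, last = tree.ligature, i end
        i = i + 1
        tree = glyphs[i] and tree[glyphs[i]]
    end
    return lig, last          -- replacement (if any) and index of the last consumed glyph
end

print(longest_ligature({ "f", "f", "i", "n" }, 1))       -- f_f_i  3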

---ldx]]-- - -function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex) - local startchar = start.char - local subtables = currentlookup.subtables - local lookupname = subtables[1] - local ligatures = lookuphash[lookupname] - if not ligatures then - if trace_bugs then - logwarning("%s: no ligature hits",cref(kind,chainname,chainlookupname,lookupname,chainindex)) - end - else - ligatures = ligatures[startchar] - if not ligatures then - if trace_bugs then - logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar)) - end - else - local s = start.next - local discfound = false - local last = stop - local nofreplacements = 0 - local skipmark = currentlookup.flags[1] - while s do - local id = s.id - if id == disc_code then - s = s.next - discfound = true - else - local schar = s.char - if skipmark and marks[schar] then -- marks - s = s.next - else - local lg = ligatures[schar] - if lg then - ligatures, last, nofreplacements = lg, s, nofreplacements + 1 - if s == stop then - break - else - s = s.next - end - else - break - end - end - end - end - local l2 = ligatures.ligature - if l2 then - if chainindex then - stop = last - end - if trace_ligatures then - if start == stop then - logprocess("%s: replacing character %s by ligature %s case 3",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2)) - else - logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char),gref(l2)) - end - end - head, start = toligature(kind,lookupname,head,start,stop,l2,currentlookup.flags[1],discfound) - return head, start, true, nofreplacements - elseif trace_bugs then - if start == stop then - logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar)) - else - logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char)) - end - end - end - end - return head, start, false, 0 -end - -chainmores.gsub_ligature = chainprocs.gsub_ligature - -function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - local markchar = start.char - if marks[markchar] then - local subtables = currentlookup.subtables - local lookupname = subtables[1] - local markanchors = lookuphash[lookupname] - if markanchors then - markanchors = markanchors[markchar] - end - if markanchors then - local base = start.prev -- [glyph] [start=mark] - if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then - local basechar = base.char - if marks[basechar] then - while true do - base = base.prev - if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then - basechar = base.char - if not marks[basechar] then - break - end - else - if trace_bugs then - logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar)) - end - return head, start, false - end - end - end - local baseanchors = descriptions[basechar].anchors - if baseanchors then - local baseanchors = baseanchors['basechar'] - if baseanchors then - local al = anchorlookups[lookupname] - for anchor,ba in next, baseanchors do - if al[anchor] then - local ma = markanchors[anchor] - if ma then - local dx, dy, bound = 
setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) - if trace_marks then - logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)", - cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) - end - return head, start, true - end - end - end - if trace_bugs then - logwarning("%s, no matching anchors for mark %s and base %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar)) - end - end - end - elseif trace_bugs then - logwarning("%s: prev node is no char",cref(kind,chainname,chainlookupname,lookupname)) - end - elseif trace_bugs then - logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar)) - end - elseif trace_bugs then - logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar)) - end - return head, start, false -end - -function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - local markchar = start.char - if marks[markchar] then - local subtables = currentlookup.subtables - local lookupname = subtables[1] - local markanchors = lookuphash[lookupname] - if markanchors then - markanchors = markanchors[markchar] - end - if markanchors then - local base = start.prev -- [glyph] [optional marks] [start=mark] - if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then - local basechar = base.char - if marks[basechar] then - while true do - base = base.prev - if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then - basechar = base.char - if not marks[basechar] then - break - end - else - if trace_bugs then - logwarning("%s: no base for mark %s",cref(kind,chainname,chainlookupname,lookupname),markchar) - end - return head, start, false - end - end - end - -- todo: like marks a ligatures hash - local index = start[a_ligacomp] - local baseanchors = descriptions[basechar].anchors - if baseanchors then - local baseanchors = baseanchors['baselig'] - if baseanchors then - local al = anchorlookups[lookupname] - for anchor,ba in next, baseanchors do - if al[anchor] then - local ma = markanchors[anchor] - if ma then - ba = ba[index] - if ba then - local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) -- index - if trace_marks then - logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)", - cref(kind,chainname,chainlookupname,lookupname),anchor,a or bound,gref(markchar),gref(basechar),index,dx,dy) - end - return head, start, true - end - end - end - end - if trace_bugs then - logwarning("%s: no matching anchors for mark %s and baselig %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar)) - end - end - end - elseif trace_bugs then - logwarning("feature %s, lookup %s: prev node is no char",kind,lookupname) - end - elseif trace_bugs then - logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar)) - end - elseif trace_bugs then - logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar)) - end - return head, start, false -end - -function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - local markchar = start.char - if marks[markchar] then - -- local alreadydone = markonce and start[a_markmark] - -- if not alreadydone then - -- local markanchors = 
descriptions[markchar].anchors markanchors = markanchors and markanchors.mark - local subtables = currentlookup.subtables - local lookupname = subtables[1] - local markanchors = lookuphash[lookupname] - if markanchors then - markanchors = markanchors[markchar] - end - if markanchors then - local base = start.prev -- [glyph] [basemark] [start=mark] - local slc = start[a_ligacomp] - if slc then -- a rather messy loop ... needs checking with husayni - while base do - local blc = base[a_ligacomp] - if blc and blc ~= slc then - base = base.prev - else - break - end - end - end - if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then -- subtype test can go - local basechar = base.char - local baseanchors = descriptions[basechar].anchors - if baseanchors then - baseanchors = baseanchors['basemark'] - if baseanchors then - local al = anchorlookups[lookupname] - for anchor,ba in next, baseanchors do - if al[anchor] then - local ma = markanchors[anchor] - if ma then - local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true) - if trace_marks then - logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)", - cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) - end - return head, start, true - end - end - end - if trace_bugs then - logwarning("%s: no matching anchors for mark %s and basemark %s",gref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar)) - end - end - end - elseif trace_bugs then - logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname)) - end - elseif trace_bugs then - logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar)) - end - -- elseif trace_marks and trace_details then - -- logprocess("%s, mark %s is already bound (n=%s), ignoring mark2mark",pref(kind,lookupname),gref(markchar),alreadydone) - -- end - elseif trace_bugs then - logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar)) - end - return head, start, false -end - -function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - local alreadydone = cursonce and start[a_cursbase] - if not alreadydone then - local startchar = start.char - local subtables = currentlookup.subtables - local lookupname = subtables[1] - local exitanchors = lookuphash[lookupname] - if exitanchors then - exitanchors = exitanchors[startchar] - end - if exitanchors then - local done = false - if marks[startchar] then - if trace_cursive then - logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar)) - end - else - local nxt = start.next - while not done and nxt and nxt.id == glyph_code and nxt.font == currentfont and nxt.subtype<256 do - local nextchar = nxt.char - if marks[nextchar] then - -- should not happen (maybe warning) - nxt = nxt.next - else - local entryanchors = descriptions[nextchar] - if entryanchors then - entryanchors = entryanchors.anchors - if entryanchors then - entryanchors = entryanchors['centry'] - if entryanchors then - local al = anchorlookups[lookupname] - for anchor, entry in next, entryanchors do - if al[anchor] then - local exit = exitanchors[anchor] - if exit then - local dx, dy, bound = setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar]) - if trace_cursive then - logprocess("%s: moving %s to %s cursive (%p,%p) using 
anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode) - end - done = true - break - end - end - end - end - end - elseif trace_bugs then - -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar)) - onetimemessage(currentfont,startchar,"no entry anchors",report_fonts) - end - break - end - end - end - return head, start, done - else - if trace_cursive and trace_details then - logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone) - end - return head, start, false - end - end - return head, start, false -end - -function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence) - -- untested .. needs checking for the new model - local startchar = start.char - local subtables = currentlookup.subtables - local lookupname = subtables[1] - local kerns = lookuphash[lookupname] - if kerns then - kerns = kerns[startchar] -- needed ? - if kerns then - local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar]) - if trace_kerns then - logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h) - end - end - end - return head, start, false -end - -chainmores.gpos_single = chainprocs.gpos_single -- okay? - --- when machines become faster i will make a shared function - -function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence) - local snext = start.next - if snext then - local startchar = start.char - local subtables = currentlookup.subtables - local lookupname = subtables[1] - local kerns = lookuphash[lookupname] - if kerns then - kerns = kerns[startchar] - if kerns then - local lookuptype = lookuptypes[lookupname] - local prev, done = start, false - local factor = tfmdata.parameters.factor - while snext and snext.id == glyph_code and snext.font == currentfont and snext.subtype<256 do - local nextchar = snext.char - local krn = kerns[nextchar] - if not krn and marks[nextchar] then - prev = snext - snext = snext.next - else - if not krn then - -- skip - elseif type(krn) == "table" then - if lookuptype == "pair" then - local a, b = krn[2], krn[3] - if a and #a > 0 then - local startchar = start.char - local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar]) - if trace_kerns then - logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h) - end - end - if b and #b > 0 then - local startchar = start.char - local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar]) - if trace_kerns then - logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h) - end - end - else - report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname)) - local a, b = krn[2], krn[6] - if a and a ~= 0 then - local k = setkern(snext,factor,rlmode,a) - if trace_kerns then - logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar)) - end - end - if b and b ~= 0 then - logwarning("%s: ignoring second kern xoff %s",cref(kind,chainname,chainlookupname),b*factor) - end - 
end - done = true - elseif krn ~= 0 then - local k = setkern(snext,factor,rlmode,krn) - if trace_kerns then - logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar)) - end - done = true - end - break - end - end - return head, start, done - end - end - end - return head, start, false -end - -chainmores.gpos_pair = chainprocs.gpos_pair -- okay? - --- what pointer to return, spec says stop --- to be discussed ... is bidi changer a space? --- elseif char == zwnj and sequence[n][32] then -- brrr - --- somehow l or f is global --- we don't need to pass the currentcontext, saves a bit --- make a slow variant then can be activated but with more tracing - -local function show_skip(kind,chainname,char,ck,class) - if ck[9] then - logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a, %a => %a",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10]) - else - logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a",cref(kind,chainname),gref(char),class,ck[1],ck[2]) - end -end - -local function normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash) - -- local rule, lookuptype, sequence, f, l, lookups = ck[1], ck[2] ,ck[3], ck[4], ck[5], ck[6] - local flags = sequence.flags - local done = false - local skipmark = flags[1] - local skipligature = flags[2] - local skipbase = flags[3] - local someskip = skipmark or skipligature or skipbase -- could be stored in flags for a fast test (hm, flags could be false !) - local markclass = sequence.markclass -- todo, first we need a proper test - local skipped = false - for k=1,#contexts do - local match = true - local current = start - local last = start - local ck = contexts[k] - local seq = ck[3] - local s = #seq - -- f..l = mid string - if s == 1 then - -- never happens - match = current.id == glyph_code and current.font == currentfont and current.subtype<256 and seq[1][current.char] - else - -- maybe we need a better space check (maybe check for glue or category or combination) - -- we cannot optimize for n=2 because there can be disc nodes - local f, l = ck[4], ck[5] - -- current match - if f == 1 and f == l then -- current only - -- already a hit - -- match = true - else -- before/current/after | before/current | current/after - -- no need to test first hit (to be optimized) - if f == l then -- new, else last out of sync (f is > 1) - -- match = true - else - local n = f + 1 - last = last.next - while n <= l do - if last then - local id = last.id - if id == glyph_code then - if last.font == currentfont and last.subtype<256 then - local char = last.char - local ccd = descriptions[char] - if ccd then - local class = ccd.class - if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then - skipped = true - if trace_skips then - show_skip(kind,chainname,char,ck,class) - end - last = last.next - elseif seq[n][char] then - if n < l then - last = last.next - end - n = n + 1 - else - match = false - break - end - else - match = false - break - end - else - match = false - break - end - elseif id == disc_code then - last = last.next - else - match = false - break - end - else - match = false - break - end - end - end - end - -- before - if match and f > 1 then - local prev = start.prev - if prev then - local n = f-1 - while n >= 1 do - if prev then - local id = prev.id - if id == glyph_code then - if prev.font == currentfont and prev.subtype<256 then -- normal char - local 
char = prev.char - local ccd = descriptions[char] - if ccd then - local class = ccd.class - if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then - skipped = true - if trace_skips then - show_skip(kind,chainname,char,ck,class) - end - elseif seq[n][char] then - n = n -1 - else - match = false - break - end - else - match = false - break - end - else - match = false - break - end - elseif id == disc_code then - -- skip 'm - elseif seq[n][32] then - n = n -1 - else - match = false - break - end - prev = prev.prev - elseif seq[n][32] then -- somewhat special, as zapfino can have many preceding spaces - n = n -1 - else - match = false - break - end - end - elseif f == 2 then - match = seq[1][32] - else - for n=f-1,1 do - if not seq[n][32] then - match = false - break - end - end - end - end - -- after - if match and s > l then - local current = last and last.next - if current then - -- removed optimization for s-l == 1, we have to deal with marks anyway - local n = l + 1 - while n <= s do - if current then - local id = current.id - if id == glyph_code then - if current.font == currentfont and current.subtype<256 then -- normal char - local char = current.char - local ccd = descriptions[char] - if ccd then - local class = ccd.class - if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then - skipped = true - if trace_skips then - show_skip(kind,chainname,char,ck,class) - end - elseif seq[n][char] then - n = n + 1 - else - match = false - break - end - else - match = false - break - end - else - match = false - break - end - elseif id == disc_code then - -- skip 'm - elseif seq[n][32] then -- brrr - n = n + 1 - else - match = false - break - end - current = current.next - elseif seq[n][32] then - n = n + 1 - else - match = false - break - end - end - elseif s-l == 1 then - match = seq[s][32] - else - for n=l+1,s do - if not seq[n][32] then - match = false - break - end - end - end - end - end - if match then - -- ck == currentcontext - if trace_contexts then - local rule, lookuptype, f, l = ck[1], ck[2], ck[4], ck[5] - local char = start.char - if ck[9] then - logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a, %a => %a", - cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10]) - else - logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a", - cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype) - end - end - local chainlookups = ck[6] - if chainlookups then - local nofchainlookups = #chainlookups - -- we can speed this up if needed - if nofchainlookups == 1 then - local chainlookupname = chainlookups[1] - local chainlookup = lookuptable[chainlookupname] - if chainlookup then - local cp = chainprocs[chainlookup.type] - if cp then - local ok - head, start, ok = cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence) - if ok then - done = true - end - else - logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type) - end - else -- shouldn't happen - logprocess("%s is not yet supported",cref(kind,chainname,chainlookupname)) - end - else - local i = 1 - repeat - if skipped then - while true do - local char = start.char - local ccd = descriptions[char] - if ccd then - local class = ccd.class - if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and 
not markclass[char]) then - start = start.next - else - break - end - else - break - end - end - end - local chainlookupname = chainlookups[i] - local chainlookup = lookuptable[chainlookupname] - if not chainlookup then - -- okay, n matches, < n replacements - i = i + 1 - else - local cp = chainmores[chainlookup.type] - if not cp then - -- actually an error - logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type) - i = i + 1 - else - local ok, n - head, start, ok, n = cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence) - -- messy since last can be changed ! - if ok then - done = true - -- skip next one(s) if ligature - i = i + (n or 1) - else - i = i + 1 - end - end - end - if start then - start = start.next - else - -- weird - end - until i > nofchainlookups - end - else - local replacements = ck[7] - if replacements then - head, start, done = chainprocs.reversesub(head,start,last,kind,chainname,ck,lookuphash,replacements) -- sequence - else - done = true -- can be meant to be skipped - if trace_contexts then - logprocess("%s: skipping match",cref(kind,chainname)) - end - end - end - end - end - return head, start, done -end - --- Because we want to keep this elsewhere (an because speed is less an issue) we --- pass the font id so that the verbose variant can access the relevant helper tables. - -local verbose_handle_contextchain = function(font,...) - logwarning("no verbose handler installed, reverting to 'normal'") - otf.setcontextchain() - return normal_handle_contextchain(...) -end - -otf.chainhandlers = { - normal = normal_handle_contextchain, - verbose = verbose_handle_contextchain, -} - -function otf.setcontextchain(method) - if not method or method == "normal" or not otf.chainhandlers[method] then - if handlers.contextchain then -- no need for a message while making the format - logwarning("installing normal contextchain handler") - end - handlers.contextchain = normal_handle_contextchain - else - logwarning("installing contextchain handler %a",method) - local handler = otf.chainhandlers[method] - handlers.contextchain = function(...) - return handler(currentfont,...) -- hm, get rid of ... - end - end - handlers.gsub_context = handlers.contextchain - handlers.gsub_contextchain = handlers.contextchain - handlers.gsub_reversecontextchain = handlers.contextchain - handlers.gpos_contextchain = handlers.contextchain - handlers.gpos_context = handlers.contextchain -end - -otf.setcontextchain() - -local missing = { } -- we only report once - -local function logprocess(...) - if trace_steps then - registermessage(...) - end - report_process(...) 
-end - -local logwarning = report_process - -local function report_missing_cache(typ,lookup) - local f = missing[currentfont] if not f then f = { } missing[currentfont] = f end - local t = f[typ] if not t then t = { } f[typ] = t end - if not t[lookup] then - t[lookup] = true - logwarning("missing cache for lookup %a, type %a, font %a, name %a",lookup,typ,currentfont,tfmdata.properties.fullname) - end -end - -local resolved = { } -- we only resolve a font,script,language pair once - --- todo: pass all these 'locals' in a table - -local lookuphashes = { } - -setmetatableindex(lookuphashes, function(t,font) - local lookuphash = fontdata[font].resources.lookuphash - if not lookuphash or not next(lookuphash) then - lookuphash = false - end - t[font] = lookuphash - return lookuphash -end) - --- fonts.hashes.lookups = lookuphashes - -local autofeatures = fonts.analyzers.features -- was: constants - -local function initialize(sequence,script,language,enabled) - local features = sequence.features - if features then - for kind, scripts in next, features do - local valid = enabled[kind] - if valid then - local languages = scripts[script] or scripts[wildcard] - if languages and (languages[language] or languages[wildcard]) then - return { valid, autofeatures[kind] or false, sequence.chain or 0, kind, sequence } - end - end - end - end - return false -end - -function otf.dataset(tfmdata,font) -- generic variant, overloaded in context - local shared = tfmdata.shared - local properties = tfmdata.properties - local language = properties.language or "dflt" - local script = properties.script or "dflt" - local enabled = shared.features - local res = resolved[font] - if not res then - res = { } - resolved[font] = res - end - local rs = res[script] - if not rs then - rs = { } - res[script] = rs - end - local rl = rs[language] - if not rl then - rl = { - -- indexed but we can also add specific data by key - } - rs[language] = rl - local sequences = tfmdata.resources.sequences --- setmetatableindex(rl, function(t,k) --- if type(k) == "number" then --- local v = enabled and initialize(sequences[k],script,language,enabled) --- t[k] = v --- return v --- end --- end) -for s=1,#sequences do - local v = enabled and initialize(sequences[s],script,language,enabled) - if v then - rl[#rl+1] = v - end -end - end - return rl -end - --- elseif id == glue_code then --- if p[5] then -- chain --- local pc = pp[32] --- if pc then --- start, ok = start, false -- p[1](start,kind,p[2],pc,p[3],p[4]) --- if ok then --- done = true --- end --- if start then start = start.next end --- else --- start = start.next --- end --- else --- start = start.next --- end - --- there will be a new direction parser (pre-parsed etc) - --- less bytecode: 290 -> 254 --- --- attr = attr or false --- --- local a = getattr(start,0) --- if (a == attr and (not attribute or getattr(start,a_state) == attribute)) or (not attribute or getattr(start,a_state) == attribute) then --- -- the action --- end - -local function featuresprocessor(head,font,attr) - - local lookuphash = lookuphashes[font] -- we can also check sequences here - - if not lookuphash then - return head, false - end - - if trace_steps then - checkstep(head) - end - - tfmdata = fontdata[font] - descriptions = tfmdata.descriptions - characters = tfmdata.characters - resources = tfmdata.resources - - marks = resources.marks - anchorlookups = resources.lookup_to_anchor - lookuptable = resources.lookups - lookuptypes = resources.lookuptypes - - currentfont = font - rlmode = 0 - - local sequences = 
resources.sequences - local done = false - local datasets = otf.dataset(tfmdata,font,attr) - - local dirstack = { } -- could move outside function - - -- We could work on sub start-stop ranges instead but I wonder if there is that - -- much speed gain (experiments showed that it made not much sense) and we need - -- to keep track of directions anyway. Also at some point I want to play with - -- font interactions and then we do need the full sweeps. - - -- Keeping track of the headnode is needed for devanagari (I generalized it a bit - -- so that multiple cases are also covered.) - - for s=1,#datasets do - local dataset = datasets[s] - featurevalue = dataset[1] -- todo: pass to function instead of using a global - - local sequence = dataset[5] -- sequences[s] -- also dataset[5] - local rlparmode = 0 - local topstack = 0 - local success = false - local attribute = dataset[2] - local chain = dataset[3] -- sequence.chain or 0 - local typ = sequence.type - local subtables = sequence.subtables - if chain < 0 then - -- this is a limited case, no special treatments like 'init' etc - local handler = handlers[typ] - -- we need to get rid of this slide! probably no longer needed in latest luatex - local start = find_node_tail(head) -- slow (we can store tail because there's always a skip at the end): todo - while start do - local id = start.id - if id == glyph_code then - if start.font == font and start.subtype<256 then - local a = start[0] - if a then - a = a == attr - else - a = true - end - if a then - for i=1,#subtables do - local lookupname = subtables[i] - local lookupcache = lookuphash[lookupname] - if lookupcache then - local lookupmatch = lookupcache[start.char] - if lookupmatch then - head, start, success = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) - if success then - break - end - end - else - report_missing_cache(typ,lookupname) - end - end - if start then start = start.prev end - else - start = start.prev - end - else - start = start.prev - end - else - start = start.prev - end - end - else - local handler = handlers[typ] - local ns = #subtables - local start = head -- local ? - rlmode = 0 -- to be checked ? 
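For orientation while reading the rest of this hunk: the numeric indices used on the dataset entries below come from initialize () earlier in this file. The following is an illustrative sketch of that layout only, not part of the patch; the feature name and the subtable name are made up.

local example_dataset_entry = {
  true,            -- [1] featurevalue: the value the feature was requested with
  false,           -- [2] attribute: analyzer state required for the feature, or false
  0,               -- [3] chain: sequence.chain or 0 (negative selects the reverse branch below)
  "liga",          -- [4] kind: the feature name handed on to the lookup handlers
  {                -- [5] the sequence itself, carrying type and subtables
    type      = "gsub_ligature",
    subtables = { "s_l_0" },     -- hypothetical subtable name
  },
}
print (example_dataset_entry[4]) --> liga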
- if ns == 1 then -- happens often - local lookupname = subtables[1] - local lookupcache = lookuphash[lookupname] - if not lookupcache then -- also check for empty cache - report_missing_cache(typ,lookupname) - else - - local function subrun(start) - -- mostly for gsub, gpos would demand a more clever approach - local head = start - local done = false - while start do - local id = start.id - if id == glyph_code and start.font == font and start.subtype <256 then - local a = start[0] - if a then - a = (a == attr) and (not attribute or start[a_state] == attribute) - else - a = not attribute or start[a_state] == attribute - end - if a then - local lookupmatch = lookupcache[start.char] - if lookupmatch then - -- sequence kan weg - local ok - head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) - if ok then - done = true - end - end - if start then start = start.next end - else - start = start.next - end - else - start = start.next - end - end - if done then - success = true - return head - end - end - - local function kerndisc(disc) -- we can assume that prev and next are glyphs - local prev = disc.prev - local next = disc.next - if prev and next then - prev.next = next - -- next.prev = prev - local a = prev[0] - if a then - a = (a == attr) and (not attribute or prev[a_state] == attribute) - else - a = not attribute or prev[a_state] == attribute - end - if a then - local lookupmatch = lookupcache[prev.char] - if lookupmatch then - -- sequence kan weg - local h, d, ok = handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) - if ok then - done = true - success = true - end - end - end - prev.next = disc - -- next.prev = disc - end - return next - end - - while start do - local id = start.id - if id == glyph_code then - if start.font == font and start.subtype<256 then - local a = start[0] - if a then - a = (a == attr) and (not attribute or start[a_state] == attribute) - else - a = not attribute or start[a_state] == attribute - end - if a then - local lookupmatch = lookupcache[start.char] - if lookupmatch then - -- sequence kan weg - local ok - head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) - if ok then - success = true - end - end - if start then start = start.next end - else - start = start.next - end - else - start = start.next - end - elseif id == disc_code then - -- mostly for gsub - if start.subtype == discretionary_code then - local pre = start.pre - if pre then - local new = subrun(pre) - if new then start.pre = new end - end - local post = start.post - if post then - local new = subrun(post) - if new then start.post = new end - end - local replace = start.replace - if replace then - local new = subrun(replace) - if new then start.replace = new end - end -elseif typ == "gpos_single" or typ == "gpos_pair" then - kerndisc(start) - end - start = start.next - elseif id == whatsit_code then -- will be function - local subtype = start.subtype - if subtype == dir_code then - local dir = start.dir - if dir == "+TRT" or dir == "+TLT" then - topstack = topstack + 1 - dirstack[topstack] = dir - elseif dir == "-TRT" or dir == "-TLT" then - topstack = topstack - 1 - end - local newdir = dirstack[topstack] - if newdir == "+TRT" then - rlmode = -1 - elseif newdir == "+TLT" then - rlmode = 1 - else - rlmode = rlparmode - end - if trace_directions then - report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir) - end - elseif 
subtype == localpar_code then - local dir = start.dir - if dir == "TRT" then - rlparmode = -1 - elseif dir == "TLT" then - rlparmode = 1 - else - rlparmode = 0 - end - -- one might wonder if the par dir should be looked at, so we might as well drop the next line - rlmode = rlparmode - if trace_directions then - report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode) - end - end - start = start.next - elseif id == math_code then - start = end_of_math(start).next - else - start = start.next - end - end - end - else - - local function subrun(start) - -- mostly for gsub, gpos would demand a more clever approach - local head = start - local done = false - while start do - local id = start.id - if id == glyph_code and start.id == font and start.subtype <256 then - local a = start[0] - if a then - a = (a == attr) and (not attribute or start[a_state] == attribute) - else - a = not attribute or start[a_state] == attribute - end - if a then - for i=1,ns do - local lookupname = subtables[i] - local lookupcache = lookuphash[lookupname] - if lookupcache then - local lookupmatch = lookupcache[start.char] - if lookupmatch then - -- we could move all code inline but that makes things even more unreadable - local ok - head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) - if ok then - done = true - break - elseif not start then - -- don't ask why ... shouldn't happen - break - end - end - else - report_missing_cache(typ,lookupname) - end - end - if start then start = start.next end - else - start = start.next - end - else - start = start.next - end - end - if done then - success = true - return head - end - end - - local function kerndisc(disc) -- we can assume that prev and next are glyphs - local prev = disc.prev - local next = disc.next - if prev and next then - prev.next = next - -- next.prev = prev - local a = prev[0] - if a then - a = (a == attr) and (not attribute or prev[a_state] == attribute) - else - a = not attribute or prev[a_state] == attribute - end - if a then - for i=1,ns do - local lookupname = subtables[i] - local lookupcache = lookuphash[lookupname] - if lookupcache then - local lookupmatch = lookupcache[prev.char] - if lookupmatch then - -- we could move all code inline but that makes things even more unreadable - local h, d, ok = handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) - if ok then - done = true - break - end - end - else - report_missing_cache(typ,lookupname) - end - end - end - prev.next = disc - -- next.prev = disc - end - return next - end - - while start do - local id = start.id - if id == glyph_code then - if start.font == font and start.subtype<256 then - local a = start[0] - if a then - a = (a == attr) and (not attribute or start[a_state] == attribute) - else - a = not attribute or start[a_state] == attribute - end - if a then - for i=1,ns do - local lookupname = subtables[i] - local lookupcache = lookuphash[lookupname] - if lookupcache then - local lookupmatch = lookupcache[start.char] - if lookupmatch then - -- we could move all code inline but that makes things even more unreadable - local ok - head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) - if ok then - success = true - break - elseif not start then - -- don't ask why ... 
shouldn't happen - break - end - end - else - report_missing_cache(typ,lookupname) - end - end - if start then start = start.next end - else - start = start.next - end - else - start = start.next - end - elseif id == disc_code then - -- mostly for gsub - if start.subtype == discretionary_code then - local pre = start.pre - if pre then - local new = subrun(pre) - if new then start.pre = new end - end - local post = start.post - if post then - local new = subrun(post) - if new then start.post = new end - end - local replace = start.replace - if replace then - local new = subrun(replace) - if new then start.replace = new end - end -elseif typ == "gpos_single" or typ == "gpos_pair" then - kerndisc(start) - end - start = start.next - elseif id == whatsit_code then - local subtype = start.subtype - if subtype == dir_code then - local dir = start.dir - if dir == "+TRT" or dir == "+TLT" then - topstack = topstack + 1 - dirstack[topstack] = dir - elseif dir == "-TRT" or dir == "-TLT" then - topstack = topstack - 1 - end - local newdir = dirstack[topstack] - if newdir == "+TRT" then - rlmode = -1 - elseif newdir == "+TLT" then - rlmode = 1 - else - rlmode = rlparmode - end - if trace_directions then - report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir) - end - elseif subtype == localpar_code then - local dir = start.dir - if dir == "TRT" then - rlparmode = -1 - elseif dir == "TLT" then - rlparmode = 1 - else - rlparmode = 0 - end - rlmode = rlparmode - if trace_directions then - report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode) - end - end - start = start.next - elseif id == math_code then - start = end_of_math(start).next - else - start = start.next - end - end - end - end - if success then - done = true - end - if trace_steps then -- ? - registerstep(head) - end - end - return head, done -end - -local function generic(lookupdata,lookupname,unicode,lookuphash) - local target = lookuphash[lookupname] - if target then - target[unicode] = lookupdata - else - lookuphash[lookupname] = { [unicode] = lookupdata } - end -end - -local action = { - - substitution = generic, - multiple = generic, - alternate = generic, - position = generic, - - ligature = function(lookupdata,lookupname,unicode,lookuphash) - local target = lookuphash[lookupname] - if not target then - target = { } - lookuphash[lookupname] = target - end - for i=1,#lookupdata do - local li = lookupdata[i] - local tu = target[li] - if not tu then - tu = { } - target[li] = tu - end - target = tu - end - target.ligature = unicode - end, - - pair = function(lookupdata,lookupname,unicode,lookuphash) - local target = lookuphash[lookupname] - if not target then - target = { } - lookuphash[lookupname] = target - end - local others = target[unicode] - local paired = lookupdata[1] - if others then - others[paired] = lookupdata - else - others = { [paired] = lookupdata } - target[unicode] = others - end - end, - -} - -local function prepare_lookups(tfmdata) - - local rawdata = tfmdata.shared.rawdata - local resources = rawdata.resources - local lookuphash = resources.lookuphash - local anchor_to_lookup = resources.anchor_to_lookup - local lookup_to_anchor = resources.lookup_to_anchor - local lookuptypes = resources.lookuptypes - local characters = tfmdata.characters - local descriptions = tfmdata.descriptions - - -- we cannot free the entries in the descriptions as sometimes we access - -- then directly (for instance anchors) ... 
selectively freeing does save - -- much memory as it's only a reference to a table and the slot in the - -- description hash is not freed anyway - - for unicode, character in next, characters do -- we cannot loop over descriptions ! - - local description = descriptions[unicode] - - if description then - - local lookups = description.slookups - if lookups then - for lookupname, lookupdata in next, lookups do - action[lookuptypes[lookupname]](lookupdata,lookupname,unicode,lookuphash) - end - end - - local lookups = description.mlookups - if lookups then - for lookupname, lookuplist in next, lookups do - local lookuptype = lookuptypes[lookupname] - for l=1,#lookuplist do - local lookupdata = lookuplist[l] - action[lookuptype](lookupdata,lookupname,unicode,lookuphash) - end - end - end - - local list = description.kerns - if list then - for lookup, krn in next, list do -- ref to glyph, saves lookup - local target = lookuphash[lookup] - if target then - target[unicode] = krn - else - lookuphash[lookup] = { [unicode] = krn } - end - end - end - - local list = description.anchors - if list then - for typ, anchors in next, list do -- types - if typ == "mark" or typ == "cexit" then -- or entry? - for name, anchor in next, anchors do - local lookups = anchor_to_lookup[name] - if lookups then - for lookup, _ in next, lookups do - local target = lookuphash[lookup] - if target then - target[unicode] = anchors - else - lookuphash[lookup] = { [unicode] = anchors } - end - end - end - end - end - end - end - - end - - end - -end - -local function split(replacement,original) - local result = { } - for i=1,#replacement do - result[original[i]] = replacement[i] - end - return result -end - -local valid = { - coverage = { chainsub = true, chainpos = true, contextsub = true }, - reversecoverage = { reversesub = true }, - glyphs = { chainsub = true, chainpos = true }, -} - -local function prepare_contextchains(tfmdata) - local rawdata = tfmdata.shared.rawdata - local resources = rawdata.resources - local lookuphash = resources.lookuphash - local lookups = rawdata.lookups - if lookups then - for lookupname, lookupdata in next, rawdata.lookups do - local lookuptype = lookupdata.type - if lookuptype then - local rules = lookupdata.rules - if rules then - local format = lookupdata.format - local validformat = valid[format] - if not validformat then - report_prepare("unsupported format %a",format) - elseif not validformat[lookuptype] then - -- todo: dejavu-serif has one (but i need to see what use it has) - report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookupname) - else - local contexts = lookuphash[lookupname] - if not contexts then - contexts = { } - lookuphash[lookupname] = contexts - end - local t, nt = { }, 0 - for nofrules=1,#rules do - local rule = rules[nofrules] - local current = rule.current - local before = rule.before - local after = rule.after - local replacements = rule.replacements - local sequence = { } - local nofsequences = 0 - -- Eventually we can store start, stop and sequence in the cached file - -- but then less sharing takes place so best not do that without a lot - -- of profiling so let's forget about it. 
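As a reading aid for the packing loop below (illustrative only, not part of the patch): the tuple stored per rule lines up with the ck[...] indices read by normal_handle_contextchain earlier in this file.

-- Hypothetical coverage classes, keyed by unicode as in the real rules:
local A, B, C, D = { [0x41] = true }, { [0x42] = true }, { [0x43] = true }, { [0x44] = true }
-- For a rule with before = { A }, current = { B, C }, after = { D } the loop
-- below yields sequence = { A, B, C, D } with start = 2 and stop = 3, so the
-- packed entry looks like this:
local packed = {
  1,                -- [1] rule number                         (ck[1])
  "chainsub",       -- [2] lookuptype                          (ck[2])
  { A, B, C, D },   -- [3] before .. current .. after          (ck[3])
  2,                -- [4] first index of the current part, f  (ck[4])
  3,                -- [5] last index of the current part, l   (ck[5])
  { "lookup_1" },   -- [6] rule.lookups to run on a match      (ck[6], hypothetical name)
  nil,              -- [7] replacements, reverse lookups only  (ck[7])
}
-- The entry is then registered under every unicode in sequence[start], that is
-- under each glyph that can occupy the first slot of the current part.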
- if before then - for n=1,#before do - nofsequences = nofsequences + 1 - sequence[nofsequences] = before[n] - end - end - local start = nofsequences + 1 - for n=1,#current do - nofsequences = nofsequences + 1 - sequence[nofsequences] = current[n] - end - local stop = nofsequences - if after then - for n=1,#after do - nofsequences = nofsequences + 1 - sequence[nofsequences] = after[n] - end - end - if sequence[1] then - -- Replacements only happen with reverse lookups as they are single only. We - -- could pack them into current (replacement value instead of true) and then - -- use sequence[start] instead but it's somewhat ugly. - nt = nt + 1 - t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups, replacements } - for unic, _ in next, sequence[start] do - local cu = contexts[unic] - if not cu then - contexts[unic] = t - end - end - end - end - end - else - -- no rules - end - else - report_prepare("missing lookuptype for lookupname %a",lookupname) - end - end - end -end - --- we can consider lookuphash == false (initialized but empty) vs lookuphash == table - -local function featuresinitializer(tfmdata,value) - if true then -- value then - -- beware we need to use the topmost properties table - local rawdata = tfmdata.shared.rawdata - local properties = rawdata.properties - if not properties.initialized then - local starttime = trace_preparing and os.clock() - local resources = rawdata.resources - resources.lookuphash = resources.lookuphash or { } - prepare_contextchains(tfmdata) - prepare_lookups(tfmdata) - properties.initialized = true - if trace_preparing then - report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,tfmdata.properties.fullname) - end - end - end -end - -registerotffeature { - name = "features", - description = "features", - default = true, - initializers = { - position = 1, - node = featuresinitializer, - }, - processors = { - node = featuresprocessor, - } -} - --- This can be used for extra handlers, but should be used with care! - -otf.handlers = handlers diff --git a/src/luaotfload-fonts-tfm.lua b/src/luaotfload-fonts-tfm.lua deleted file mode 100644 index b9bb1bd..0000000 --- a/src/luaotfload-fonts-tfm.lua +++ /dev/null @@ -1,38 +0,0 @@ -if not modules then modules = { } end modules ['luatex-fonts-tfm'] = { - version = 1.001, - comment = "companion to luatex-*.tex", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end - -local fonts = fonts -local tfm = { } -fonts.handlers.tfm = tfm -fonts.formats.tfm = "type1" -- we need to have at least a value here - -function fonts.readers.tfm(specification) - local fullname = specification.filename or "" - if fullname == "" then - local forced = specification.forced or "" - if forced ~= "" then - fullname = specification.name .. "." .. 
forced - else - fullname = specification.name - end - end - local foundname = resolvers.findbinfile(fullname, 'tfm') or "" - if foundname == "" then - foundname = resolvers.findbinfile(fullname, 'ofm') or "" - end - if foundname ~= "" then - specification.filename = foundname - specification.format = "ofm" - return font.read_tfm(specification.filename,specification.size) - end -end diff --git a/src/luaotfload-main.lua b/src/luaotfload-main.lua index 0ecb248..7dd6c6e 100644 --- a/src/luaotfload-main.lua +++ b/src/luaotfload-main.lua @@ -122,21 +122,23 @@ end --[[doc-- \subsection{Module loading} - We load the files imported from \CONTEXT with this function. It - automatically prepends the prefix \fileent{luaotfload-} to its - argument, so we can refer to the files with their actual \CONTEXT - name. + We load the files imported from \CONTEXT with function derived this way. It + automatically prepends a prefix to its argument, so we can refer to the + files with their actual \CONTEXT name. --doc]]-- -local fl_prefix = "luaotfload" -- “luatex” for luatex-plain -local loadmodule = function (name) - require (fl_prefix .."-"..name) +local make_loader = function (prefix) + return function (name) + require ((prefix or "luaotfload") .."-"..name) + end end -loadmodule "log.lua" --- log messages ---loadmodule "parsers.lua" --- new in 2.5; fonts.conf and syntax ---loadmodule "configuration.lua" --- configuration options +local load_luaotfload_module = make_loader () +----- load_luaotfload_module = make_loader "luatex" --=> for Luatex-Plain +local load_fontloader_module = make_loader "luaotfload" --- XXX adapt + +load_luaotfload_module "log.lua" --- log messages local log = luaotfload.log local logreport = log.report @@ -307,11 +309,13 @@ tex.attribute[0] = 0 --doc]]-- -loadmodule "fontloader.lua" ----loadmodule"font-odv.lua" --- <= Devanagari support from Context +load_fontloader_module "fontloader.lua" +---load_fontloader_module "font-odv.lua" --- <= Devanagari support from Context if fonts then + --- The Initialization is highly idiosyncratic. + if not fonts._merge_loaded_message_done_ then logreport ("log", 5, "main", [["I am using the merged fontloader here.]]) logreport ("log", 5, "main", [[ If you run into problems or experience unexpected]]) @@ -327,38 +331,38 @@ else--- the loading sequence is known to change, so this might have to --- be updated with future updates! --- do not modify it though unless there is a change to the merged --- package! 
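As an aside to the loader refactoring above, here is a self-contained sketch, not part of the patch, of how the derived loaders map short names to prefixed module names; a later commit in this series switches the imported files to a fontloader- prefix. The print call stands in for the require the real loaders perform.

local function make_loader (prefix)
  return function (name)
    local modname = (prefix or "luaotfload") .. "-" .. name
    print (modname)      -- the real loaders call require (modname) here
    return modname
  end
end

local load_luaotfload_module = make_loader ()
local load_fontloader_module = make_loader "luaotfload"

load_luaotfload_module "log.lua"         --> luaotfload-log.lua
load_fontloader_module "basics-gen.lua"  --> luaotfload-basics-gen.lua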
- loadmodule("l-lua.lua") - loadmodule("l-lpeg.lua") - loadmodule("l-function.lua") - loadmodule("l-string.lua") - loadmodule("l-table.lua") - loadmodule("l-io.lua") - loadmodule("l-file.lua") - loadmodule("l-boolean.lua") - loadmodule("l-math.lua") - loadmodule("util-str.lua") - loadmodule('luatex-basics-gen.lua') - loadmodule('data-con.lua') - loadmodule('luatex-basics-nod.lua') - loadmodule('font-ini.lua') - loadmodule('font-con.lua') - loadmodule('luatex-fonts-enc.lua') - loadmodule('font-cid.lua') - loadmodule('font-map.lua') - loadmodule('luatex-fonts-syn.lua') - loadmodule('luatex-fonts-tfm.lua') - loadmodule('font-oti.lua') - loadmodule('font-otf.lua') - loadmodule('font-otb.lua') - loadmodule('luatex-fonts-inj.lua') --> since 2014-01-07, replaces node-inj.lua - loadmodule('font-ota.lua') - loadmodule('luatex-fonts-otn.lua') --> since 2014-01-07, replaces font-otn.lua - loadmodule('font-otp.lua') --> since 2013-04-23 - loadmodule('luatex-fonts-lua.lua') - loadmodule('font-def.lua') - loadmodule('luatex-fonts-def.lua') - loadmodule('luatex-fonts-ext.lua') - loadmodule('luatex-fonts-cbk.lua') + load_fontloader_module "l-lua.lua" + load_fontloader_module "l-lpeg.lua" + load_fontloader_module "l-function.lua" + load_fontloader_module "l-string.lua" + load_fontloader_module "l-table.lua" + load_fontloader_module "l-io.lua" + load_fontloader_module "l-file.lua" + load_fontloader_module "l-boolean.lua" + load_fontloader_module "l-math.lua" + load_fontloader_module "util-str.lua" + load_fontloader_module "luatex-basics-gen.lua" + load_fontloader_module "data-con.lua" + load_fontloader_module "luatex-basics-nod.lua" + load_fontloader_module "font-ini.lua" + load_fontloader_module "font-con.lua" + load_fontloader_module "luatex-fonts-enc.lua" + load_fontloader_module "font-cid.lua" + load_fontloader_module "font-map.lua" + load_fontloader_module "luatex-fonts-syn.lua" + load_fontloader_module "luatex-fonts-tfm.lua" + load_fontloader_module "font-oti.lua" + load_fontloader_module "font-otf.lua" + load_fontloader_module "font-otb.lua" + load_fontloader_module "luatex-fonts-inj.lua" --> since 2014-01-07, replaces node-inj.lua + load_fontloader_module "font-ota.lua" + load_fontloader_module "luatex-fonts-otn.lua" --> since 2014-01-07, replaces font-otn.lua + load_fontloader_module "font-otp.lua" --> since 2013-04-23 + load_fontloader_module "luatex-fonts-lua.lua" + load_fontloader_module "font-def.lua" + load_fontloader_module "luatex-fonts-def.lua" + load_fontloader_module "luatex-fonts-ext.lua" + load_fontloader_module "luatex-fonts-cbk.lua" end --- non-merge fallback scope --[[doc-- @@ -411,7 +415,7 @@ add_to_callback("hpack_filter", add_to_callback("find_vf_file", find_vf_file, "luaotfload.find_vf_file") -loadmodule "override.lua" --- load glyphlist on demand +load_luaotfload_module "override.lua" --- load glyphlist on demand --[[doc-- @@ -419,16 +423,16 @@ loadmodule "override.lua" --- load glyphlist on demand --doc]]-- -loadmodule "parsers.lua" --- fonts.conf and syntax -loadmodule "configuration.lua" --- configuration options +load_luaotfload_module "parsers.lua" --- fonts.conf and syntax +load_luaotfload_module "configuration.lua" --- configuration options if not config.actions.apply_defaults () then logreport ("log", 0, "load", "Configuration unsuccessful.") end -loadmodule "loaders.lua" --- Type1 font wrappers -loadmodule "database.lua" --- Font management. -loadmodule "colors.lua" --- Per-font colors. 
+load_luaotfload_module "loaders.lua" --- Type1 font wrappers +load_luaotfload_module "database.lua" --- Font management. +load_luaotfload_module "colors.lua" --- Per-font colors. if not config.actions.reconfigure () then logreport ("log", 0, "load", "Post-configuration hooks failed.") @@ -728,9 +732,9 @@ reset_callback "define_font" local definer = config.luaotfload.run.definer add_to_callback ("define_font", definers[definer], "luaotfload.define_font", 1) -loadmodule "features.lua" --- font request and feature handling -loadmodule "letterspace.lua" --- extra character kerning -loadmodule "auxiliary.lua" --- additional high-level functionality +load_luaotfload_module "features.lua" --- font request and feature handling +load_luaotfload_module "letterspace.lua" --- extra character kerning +load_luaotfload_module "auxiliary.lua" --- additional high-level functionality luaotfload.aux.start_rewrite_fontname () --- to be migrated to fontspec -- cgit v1.2.3 From 27c01eccbad7d74854232b4798879d175d73414b Mon Sep 17 00:00:00 2001 From: Philipp Gesang Date: Mon, 8 Dec 2014 20:32:16 +0100 Subject: [scripts] adapt status file generator to dir structure --- scripts/mkstatus | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/scripts/mkstatus b/scripts/mkstatus index 802b2cd..3665aff 100755 --- a/scripts/mkstatus +++ b/scripts/mkstatus @@ -37,25 +37,25 @@ local filelist = "./build/luaotfload-status.lua" --- result local names = { --- only the runtime files and scripts { "src", "luaotfload-auxiliary.lua", }, - { "src", "luaotfload-basics-gen.lua", }, - { "src", "luaotfload-basics-nod.lua", }, + { "src/fontloader", "luaotfload-basics-gen.lua", }, + { "src/fontloader", "luaotfload-basics-nod.lua", }, { "build", "luaotfload-characters.lua", }, { "src", "luaotfload-colors.lua", }, { "src", "luaotfload-database.lua", }, { "src", "luaotfload-diagnostics.lua", }, { "src", "luaotfload-features.lua", }, - { "src", "luaotfload-fonts-cbk.lua", }, - { "src", "luaotfload-fonts-def.lua", }, - { "src", "luaotfload-fonts-enc.lua", }, - { "src", "luaotfload-fonts-ext.lua", }, - { "src", "luaotfload-fonts-lua.lua", }, - { "src", "luaotfload-fonts-tfm.lua", }, + { "src/fontloader", "luaotfload-fonts-cbk.lua", }, + { "src/fontloader", "luaotfload-fonts-def.lua", }, + { "src/fontloader", "luaotfload-fonts-enc.lua", }, + { "src/fontloader", "luaotfload-fonts-ext.lua", }, + { "src/fontloader", "luaotfload-fonts-lua.lua", }, + { "src/fontloader", "luaotfload-fonts-tfm.lua", }, { "build", "luaotfload-glyphlist.lua", }, { "src", "luaotfload-letterspace.lua", }, { "src", "luaotfload-loaders.lua", }, { "src", "luaotfload-log.lua", }, { "src", "luaotfload-main.lua", }, - { "src", "luaotfload-fontloader.lua", }, + { "src/fontloader", "luaotfload-fontloader.lua", }, { "src", "luaotfload-override.lua", }, { "src", "luaotfload-parsers.lua", }, { "src", "luaotfload-tool.lua", }, -- cgit v1.2.3 From 61d4a9200d0ba19b7472386c713a4aba564ba93e Mon Sep 17 00:00:00 2001 From: Philipp Gesang Date: Mon, 8 Dec 2014 20:33:40 +0100 Subject: [build] include cleanup targets in makefile usage message --- Makefile | 3 +++ 1 file changed, 3 insertions(+) diff --git a/Makefile b/Makefile index 60dec60..17f583f 100644 --- a/Makefile +++ b/Makefile @@ -227,5 +227,8 @@ showtargets: @echo " ctan package a zipball for uploading to CTAN" @echo " sign sign zipball" @echo + @echo " clean cleanup side-effects" + @echo " mrproper cleanup side-effects as well as make targets" + @echo # vim:noexpandtab:tabstop=8:shiftwidth=2 -- cgit 
v1.2.3 From bb3a6230adf4f4595892f80c861165e8b633d6de Mon Sep 17 00:00:00 2001 From: Philipp Gesang Date: Mon, 8 Dec 2014 20:47:36 +0100 Subject: =?UTF-8?q?[fontloader]=20change=20prefix=20of=20imported=20files?= =?UTF-8?q?=20to=20=E2=80=9Cfontloader=E2=80=9D?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/fontloader/fontloader-basics-gen.lua | 368 + src/fontloader/fontloader-basics-nod.lua | 178 + src/fontloader/fontloader-fontloader.lua | 14628 +++++++++++++++++++++++++++++ src/fontloader/fontloader-fonts-cbk.lua | 68 + src/fontloader/fontloader-fonts-def.lua | 97 + src/fontloader/fontloader-fonts-enc.lua | 28 + src/fontloader/fontloader-fonts-ext.lua | 272 + src/fontloader/fontloader-fonts-inj.lua | 526 ++ src/fontloader/fontloader-fonts-lua.lua | 33 + src/fontloader/fontloader-fonts-otn.lua | 2848 ++++++ src/fontloader/fontloader-fonts-tfm.lua | 38 + src/fontloader/luaotfload-basics-gen.lua | 368 - src/fontloader/luaotfload-basics-nod.lua | 178 - src/fontloader/luaotfload-fontloader.lua | 14628 ----------------------------- src/fontloader/luaotfload-fonts-cbk.lua | 68 - src/fontloader/luaotfload-fonts-def.lua | 97 - src/fontloader/luaotfload-fonts-enc.lua | 28 - src/fontloader/luaotfload-fonts-ext.lua | 272 - src/fontloader/luaotfload-fonts-inj.lua | 526 -- src/fontloader/luaotfload-fonts-lua.lua | 33 - src/fontloader/luaotfload-fonts-otn.lua | 2848 ------ src/fontloader/luaotfload-fonts-tfm.lua | 38 - src/luaotfload-main.lua | 97 +- 23 files changed, 19132 insertions(+), 19133 deletions(-) create mode 100644 src/fontloader/fontloader-basics-gen.lua create mode 100644 src/fontloader/fontloader-basics-nod.lua create mode 100644 src/fontloader/fontloader-fontloader.lua create mode 100644 src/fontloader/fontloader-fonts-cbk.lua create mode 100644 src/fontloader/fontloader-fonts-def.lua create mode 100644 src/fontloader/fontloader-fonts-enc.lua create mode 100644 src/fontloader/fontloader-fonts-ext.lua create mode 100644 src/fontloader/fontloader-fonts-inj.lua create mode 100644 src/fontloader/fontloader-fonts-lua.lua create mode 100644 src/fontloader/fontloader-fonts-otn.lua create mode 100644 src/fontloader/fontloader-fonts-tfm.lua delete mode 100644 src/fontloader/luaotfload-basics-gen.lua delete mode 100644 src/fontloader/luaotfload-basics-nod.lua delete mode 100644 src/fontloader/luaotfload-fontloader.lua delete mode 100644 src/fontloader/luaotfload-fonts-cbk.lua delete mode 100644 src/fontloader/luaotfload-fonts-def.lua delete mode 100644 src/fontloader/luaotfload-fonts-enc.lua delete mode 100644 src/fontloader/luaotfload-fonts-ext.lua delete mode 100644 src/fontloader/luaotfload-fonts-inj.lua delete mode 100644 src/fontloader/luaotfload-fonts-lua.lua delete mode 100644 src/fontloader/luaotfload-fonts-otn.lua delete mode 100644 src/fontloader/luaotfload-fonts-tfm.lua diff --git a/src/fontloader/fontloader-basics-gen.lua b/src/fontloader/fontloader-basics-gen.lua new file mode 100644 index 0000000..c19a49a --- /dev/null +++ b/src/fontloader/fontloader-basics-gen.lua @@ -0,0 +1,368 @@ +if not modules then modules = { } end modules ['luat-basics-gen'] = { + version = 1.100, + comment = "companion to luatex-*.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end + +local dummyfunction = function() +end + +local dummyreporter = 
function(c) + return function(...) + (texio.reporter or texio.write_nl)(c .. " : " .. string.formatters(...)) + end +end + +statistics = { + register = dummyfunction, + starttiming = dummyfunction, + stoptiming = dummyfunction, + elapsedtime = nil, +} + +directives = { + register = dummyfunction, + enable = dummyfunction, + disable = dummyfunction, +} + +trackers = { + register = dummyfunction, + enable = dummyfunction, + disable = dummyfunction, +} + +experiments = { + register = dummyfunction, + enable = dummyfunction, + disable = dummyfunction, +} + +storage = { -- probably no longer needed + register = dummyfunction, + shared = { }, +} + +logs = { + new = dummyreporter, + reporter = dummyreporter, + messenger = dummyreporter, + report = dummyfunction, +} + +callbacks = { + register = function(n,f) return callback.register(n,f) end, + +} + +utilities = { + storage = { + allocate = function(t) return t or { } end, + mark = function(t) return t or { } end, + }, +} + +characters = characters or { + data = { } +} + +-- we need to cheat a bit here + +texconfig.kpse_init = true + +resolvers = resolvers or { } -- no fancy file helpers used + +local remapper = { + otf = "opentype fonts", + ttf = "truetype fonts", + ttc = "truetype fonts", + dfont = "truetype fonts", -- "truetype dictionary", + cid = "cid maps", + cidmap = "cid maps", + fea = "font feature files", + pfa = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this! + pfb = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this! + afm = "afm", +} + +function resolvers.findfile(name,fileformat) + name = string.gsub(name,"\\","/") + if not fileformat or fileformat == "" then + fileformat = file.suffix(name) + if fileformat == "" then + fileformat = "tex" + end + end + fileformat = string.lower(fileformat) + fileformat = remapper[fileformat] or fileformat + local found = kpse.find_file(name,fileformat) + if not found or found == "" then + found = kpse.find_file(name,"other text files") + end + return found +end + +-- function resolvers.findbinfile(name,fileformat) +-- if not fileformat or fileformat == "" then +-- fileformat = file.suffix(name) +-- end +-- return resolvers.findfile(name,(fileformat and remapper[fileformat]) or fileformat) +-- end + +resolvers.findbinfile = resolvers.findfile + +function resolvers.loadbinfile(filename,filetype) + local data = io.loaddata(filename) + return true, data, #data +end + +function resolvers.resolve(s) + return s +end + +function resolvers.unresolve(s) + return s +end + +-- Caches ... I will make a real stupid version some day when I'm in the +-- mood. After all, the generic code does not need the more advanced +-- ConTeXt features. Cached data is not shared between ConTeXt and other +-- usage as I don't want any dependency at all. Also, ConTeXt might have +-- different needs and tricks added. 
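A short usage sketch of the cache interface defined below, assuming those definitions are in scope; the category and file names are made up and the sketch is not part of the patch.

local path = caches.getwritablepath ("fonts", "demo")        -- creates the directories as needed
caches.savedata (path, "somefont", { version = 1 })          -- writes the .lua file (and a compiled copy)
local readable = caches.getreadablepaths ("fonts", "demo")
local data     = caches.loaddata (readable, "somefont")      -- prefers the compiled copy when present
if data then
  print (data.version)  --> 1
end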
+ +--~ containers.usecache = true + +caches = { } + +local writable = nil +local readables = { } +local usingjit = jit + +if not caches.namespace or caches.namespace == "" or caches.namespace == "context" then + caches.namespace = 'generic' +end + +do + + -- standard context tree setup + + local cachepaths = kpse.expand_var('$TEXMFCACHE') or "" + + -- quite like tex live or so (the weird $TEXMFCACHE test seems to be needed on miktex) + + if cachepaths == "" or cachepaths == "$TEXMFCACHE" then + cachepaths = kpse.expand_var('$TEXMFVAR') or "" + end + + -- this also happened to be used (the weird $TEXMFVAR test seems to be needed on miktex) + + if cachepaths == "" or cachepaths == "$TEXMFVAR" then + cachepaths = kpse.expand_var('$VARTEXMF') or "" + end + + -- and this is a last resort (hm, we could use TEMP or TEMPDIR) + + if cachepaths == "" then + local fallbacks = { "TMPDIR", "TEMPDIR", "TMP", "TEMP", "HOME", "HOMEPATH" } + for i=1,#fallbacks do + cachepaths = os.getenv(fallbacks[i]) or "" + if cachepath ~= "" and lfs.isdir(cachepath) then + break + end + end + end + + if cachepaths == "" then + cachepaths = "." + end + + cachepaths = string.split(cachepaths,os.type == "windows" and ";" or ":") + + for i=1,#cachepaths do + local cachepath = cachepaths[i] + if not lfs.isdir(cachepath) then + lfs.mkdirs(cachepath) -- needed for texlive and latex + if lfs.isdir(cachepath) then + texio.write(string.format("(created cache path: %s)",cachepath)) + end + end + if file.is_writable(cachepath) then + writable = file.join(cachepath,"luatex-cache") + lfs.mkdir(writable) + writable = file.join(writable,caches.namespace) + lfs.mkdir(writable) + break + end + end + + for i=1,#cachepaths do + if file.is_readable(cachepaths[i]) then + readables[#readables+1] = file.join(cachepaths[i],"luatex-cache",caches.namespace) + end + end + + if not writable then + texio.write_nl("quiting: fix your writable cache path") + os.exit() + elseif #readables == 0 then + texio.write_nl("quiting: fix your readable cache path") + os.exit() + elseif #readables == 1 and readables[1] == writable then + texio.write(string.format("(using cache: %s)",writable)) + else + texio.write(string.format("(using write cache: %s)",writable)) + texio.write(string.format("(using read cache: %s)",table.concat(readables, " "))) + end + +end + +function caches.getwritablepath(category,subcategory) + local path = file.join(writable,category) + lfs.mkdir(path) + path = file.join(path,subcategory) + lfs.mkdir(path) + return path +end + +function caches.getreadablepaths(category,subcategory) + local t = { } + for i=1,#readables do + t[i] = file.join(readables[i],category,subcategory) + end + return t +end + +local function makefullname(path,name) + if path and path ~= "" then + return file.addsuffix(file.join(path,name),"lua"), file.addsuffix(file.join(path,name),usingjit and "lub" or "luc") + end +end + +function caches.is_writable(path,name) + local fullname = makefullname(path,name) + return fullname and file.is_writable(fullname) +end + +function caches.loaddata(paths,name) + for i=1,#paths do + local data = false + local luaname, lucname = makefullname(paths[i],name) + if lucname and not lfs.isfile(lucname) and type(caches.compile) == "function" then + -- in case we used luatex and luajittex mixed ... 
lub or luc file + texio.write(string.format("(compiling luc: %s)",lucname)) + data = loadfile(luaname) + if data then + data = data() + end + if data then + caches.compile(data,luaname,lucname) + return data + end + end + if lucname and lfs.isfile(lucname) then -- maybe also check for size + texio.write(string.format("(load luc: %s)",lucname)) + data = loadfile(lucname) + if data then + data = data() + end + if data then + return data + else + texio.write(string.format("(loading failed: %s)",lucname)) + end + end + if luaname and lfs.isfile(luaname) then + texio.write(string.format("(load lua: %s)",luaname)) + data = loadfile(luaname) + if data then + data = data() + end + if data then + return data + end + end + end +end + +function caches.savedata(path,name,data) + local luaname, lucname = makefullname(path,name) + if luaname then + texio.write(string.format("(save: %s)",luaname)) + table.tofile(luaname,data,true) + if lucname and type(caches.compile) == "function" then + os.remove(lucname) -- better be safe + texio.write(string.format("(save: %s)",lucname)) + caches.compile(data,luaname,lucname) + end + end +end + +-- According to KH os.execute is not permitted in plain/latex so there is +-- no reason to use the normal context way. So the method here is slightly +-- different from the one we have in context. We also use different suffixes +-- as we don't want any clashes (sharing cache files is not that handy as +-- context moves on faster.) +-- +-- Beware: serialization might fail on large files (so maybe we should pcall +-- this) in which case one should limit the method to luac and enable support +-- for execution. + +-- function caches.compile(data,luaname,lucname) +-- local d = io.loaddata(luaname) +-- if not d or d == "" then +-- d = table.serialize(data,true) -- slow +-- end +-- if d and d ~= "" then +-- local f = io.open(lucname,'w') +-- if f then +-- local s = loadstring(d) +-- if s then +-- f:write(string.dump(s,true)) +-- end +-- f:close() +-- end +-- end +-- end + +function caches.compile(data,luaname,lucname) + local d = io.loaddata(luaname) + if not d or d == "" then + d = table.serialize(data,true) -- slow + end + if d and d ~= "" then + local f = io.open(lucname,'wb') + if f then + local s = loadstring(d) + if s then + f:write(string.dump(s,true)) + end + f:close() + end + end +end + +-- + +function table.setmetatableindex(t,f) + setmetatable(t,{ __index = f }) +end + +-- helper for plain: + +arguments = { } + +if arg then + for i=1,#arg do + local k, v = string.match(arg[i],"^%-%-([^=]+)=?(.-)$") + if k and v then + arguments[k] = v + end + end +end diff --git a/src/fontloader/fontloader-basics-nod.lua b/src/fontloader/fontloader-basics-nod.lua new file mode 100644 index 0000000..373dab5 --- /dev/null +++ b/src/fontloader/fontloader-basics-nod.lua @@ -0,0 +1,178 @@ +if not modules then modules = { } end modules ['luatex-fonts-nod'] = { + version = 1.001, + comment = "companion to luatex-fonts.lua", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end + +-- Don't depend on code here as it is only needed to complement the +-- font handler code. + +-- Attributes: + +if tex.attribute[0] ~= 0 then + + texio.write_nl("log","!") + texio.write_nl("log","! Attribute 0 is reserved for ConTeXt's font feature management and has to be") + texio.write_nl("log","! set to zero. 
Also, some attributes in the range 1-255 are used for special") + texio.write_nl("log","! purposes so setting them at the TeX end might break the font handler.") + texio.write_nl("log","!") + + tex.attribute[0] = 0 -- else no features + +end + +attributes = attributes or { } +attributes.unsetvalue = -0x7FFFFFFF + +local numbers, last = { }, 127 + +attributes.private = attributes.private or function(name) + local number = numbers[name] + if not number then + if last < 255 then + last = last + 1 + end + number = last + numbers[name] = number + end + return number +end + +-- Nodes: + +nodes = { } +nodes.pool = { } +nodes.handlers = { } + +local nodecodes = { } for k,v in next, node.types () do nodecodes[string.gsub(v,"_","")] = k end +local whatcodes = { } for k,v in next, node.whatsits() do whatcodes[string.gsub(v,"_","")] = k end +local glyphcodes = { [0] = "character", "glyph", "ligature", "ghost", "left", "right" } +local disccodes = { [0] = "discretionary", "explicit", "automatic", "regular", "first", "second" } + +nodes.nodecodes = nodecodes +nodes.whatcodes = whatcodes +nodes.whatsitcodes = whatcodes +nodes.glyphcodes = glyphcodes +nodes.disccodes = disccodes + +local free_node = node.free +local remove_node = node.remove +local new_node = node.new +local traverse_id = node.traverse_id + +nodes.handlers.protectglyphs = node.protect_glyphs +nodes.handlers.unprotectglyphs = node.unprotect_glyphs + +local math_code = nodecodes.math +local end_of_math = node.end_of_math + +function node.end_of_math(n) + if n.id == math_code and n.subtype == 1 then + return n + else + return end_of_math(n) + end +end + +function nodes.remove(head, current, free_too) + local t = current + head, current = remove_node(head,current) + if t then + if free_too then + free_node(t) + t = nil + else + t.next, t.prev = nil, nil + end + end + return head, current, t +end + +function nodes.delete(head,current) + return nodes.remove(head,current,true) +end + +function nodes.pool.kern(k) + local n = new_node("kern",1) + n.kern = k + return n +end + +-- experimental + +local getfield = node.getfield or function(n,tag) return n[tag] end +local setfield = node.setfield or function(n,tag,value) n[tag] = value end + +nodes.getfield = getfield +nodes.setfield = setfield + +nodes.getattr = getfield +nodes.setattr = setfield + +if node.getid then nodes.getid = node.getid else function nodes.getid (n) return getfield(n,"id") end end +if node.getsubtype then nodes.getsubtype = node.getsubtype else function nodes.getsubtype(n) return getfield(n,"subtype") end end +if node.getnext then nodes.getnext = node.getnext else function nodes.getnext (n) return getfield(n,"next") end end +if node.getprev then nodes.getprev = node.getprev else function nodes.getprev (n) return getfield(n,"prev") end end +if node.getchar then nodes.getchar = node.getchar else function nodes.getchar (n) return getfield(n,"char") end end +if node.getfont then nodes.getfont = node.getfont else function nodes.getfont (n) return getfield(n,"font") end end +if node.getlist then nodes.getlist = node.getlist else function nodes.getlist (n) return getfield(n,"list") end end + +function nodes.tonut (n) return n end +function nodes.tonode(n) return n end + +-- being lazy ... just copy a bunch ... 
not all needed in generic but we assume +-- nodes to be kind of private anyway + +nodes.tostring = node.tostring or tostring +nodes.copy = node.copy +nodes.copy_list = node.copy_list +nodes.delete = node.delete +nodes.dimensions = node.dimensions +nodes.end_of_math = node.end_of_math +nodes.flush_list = node.flush_list +nodes.flush_node = node.flush_node +nodes.free = node.free +nodes.insert_after = node.insert_after +nodes.insert_before = node.insert_before +nodes.hpack = node.hpack +nodes.new = node.new +nodes.tail = node.tail +nodes.traverse = node.traverse +nodes.traverse_id = node.traverse_id +nodes.slide = node.slide +nodes.vpack = node.vpack + +nodes.first_glyph = node.first_glyph +nodes.first_character = node.first_character +nodes.has_glyph = node.has_glyph or node.first_glyph + +nodes.current_attr = node.current_attr +nodes.do_ligature_n = node.do_ligature_n +nodes.has_field = node.has_field +nodes.last_node = node.last_node +nodes.usedlist = node.usedlist +nodes.protrusion_skippable = node.protrusion_skippable +nodes.write = node.write + +nodes.has_attribute = node.has_attribute +nodes.set_attribute = node.set_attribute +nodes.unset_attribute = node.unset_attribute + +nodes.protect_glyphs = node.protect_glyphs +nodes.unprotect_glyphs = node.unprotect_glyphs +nodes.kerning = node.kerning +nodes.ligaturing = node.ligaturing +nodes.mlist_to_hlist = node.mlist_to_hlist + +-- in generic code, at least for some time, we stay nodes, while in context +-- we can go nuts (e.g. experimental); this split permits us us keep code +-- used elsewhere stable but at the same time play around in context + +nodes.nuts = nodes diff --git a/src/fontloader/fontloader-fontloader.lua b/src/fontloader/fontloader-fontloader.lua new file mode 100644 index 0000000..e9c6638 --- /dev/null +++ b/src/fontloader/fontloader-fontloader.lua @@ -0,0 +1,14628 @@ +-- merged file : luatex-fonts-merged.lua +-- parent file : luatex-fonts.lua +-- merge date : 12/06/14 14:20:08 + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['l-lua']={ + version=1.001, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local major,minor=string.match(_VERSION,"^[^%d]+(%d+)%.(%d+).*$") +_MAJORVERSION=tonumber(major) or 5 +_MINORVERSION=tonumber(minor) or 1 +_LUAVERSION=_MAJORVERSION+_MINORVERSION/10 +if not lpeg then + lpeg=require("lpeg") +end +if loadstring then + local loadnormal=load + function load(first,...) + if type(first)=="string" then + return loadstring(first,...) + else + return loadnormal(first,...) + end + end +else + loadstring=load +end +if not ipairs then + local function iterate(a,i) + i=i+1 + local v=a[i] + if v~=nil then + return i,v + end + end + function ipairs(a) + return iterate,a,0 + end +end +if not pairs then + function pairs(t) + return next,t + end +end +if not table.unpack then + table.unpack=_G.unpack +elseif not unpack then + _G.unpack=table.unpack +end +if not package.loaders then + package.loaders=package.searchers +end +local print,select,tostring=print,select,tostring +local inspectors={} +function setinspector(inspector) + inspectors[#inspectors+1]=inspector +end +function inspect(...) + for s=1,select("#",...) do + local value=select(s,...) 
+ local done=false + for i=1,#inspectors do + done=inspectors[i](value) + if done then + break + end + end + if not done then + print(tostring(value)) + end + end +end +local dummy=function() end +function optionalrequire(...) + local ok,result=xpcall(require,dummy,...) + if ok then + return result + end +end +if lua then + lua.mask=load([[τεχ = 1]]) and "utf" or "ascii" +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['l-lpeg']={ + version=1.001, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +lpeg=require("lpeg") +if not lpeg.print then function lpeg.print(...) print(lpeg.pcode(...)) end end +local type,next,tostring=type,next,tostring +local byte,char,gmatch,format=string.byte,string.char,string.gmatch,string.format +local floor=math.floor +local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt +local lpegtype,lpegmatch,lpegprint=lpeg.type,lpeg.match,lpeg.print +if setinspector then + setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end) +end +lpeg.patterns=lpeg.patterns or {} +local patterns=lpeg.patterns +local anything=P(1) +local endofstring=P(-1) +local alwaysmatched=P(true) +patterns.anything=anything +patterns.endofstring=endofstring +patterns.beginofstring=alwaysmatched +patterns.alwaysmatched=alwaysmatched +local sign=S('+-') +local zero=P('0') +local digit=R('09') +local octdigit=R("07") +local lowercase=R("az") +local uppercase=R("AZ") +local underscore=P("_") +local hexdigit=digit+lowercase+uppercase +local cr,lf,crlf=P("\r"),P("\n"),P("\r\n") +local newline=P("\r")*(P("\n")+P(true))+P("\n") +local escaped=P("\\")*anything +local squote=P("'") +local dquote=P('"') +local space=P(" ") +local period=P(".") +local comma=P(",") +local utfbom_32_be=P('\000\000\254\255') +local utfbom_32_le=P('\255\254\000\000') +local utfbom_16_be=P('\254\255') +local utfbom_16_le=P('\255\254') +local utfbom_8=P('\239\187\191') +local utfbom=utfbom_32_be+utfbom_32_le+utfbom_16_be+utfbom_16_le+utfbom_8 +local utftype=utfbom_32_be*Cc("utf-32-be")+utfbom_32_le*Cc("utf-32-le")+utfbom_16_be*Cc("utf-16-be")+utfbom_16_le*Cc("utf-16-le")+utfbom_8*Cc("utf-8")+alwaysmatched*Cc("utf-8") +local utfstricttype=utfbom_32_be*Cc("utf-32-be")+utfbom_32_le*Cc("utf-32-le")+utfbom_16_be*Cc("utf-16-be")+utfbom_16_le*Cc("utf-16-le")+utfbom_8*Cc("utf-8") +local utfoffset=utfbom_32_be*Cc(4)+utfbom_32_le*Cc(4)+utfbom_16_be*Cc(2)+utfbom_16_le*Cc(2)+utfbom_8*Cc(3)+Cc(0) +local utf8next=R("\128\191") +patterns.utfbom_32_be=utfbom_32_be +patterns.utfbom_32_le=utfbom_32_le +patterns.utfbom_16_be=utfbom_16_be +patterns.utfbom_16_le=utfbom_16_le +patterns.utfbom_8=utfbom_8 +patterns.utf_16_be_nl=P("\000\r\000\n")+P("\000\r")+P("\000\n") +patterns.utf_16_le_nl=P("\r\000\n\000")+P("\r\000")+P("\n\000") +patterns.utf_32_be_nl=P("\000\000\000\r\000\000\000\n")+P("\000\000\000\r")+P("\000\000\000\n") +patterns.utf_32_le_nl=P("\r\000\000\000\n\000\000\000")+P("\r\000\000\000")+P("\n\000\000\000") +patterns.utf8one=R("\000\127") +patterns.utf8two=R("\194\223")*utf8next +patterns.utf8three=R("\224\239")*utf8next*utf8next +patterns.utf8four=R("\240\244")*utf8next*utf8next*utf8next +patterns.utfbom=utfbom +patterns.utftype=utftype +patterns.utfstricttype=utfstricttype +patterns.utfoffset=utfoffset +local 
utf8char=patterns.utf8one+patterns.utf8two+patterns.utf8three+patterns.utf8four +local validutf8char=utf8char^0*endofstring*Cc(true)+Cc(false) +local utf8character=P(1)*R("\128\191")^0 +patterns.utf8=utf8char +patterns.utf8char=utf8char +patterns.utf8character=utf8character +patterns.validutf8=validutf8char +patterns.validutf8char=validutf8char +local eol=S("\n\r") +local spacer=S(" \t\f\v") +local whitespace=eol+spacer +local nonspacer=1-spacer +local nonwhitespace=1-whitespace +patterns.eol=eol +patterns.spacer=spacer +patterns.whitespace=whitespace +patterns.nonspacer=nonspacer +patterns.nonwhitespace=nonwhitespace +local stripper=spacer^0*C((spacer^0*nonspacer^1)^0) +local fullstripper=whitespace^0*C((whitespace^0*nonwhitespace^1)^0) +local collapser=Cs(spacer^0/""*nonspacer^0*((spacer^0/" "*nonspacer^1)^0)) +local b_collapser=Cs(whitespace^0/""*(nonwhitespace^1+whitespace^1/" ")^0) +local e_collapser=Cs((whitespace^1*P(-1)/""+nonwhitespace^1+whitespace^1/" ")^0) +local m_collapser=Cs((nonwhitespace^1+whitespace^1/" ")^0) +local b_stripper=Cs(spacer^0/""*(nonspacer^1+spacer^1/" ")^0) +local e_stripper=Cs((spacer^1*P(-1)/""+nonspacer^1+spacer^1/" ")^0) +local m_stripper=Cs((nonspacer^1+spacer^1/" ")^0) +patterns.stripper=stripper +patterns.fullstripper=fullstripper +patterns.collapser=collapser +patterns.b_collapser=b_collapser +patterns.m_collapser=m_collapser +patterns.e_collapser=e_collapser +patterns.b_stripper=b_stripper +patterns.m_stripper=m_stripper +patterns.e_stripper=e_stripper +patterns.lowercase=lowercase +patterns.uppercase=uppercase +patterns.letter=patterns.lowercase+patterns.uppercase +patterns.space=space +patterns.tab=P("\t") +patterns.spaceortab=patterns.space+patterns.tab +patterns.newline=newline +patterns.emptyline=newline^1 +patterns.equal=P("=") +patterns.comma=comma +patterns.commaspacer=comma*spacer^0 +patterns.period=period +patterns.colon=P(":") +patterns.semicolon=P(";") +patterns.underscore=underscore +patterns.escaped=escaped +patterns.squote=squote +patterns.dquote=dquote +patterns.nosquote=(escaped+(1-squote))^0 +patterns.nodquote=(escaped+(1-dquote))^0 +patterns.unsingle=(squote/"")*patterns.nosquote*(squote/"") +patterns.undouble=(dquote/"")*patterns.nodquote*(dquote/"") +patterns.unquoted=patterns.undouble+patterns.unsingle +patterns.unspacer=((patterns.spacer^1)/"")^0 +patterns.singlequoted=squote*patterns.nosquote*squote +patterns.doublequoted=dquote*patterns.nodquote*dquote +patterns.quoted=patterns.doublequoted+patterns.singlequoted +patterns.digit=digit +patterns.octdigit=octdigit +patterns.hexdigit=hexdigit +patterns.sign=sign +patterns.cardinal=digit^1 +patterns.integer=sign^-1*digit^1 +patterns.unsigned=digit^0*period*digit^1 +patterns.float=sign^-1*patterns.unsigned +patterns.cunsigned=digit^0*comma*digit^1 +patterns.cpunsigned=digit^0*(period+comma)*digit^1 +patterns.cfloat=sign^-1*patterns.cunsigned +patterns.cpfloat=sign^-1*patterns.cpunsigned +patterns.number=patterns.float+patterns.integer +patterns.cnumber=patterns.cfloat+patterns.integer +patterns.cpnumber=patterns.cpfloat+patterns.integer +patterns.oct=zero*octdigit^1 +patterns.octal=patterns.oct +patterns.HEX=zero*P("X")*(digit+uppercase)^1 +patterns.hex=zero*P("x")*(digit+lowercase)^1 +patterns.hexadecimal=zero*S("xX")*hexdigit^1 +patterns.hexafloat=sign^-1*zero*S("xX")*(hexdigit^0*period*hexdigit^1+hexdigit^1*period*hexdigit^0+hexdigit^1)*(S("pP")*sign^-1*hexdigit^1)^-1 +patterns.decafloat=sign^-1*(digit^0*period*digit^1+digit^1*period*digit^0+digit^1)*S("eE")*sign^-1*digit^1 
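A brief usage sketch for the pattern table assembled above, assuming the definitions are loaded; it is not part of the merged file. Without captures lpeg.match returns the position after the match, with captures it returns them.

local lpegmatch = lpeg.match
local patterns  = lpeg.patterns
print (lpegmatch (patterns.validutf8, "müller"))  --> true (boolean capture)
print (lpegmatch (patterns.number, "-12.5"))      --> 6    (position after the match)
print (lpegmatch (patterns.hexadecimal, "0x1F"))  --> 5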
+patterns.propername=(uppercase+lowercase+underscore)*(uppercase+lowercase+underscore+digit)^0*endofstring +patterns.somecontent=(anything-newline-space)^1 +patterns.beginline=#(1-newline) +patterns.longtostring=Cs(whitespace^0/""*((patterns.quoted+nonwhitespace^1+whitespace^1/""*(P(-1)+Cc(" ")))^0)) +local function anywhere(pattern) + return P { P(pattern)+1*V(1) } +end +lpeg.anywhere=anywhere +function lpeg.instringchecker(p) + p=anywhere(p) + return function(str) + return lpegmatch(p,str) and true or false + end +end +function lpeg.splitter(pattern,action) + return (((1-P(pattern))^1)/action+1)^0 +end +function lpeg.tsplitter(pattern,action) + return Ct((((1-P(pattern))^1)/action+1)^0) +end +local splitters_s,splitters_m,splitters_t={},{},{} +local function splitat(separator,single) + local splitter=(single and splitters_s[separator]) or splitters_m[separator] + if not splitter then + separator=P(separator) + local other=C((1-separator)^0) + if single then + local any=anything + splitter=other*(separator*C(any^0)+"") + splitters_s[separator]=splitter + else + splitter=other*(separator*other)^0 + splitters_m[separator]=splitter + end + end + return splitter +end +local function tsplitat(separator) + local splitter=splitters_t[separator] + if not splitter then + splitter=Ct(splitat(separator)) + splitters_t[separator]=splitter + end + return splitter +end +lpeg.splitat=splitat +lpeg.tsplitat=tsplitat +function string.splitup(str,separator) + if not separator then + separator="," + end + return lpegmatch(splitters_m[separator] or splitat(separator),str) +end +local cache={} +function lpeg.split(separator,str) + local c=cache[separator] + if not c then + c=tsplitat(separator) + cache[separator]=c + end + return lpegmatch(c,str) +end +function string.split(str,separator) + if separator then + local c=cache[separator] + if not c then + c=tsplitat(separator) + cache[separator]=c + end + return lpegmatch(c,str) + else + return { str } + end +end +local spacing=patterns.spacer^0*newline +local empty=spacing*Cc("") +local nonempty=Cs((1-spacing)^1)*spacing^-1 +local content=(empty+nonempty)^1 +patterns.textline=content +local linesplitter=tsplitat(newline) +patterns.linesplitter=linesplitter +function string.splitlines(str) + return lpegmatch(linesplitter,str) +end +local cache={} +function lpeg.checkedsplit(separator,str) + local c=cache[separator] + if not c then + separator=P(separator) + local other=C((1-separator)^1) + c=Ct(separator^0*other*(separator^1*other)^0) + cache[separator]=c + end + return lpegmatch(c,str) +end +function string.checkedsplit(str,separator) + local c=cache[separator] + if not c then + separator=P(separator) + local other=C((1-separator)^1) + c=Ct(separator^0*other*(separator^1*other)^0) + cache[separator]=c + end + return lpegmatch(c,str) +end +local function f2(s) local c1,c2=byte(s,1,2) return c1*64+c2-12416 end +local function f3(s) local c1,c2,c3=byte(s,1,3) return (c1*64+c2)*64+c3-925824 end +local function f4(s) local c1,c2,c3,c4=byte(s,1,4) return ((c1*64+c2)*64+c3)*64+c4-63447168 end +local utf8byte=patterns.utf8one/byte+patterns.utf8two/f2+patterns.utf8three/f3+patterns.utf8four/f4 +patterns.utf8byte=utf8byte +local cache={} +function lpeg.stripper(str) + if type(str)=="string" then + local s=cache[str] + if not s then + s=Cs(((S(str)^1)/""+1)^0) + cache[str]=s + end + return s + else + return Cs(((str^1)/""+1)^0) + end +end +local cache={} +function lpeg.keeper(str) + if type(str)=="string" then + local s=cache[str] + if not s then + 
s=Cs((((1-S(str))^1)/""+1)^0) + cache[str]=s + end + return s + else + return Cs((((1-str)^1)/""+1)^0) + end +end +function lpeg.frontstripper(str) + return (P(str)+P(true))*Cs(anything^0) +end +function lpeg.endstripper(str) + return Cs((1-P(str)*endofstring)^0) +end +function lpeg.replacer(one,two,makefunction,isutf) + local pattern + local u=isutf and utf8char or 1 + if type(one)=="table" then + local no=#one + local p=P(false) + if no==0 then + for k,v in next,one do + p=p+P(k)/v + end + pattern=Cs((p+u)^0) + elseif no==1 then + local o=one[1] + one,two=P(o[1]),o[2] + pattern=Cs((one/two+u)^0) + else + for i=1,no do + local o=one[i] + p=p+P(o[1])/o[2] + end + pattern=Cs((p+u)^0) + end + else + pattern=Cs((P(one)/(two or "")+u)^0) + end + if makefunction then + return function(str) + return lpegmatch(pattern,str) + end + else + return pattern + end +end +function lpeg.finder(lst,makefunction,isutf) + local pattern + if type(lst)=="table" then + pattern=P(false) + if #lst==0 then + for k,v in next,lst do + pattern=pattern+P(k) + end + else + for i=1,#lst do + pattern=pattern+P(lst[i]) + end + end + else + pattern=P(lst) + end + if isutf then + pattern=((utf8char or 1)-pattern)^0*pattern + else + pattern=(1-pattern)^0*pattern + end + if makefunction then + return function(str) + return lpegmatch(pattern,str) + end + else + return pattern + end +end +local splitters_f,splitters_s={},{} +function lpeg.firstofsplit(separator) + local splitter=splitters_f[separator] + if not splitter then + local pattern=P(separator) + splitter=C((1-pattern)^0) + splitters_f[separator]=splitter + end + return splitter +end +function lpeg.secondofsplit(separator) + local splitter=splitters_s[separator] + if not splitter then + local pattern=P(separator) + splitter=(1-pattern)^0*pattern*C(anything^0) + splitters_s[separator]=splitter + end + return splitter +end +local splitters_s,splitters_p={},{} +function lpeg.beforesuffix(separator) + local splitter=splitters_s[separator] + if not splitter then + local pattern=P(separator) + splitter=C((1-pattern)^0)*pattern*endofstring + splitters_s[separator]=splitter + end + return splitter +end +function lpeg.afterprefix(separator) + local splitter=splitters_p[separator] + if not splitter then + local pattern=P(separator) + splitter=pattern*C(anything^0) + splitters_p[separator]=splitter + end + return splitter +end +function lpeg.balancer(left,right) + left,right=P(left),P(right) + return P { left*((1-left-right)+V(1))^0*right } +end +local nany=utf8char/"" +function lpeg.counter(pattern) + pattern=Cs((P(pattern)/" "+nany)^0) + return function(str) + return #lpegmatch(pattern,str) + end +end +utf=utf or (unicode and unicode.utf8) or {} +local utfcharacters=utf and utf.characters or string.utfcharacters +local utfgmatch=utf and utf.gmatch +local utfchar=utf and utf.char +lpeg.UP=lpeg.P +if utfcharacters then + function lpeg.US(str) + local p=P(false) + for uc in utfcharacters(str) do + p=p+P(uc) + end + return p + end +elseif utfgmatch then + function lpeg.US(str) + local p=P(false) + for uc in utfgmatch(str,".") do + p=p+P(uc) + end + return p + end +else + function lpeg.US(str) + local p=P(false) + local f=function(uc) + p=p+P(uc) + end + lpegmatch((utf8char/f)^0,str) + return p + end +end +local range=utf8byte*utf8byte+Cc(false) +function lpeg.UR(str,more) + local first,last + if type(str)=="number" then + first=str + last=more or first + else + first,last=lpegmatch(range,str) + if not last then + return P(str) + end + end + if first==last then + return P(str) + 
elseif utfchar and (last-first<8) then + local p=P(false) + for i=first,last do + p=p+P(utfchar(i)) + end + return p + else + local f=function(b) + return b>=first and b<=last + end + return utf8byte/f + end +end +function lpeg.is_lpeg(p) + return p and lpegtype(p)=="pattern" +end +function lpeg.oneof(list,...) + if type(list)~="table" then + list={ list,... } + end + local p=P(list[1]) + for l=2,#list do + p=p+P(list[l]) + end + return p +end +local sort=table.sort +local function copyindexed(old) + local new={} + for i=1,#old do + new[i]=old + end + return new +end +local function sortedkeys(tab) + local keys,s={},0 + for key,_ in next,tab do + s=s+1 + keys[s]=key + end + sort(keys) + return keys +end +function lpeg.append(list,pp,delayed,checked) + local p=pp + if #list>0 then + local keys=copyindexed(list) + sort(keys) + for i=#keys,1,-1 do + local k=keys[i] + if p then + p=P(k)+p + else + p=P(k) + end + end + elseif delayed then + local keys=sortedkeys(list) + if p then + for i=1,#keys,1 do + local k=keys[i] + local v=list[k] + p=P(k)/list+p + end + else + for i=1,#keys do + local k=keys[i] + local v=list[k] + if p then + p=P(k)+p + else + p=P(k) + end + end + if p then + p=p/list + end + end + elseif checked then + local keys=sortedkeys(list) + for i=1,#keys do + local k=keys[i] + local v=list[k] + if p then + if k==v then + p=P(k)+p + else + p=P(k)/v+p + end + else + if k==v then + p=P(k) + else + p=P(k)/v + end + end + end + else + local keys=sortedkeys(list) + for i=1,#keys do + local k=keys[i] + local v=list[k] + if p then + p=P(k)/v+p + else + p=P(k)/v + end + end + end + return p +end +local function make(t,hash) + local p=P(false) + local keys=sortedkeys(t) + for i=1,#keys do + local k=keys[i] + local v=t[k] + local h=hash[v] + if h then + if next(v) then + p=p+P(k)*(make(v,hash)+P(true)) + else + p=p+P(k)*P(true) + end + else + if next(v) then + p=p+P(k)*make(v,hash) + else + p=p+P(k) + end + end + end + return p +end +function lpeg.utfchartabletopattern(list) + local tree={} + local hash={} + local n=#list + if n==0 then + for s in next,list do + local t=tree + for c in gmatch(s,".") do + local tc=t[c] + if not tc then + tc={} + t[c]=tc + end + t=tc + end + hash[t]=s + end + else + for i=1,n do + local t=tree + local s=list[i] + for c in gmatch(s,".") do + local tc=t[c] + if not tc then + tc={} + t[c]=tc + end + t=tc + end + hash[t]=s + end + end + return make(tree,hash) +end +patterns.containseol=lpeg.finder(eol) +local function nextstep(n,step,result) + local m=n%step + local d=floor(n/step) + if d>0 then + local v=V(tostring(step)) + local s=result.start + for i=1,d do + if s then + s=v*s + else + s=v + end + end + result.start=s + end + if step>1 and result.start then + local v=V(tostring(step/2)) + result[tostring(step)]=v*v + end + if step>0 then + return nextstep(m,step/2,result) + else + return result + end +end +function lpeg.times(pattern,n) + return P(nextstep(n,2^16,{ "start",["1"]=pattern })) +end +local trailingzeros=zero^0*-digit +local case_1=period*trailingzeros/"" +local case_2=period*(digit-trailingzeros)^1*(trailingzeros/"") +local number=digit^1*(case_1+case_2) +local stripper=Cs((number+1)^0) +lpeg.patterns.stripzeros=stripper +local byte_to_HEX={} +local byte_to_hex={} +local byte_to_dec={} +local hex_to_byte={} +for i=0,255 do + local H=format("%02X",i) + local h=format("%02x",i) + local d=format("%03i",i) + local c=char(i) + byte_to_HEX[c]=H + byte_to_hex[c]=h + byte_to_dec[c]=d + hex_to_byte[h]=c + hex_to_byte[H]=c +end +local 
hextobyte=P(2)/hex_to_byte +local bytetoHEX=P(1)/byte_to_HEX +local bytetohex=P(1)/byte_to_hex +local bytetodec=P(1)/byte_to_dec +local hextobytes=Cs(hextobyte^0) +local bytestoHEX=Cs(bytetoHEX^0) +local bytestohex=Cs(bytetohex^0) +local bytestodec=Cs(bytetodec^0) +patterns.hextobyte=hextobyte +patterns.bytetoHEX=bytetoHEX +patterns.bytetohex=bytetohex +patterns.bytetodec=bytetodec +patterns.hextobytes=hextobytes +patterns.bytestoHEX=bytestoHEX +patterns.bytestohex=bytestohex +patterns.bytestodec=bytestodec +function string.toHEX(s) + if not s or s=="" then + return s + else + return lpegmatch(bytestoHEX,s) + end +end +function string.tohex(s) + if not s or s=="" then + return s + else + return lpegmatch(bytestohex,s) + end +end +function string.todec(s) + if not s or s=="" then + return s + else + return lpegmatch(bytestodec,s) + end +end +function string.tobytes(s) + if not s or s=="" then + return s + else + return lpegmatch(hextobytes,s) + end +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['l-functions']={ + version=1.001, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +functions=functions or {} +function functions.dummy() end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['l-string']={ + version=1.001, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local string=string +local sub,gmatch,format,char,byte,rep,lower=string.sub,string.gmatch,string.format,string.char,string.byte,string.rep,string.lower +local lpegmatch,patterns=lpeg.match,lpeg.patterns +local P,S,C,Ct,Cc,Cs=lpeg.P,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cc,lpeg.Cs +local unquoted=patterns.squote*C(patterns.nosquote)*patterns.squote+patterns.dquote*C(patterns.nodquote)*patterns.dquote +function string.unquoted(str) + return lpegmatch(unquoted,str) or str +end +function string.quoted(str) + return format("%q",str) +end +function string.count(str,pattern) + local n=0 + for _ in gmatch(str,pattern) do + n=n+1 + end + return n +end +function string.limit(str,n,sentinel) + if #str>n then + sentinel=sentinel or "..." 
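+  -- truncate to n minus the sentinel length, then append the sentinel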
+ return sub(str,1,(n-#sentinel))..sentinel + else + return str + end +end +local stripper=patterns.stripper +local fullstripper=patterns.fullstripper +local collapser=patterns.collapser +local longtostring=patterns.longtostring +function string.strip(str) + return lpegmatch(stripper,str) or "" +end +function string.fullstrip(str) + return lpegmatch(fullstripper,str) or "" +end +function string.collapsespaces(str) + return lpegmatch(collapser,str) or "" +end +function string.longtostring(str) + return lpegmatch(longtostring,str) or "" +end +local pattern=P(" ")^0*P(-1) +function string.is_empty(str) + if str=="" then + return true + else + return lpegmatch(pattern,str) and true or false + end +end +local anything=patterns.anything +local allescapes=Cc("%")*S(".-+%?()[]*") +local someescapes=Cc("%")*S(".-+%()[]") +local matchescapes=Cc(".")*S("*?") +local pattern_a=Cs ((allescapes+anything )^0 ) +local pattern_b=Cs ((someescapes+matchescapes+anything )^0 ) +local pattern_c=Cs (Cc("^")*(someescapes+matchescapes+anything )^0*Cc("$") ) +function string.escapedpattern(str,simple) + return lpegmatch(simple and pattern_b or pattern_a,str) +end +function string.topattern(str,lowercase,strict) + if str=="" or type(str)~="string" then + return ".*" + elseif strict then + str=lpegmatch(pattern_c,str) + else + str=lpegmatch(pattern_b,str) + end + if lowercase then + return lower(str) + else + return str + end +end +function string.valid(str,default) + return (type(str)=="string" and str~="" and str) or default or nil +end +string.itself=function(s) return s end +local pattern=Ct(C(1)^0) +function string.totable(str) + return lpegmatch(pattern,str) +end +local replacer=lpeg.replacer("@","%%") +function string.tformat(fmt,...) + return format(lpegmatch(replacer,fmt),...) 
+end +string.quote=string.quoted +string.unquote=string.unquoted + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['l-table']={ + version=1.001, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local type,next,tostring,tonumber,ipairs,select=type,next,tostring,tonumber,ipairs,select +local table,string=table,string +local concat,sort,insert,remove=table.concat,table.sort,table.insert,table.remove +local format,lower,dump=string.format,string.lower,string.dump +local getmetatable,setmetatable=getmetatable,setmetatable +local getinfo=debug.getinfo +local lpegmatch,patterns=lpeg.match,lpeg.patterns +local floor=math.floor +local stripper=patterns.stripper +function table.strip(tab) + local lst,l={},0 + for i=1,#tab do + local s=lpegmatch(stripper,tab[i]) or "" + if s=="" then + else + l=l+1 + lst[l]=s + end + end + return lst +end +function table.keys(t) + if t then + local keys,k={},0 + for key,_ in next,t do + k=k+1 + keys[k]=key + end + return keys + else + return {} + end +end +local function compare(a,b) + local ta,tb=type(a),type(b) + if ta==tb then + return a0 then + local n=0 + for _,v in next,t do + n=n+1 + end + if n==#t then + local tt,nt={},0 + for i=1,#t do + local v=t[i] + local tv=type(v) + if tv=="number" then + nt=nt+1 + if hexify then + tt[nt]=format("0x%X",v) + else + tt[nt]=tostring(v) + end + elseif tv=="string" then + nt=nt+1 + tt[nt]=format("%q",v) + elseif tv=="boolean" then + nt=nt+1 + tt[nt]=v and "true" or "false" + else + tt=nil + break + end + end + return tt + end + end + return nil +end +local propername=patterns.propername +local function dummy() end +local function do_serialize(root,name,depth,level,indexed) + if level>0 then + depth=depth.." 
" + if indexed then + handle(format("%s{",depth)) + else + local tn=type(name) + if tn=="number" then + if hexify then + handle(format("%s[0x%X]={",depth,name)) + else + handle(format("%s[%s]={",depth,name)) + end + elseif tn=="string" then + if noquotes and not reserved[name] and lpegmatch(propername,name) then + handle(format("%s%s={",depth,name)) + else + handle(format("%s[%q]={",depth,name)) + end + elseif tn=="boolean" then + handle(format("%s[%s]={",depth,name and "true" or "false")) + else + handle(format("%s{",depth)) + end + end + end + if root and next(root) then + local first,last=nil,0 + if compact then + last=#root + for k=1,last do + if root[k]==nil then + last=k-1 + break + end + end + if last>0 then + first=1 + end + end + local sk=sortedkeys(root) + for i=1,#sk do + local k=sk[i] + local v=root[k] + local tv,tk=type(v),type(k) + if compact and first and tk=="number" and k>=first and k<=last then + if tv=="number" then + if hexify then + handle(format("%s 0x%X,",depth,v)) + else + handle(format("%s %s,",depth,v)) + end + elseif tv=="string" then + if reduce and tonumber(v) then + handle(format("%s %s,",depth,v)) + else + handle(format("%s %q,",depth,v)) + end + elseif tv=="table" then + if not next(v) then + handle(format("%s {},",depth)) + elseif inline then + local st=simple_table(v) + if st then + handle(format("%s { %s },",depth,concat(st,", "))) + else + do_serialize(v,k,depth,level+1,true) + end + else + do_serialize(v,k,depth,level+1,true) + end + elseif tv=="boolean" then + handle(format("%s %s,",depth,v and "true" or "false")) + elseif tv=="function" then + if functions then + handle(format('%s load(%q),',depth,dump(v))) + else + handle(format('%s "function",',depth)) + end + else + handle(format("%s %q,",depth,tostring(v))) + end + elseif k=="__p__" then + if false then + handle(format("%s __p__=nil,",depth)) + end + elseif tv=="number" then + if tk=="number" then + if hexify then + handle(format("%s [0x%X]=0x%X,",depth,k,v)) + else + handle(format("%s [%s]=%s,",depth,k,v)) + end + elseif tk=="boolean" then + if hexify then + handle(format("%s [%s]=0x%X,",depth,k and "true" or "false",v)) + else + handle(format("%s [%s]=%s,",depth,k and "true" or "false",v)) + end + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + if hexify then + handle(format("%s %s=0x%X,",depth,k,v)) + else + handle(format("%s %s=%s,",depth,k,v)) + end + else + if hexify then + handle(format("%s [%q]=0x%X,",depth,k,v)) + else + handle(format("%s [%q]=%s,",depth,k,v)) + end + end + elseif tv=="string" then + if reduce and tonumber(v) then + if tk=="number" then + if hexify then + handle(format("%s [0x%X]=%s,",depth,k,v)) + else + handle(format("%s [%s]=%s,",depth,k,v)) + end + elseif tk=="boolean" then + handle(format("%s [%s]=%s,",depth,k and "true" or "false",v)) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s=%s,",depth,k,v)) + else + handle(format("%s [%q]=%s,",depth,k,v)) + end + else + if tk=="number" then + if hexify then + handle(format("%s [0x%X]=%q,",depth,k,v)) + else + handle(format("%s [%s]=%q,",depth,k,v)) + end + elseif tk=="boolean" then + handle(format("%s [%s]=%q,",depth,k and "true" or "false",v)) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s=%q,",depth,k,v)) + else + handle(format("%s [%q]=%q,",depth,k,v)) + end + end + elseif tv=="table" then + if not next(v) then + if tk=="number" then + if hexify then + handle(format("%s [0x%X]={},",depth,k)) + else + 
handle(format("%s [%s]={},",depth,k)) + end + elseif tk=="boolean" then + handle(format("%s [%s]={},",depth,k and "true" or "false")) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s={},",depth,k)) + else + handle(format("%s [%q]={},",depth,k)) + end + elseif inline then + local st=simple_table(v) + if st then + if tk=="number" then + if hexify then + handle(format("%s [0x%X]={ %s },",depth,k,concat(st,", "))) + else + handle(format("%s [%s]={ %s },",depth,k,concat(st,", "))) + end + elseif tk=="boolean" then + handle(format("%s [%s]={ %s },",depth,k and "true" or "false",concat(st,", "))) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s={ %s },",depth,k,concat(st,", "))) + else + handle(format("%s [%q]={ %s },",depth,k,concat(st,", "))) + end + else + do_serialize(v,k,depth,level+1) + end + else + do_serialize(v,k,depth,level+1) + end + elseif tv=="boolean" then + if tk=="number" then + if hexify then + handle(format("%s [0x%X]=%s,",depth,k,v and "true" or "false")) + else + handle(format("%s [%s]=%s,",depth,k,v and "true" or "false")) + end + elseif tk=="boolean" then + handle(format("%s [%s]=%s,",depth,tostring(k),v and "true" or "false")) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s=%s,",depth,k,v and "true" or "false")) + else + handle(format("%s [%q]=%s,",depth,k,v and "true" or "false")) + end + elseif tv=="function" then + if functions then + local f=getinfo(v).what=="C" and dump(dummy) or dump(v) + if tk=="number" then + if hexify then + handle(format("%s [0x%X]=load(%q),",depth,k,f)) + else + handle(format("%s [%s]=load(%q),",depth,k,f)) + end + elseif tk=="boolean" then + handle(format("%s [%s]=load(%q),",depth,k and "true" or "false",f)) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s=load(%q),",depth,k,f)) + else + handle(format("%s [%q]=load(%q),",depth,k,f)) + end + end + else + if tk=="number" then + if hexify then + handle(format("%s [0x%X]=%q,",depth,k,tostring(v))) + else + handle(format("%s [%s]=%q,",depth,k,tostring(v))) + end + elseif tk=="boolean" then + handle(format("%s [%s]=%q,",depth,k and "true" or "false",tostring(v))) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s=%q,",depth,k,tostring(v))) + else + handle(format("%s [%q]=%q,",depth,k,tostring(v))) + end + end + end + end + if level>0 then + handle(format("%s},",depth)) + end +end +local function serialize(_handle,root,name,specification) + local tname=type(name) + if type(specification)=="table" then + noquotes=specification.noquotes + hexify=specification.hexify + handle=_handle or specification.handle or print + reduce=specification.reduce or false + functions=specification.functions + compact=specification.compact + inline=specification.inline and compact + if functions==nil then + functions=true + end + if compact==nil then + compact=true + end + if inline==nil then + inline=compact + end + else + noquotes=false + hexify=false + handle=_handle or print + reduce=false + compact=true + inline=true + functions=true + end + if tname=="string" then + if name=="return" then + handle("return {") + else + handle(name.."={") + end + elseif tname=="number" then + if hexify then + handle(format("[0x%X]={",name)) + else + handle("["..name.."]={") + end + elseif tname=="boolean" then + if name then + handle("return {") + else + handle("{") + end + else + handle("t={") + end + if root then + 
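+  -- a dummy index first, so that tables with delayed (metatable driven)
+  -- initialization get filled in before they are serialized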
if getmetatable(root) then + local dummy=root._w_h_a_t_e_v_e_r_ + root._w_h_a_t_e_v_e_r_=nil + end + if next(root) then + do_serialize(root,name,"",0) + end + end + handle("}") +end +function table.serialize(root,name,specification) + local t,n={},0 + local function flush(s) + n=n+1 + t[n]=s + end + serialize(flush,root,name,specification) + return concat(t,"\n") +end +table.tohandle=serialize +local maxtab=2*1024 +function table.tofile(filename,root,name,specification) + local f=io.open(filename,'w') + if f then + if maxtab>1 then + local t,n={},0 + local function flush(s) + n=n+1 + t[n]=s + if n>maxtab then + f:write(concat(t,"\n"),"\n") + t,n={},0 + end + end + serialize(flush,root,name,specification) + f:write(concat(t,"\n"),"\n") + else + local function flush(s) + f:write(s,"\n") + end + serialize(flush,root,name,specification) + end + f:close() + io.flush() + end +end +local function flattened(t,f,depth) + if f==nil then + f={} + depth=0xFFFF + elseif tonumber(f) then + depth=f + f={} + elseif not depth then + depth=0xFFFF + end + for k,v in next,t do + if type(k)~="number" then + if depth>0 and type(v)=="table" then + flattened(v,f,depth-1) + else + f[#f+1]=v + end + end + end + for k=1,#t do + local v=t[k] + if depth>0 and type(v)=="table" then + flattened(v,f,depth-1) + else + f[#f+1]=v + end + end + return f +end +table.flattened=flattened +local function unnest(t,f) + if not f then + f={} + end + for i=1,#t do + local v=t[i] + if type(v)=="table" then + if type(v[1])=="table" then + unnest(v,f) + else + f[#f+1]=v + end + else + f[#f+1]=v + end + end + return f +end +function table.unnest(t) + return unnest(t) +end +local function are_equal(a,b,n,m) + if a and b and #a==#b then + n=n or 1 + m=m or #a + for i=n,m do + local ai,bi=a[i],b[i] + if ai==bi then + elseif type(ai)=="table" and type(bi)=="table" then + if not are_equal(ai,bi) then + return false + end + else + return false + end + end + return true + else + return false + end +end +local function identical(a,b) + for ka,va in next,a do + local vb=b[ka] + if va==vb then + elseif type(va)=="table" and type(vb)=="table" then + if not identical(va,vb) then + return false + end + else + return false + end + end + return true +end +table.identical=identical +table.are_equal=are_equal +local function sparse(old,nest,keeptables) + local new={} + for k,v in next,old do + if not (v=="" or v==false) then + if nest and type(v)=="table" then + v=sparse(v,nest) + if keeptables or next(v) then + new[k]=v + end + else + new[k]=v + end + end + end + return new +end +table.sparse=sparse +function table.compact(t) + return sparse(t,true,true) +end +function table.contains(t,v) + if t then + for i=1,#t do + if t[i]==v then + return i + end + end + end + return false +end +function table.count(t) + local n=0 + for k,v in next,t do + n=n+1 + end + return n +end +function table.swapped(t,s) + local n={} + if s then + for k,v in next,s do + n[k]=v + end + end + for k,v in next,t do + n[v]=k + end + return n +end +function table.mirrored(t) + local n={} + for k,v in next,t do + n[v]=k + n[k]=v + end + return n +end +function table.reversed(t) + if t then + local tt,tn={},#t + if tn>0 then + local ttn=0 + for i=tn,1,-1 do + ttn=ttn+1 + tt[ttn]=t[i] + end + end + return tt + end +end +function table.reverse(t) + if t then + local n=#t + for i=1,floor(n/2) do + local j=n-i+1 + t[i],t[j]=t[j],t[i] + end + return t + end +end +function table.sequenced(t,sep,simple) + if not t then + return "" + end + local n=#t + local s={} + if n>0 then + for i=1,n 
do + s[i]=tostring(t[i]) + end + else + n=0 + for k,v in sortedhash(t) do + if simple then + if v==true then + n=n+1 + s[n]=k + elseif v and v~="" then + n=n+1 + s[n]=k.."="..tostring(v) + end + else + n=n+1 + s[n]=k.."="..tostring(v) + end + end + end + return concat(s,sep or " | ") +end +function table.print(t,...) + if type(t)~="table" then + print(tostring(t)) + else + serialize(print,t,...) + end +end +if setinspector then + setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end) +end +function table.sub(t,i,j) + return { unpack(t,i,j) } +end +function table.is_empty(t) + return not t or not next(t) +end +function table.has_one_entry(t) + return t and not next(t,next(t)) +end +function table.loweredkeys(t) + local l={} + for k,v in next,t do + l[lower(k)]=v + end + return l +end +function table.unique(old) + local hash={} + local new={} + local n=0 + for i=1,#old do + local oi=old[i] + if not hash[oi] then + n=n+1 + new[n]=oi + hash[oi]=true + end + end + return new +end +function table.sorted(t,...) + sort(t,...) + return t +end +function table.values(t,s) + if t then + local values,keys,v={},{},0 + for key,value in next,t do + if not keys[value] then + v=v+1 + values[v]=value + keys[k]=key + end + end + if s then + sort(values) + end + return values + else + return {} + end +end +function table.filtered(t,pattern,sort,cmp) + if t and type(pattern)=="string" then + if sort then + local s + if cmp then + s=sortedhashkeys(t,function(a,b) return cmp(t,a,b) end) + else + s=sortedkeys(t) + end + local n=0 + local m=#s + local function kv(s) + while n16*1024*1024 then + step=16*1024*1024 + else + step=floor(size/(1024*1024))*1024*1024/8 + end + local data={} + while true do + local r=f:read(step) + if not r then + return concat(data) + else + data[#data+1]=r + end + end + end +end +io.readall=readall +function io.loaddata(filename,textmode) + local f=io.open(filename,(textmode and 'r') or 'rb') + if f then + local data=readall(f) + f:close() + if #data>0 then + return data + end + end +end +function io.savedata(filename,data,joiner) + local f=io.open(filename,"wb") + if f then + if type(data)=="table" then + f:write(concat(data,joiner or "")) + elseif type(data)=="function" then + data(f) + else + f:write(data or "") + end + f:close() + io.flush() + return true + else + return false + end +end +function io.loadlines(filename,n) + local f=io.open(filename,'r') + if not f then + elseif n then + local lines={} + for i=1,n do + local line=f:read("*lines") + if line then + lines[#lines+1]=line + else + break + end + end + f:close() + lines=concat(lines,"\n") + if #lines>0 then + return lines + end + else + local line=f:read("*line") or "" + f:close() + if #line>0 then + return line + end + end +end +function io.loadchunk(filename,n) + local f=io.open(filename,'rb') + if f then + local data=f:read(n or 1024) + f:close() + if #data>0 then + return data + end + end +end +function io.exists(filename) + local f=io.open(filename) + if f==nil then + return false + else + f:close() + return true + end +end +function io.size(filename) + local f=io.open(filename) + if f==nil then + return 0 + else + local s=f:seek("end") + f:close() + return s + end +end +function io.noflines(f) + if type(f)=="string" then + local f=io.open(filename) + if f then + local n=f and io.noflines(f) or 0 + f:close() + return n + else + return 0 + end + else + local n=0 + for _ in f:lines() do + n=n+1 + end + f:seek('set',0) + return n + end +end +local nextchar={ + [ 4]=function(f) + 
return f:read(1,1,1,1) + end, + [ 2]=function(f) + return f:read(1,1) + end, + [ 1]=function(f) + return f:read(1) + end, + [-2]=function(f) + local a,b=f:read(1,1) + return b,a + end, + [-4]=function(f) + local a,b,c,d=f:read(1,1,1,1) + return d,c,b,a + end +} +function io.characters(f,n) + if f then + return nextchar[n or 1],f + end +end +local nextbyte={ + [4]=function(f) + local a,b,c,d=f:read(1,1,1,1) + if d then + return byte(a),byte(b),byte(c),byte(d) + end + end, + [3]=function(f) + local a,b,c=f:read(1,1,1) + if b then + return byte(a),byte(b),byte(c) + end + end, + [2]=function(f) + local a,b=f:read(1,1) + if b then + return byte(a),byte(b) + end + end, + [1]=function (f) + local a=f:read(1) + if a then + return byte(a) + end + end, + [-2]=function (f) + local a,b=f:read(1,1) + if b then + return byte(b),byte(a) + end + end, + [-3]=function(f) + local a,b,c=f:read(1,1,1) + if b then + return byte(c),byte(b),byte(a) + end + end, + [-4]=function(f) + local a,b,c,d=f:read(1,1,1,1) + if d then + return byte(d),byte(c),byte(b),byte(a) + end + end +} +function io.bytes(f,n) + if f then + return nextbyte[n or 1],f + else + return nil,nil + end +end +function io.ask(question,default,options) + while true do + io.write(question) + if options then + io.write(format(" [%s]",concat(options,"|"))) + end + if default then + io.write(format(" [%s]",default)) + end + io.write(format(" ")) + io.flush() + local answer=io.read() + answer=gsub(answer,"^%s*(.*)%s*$","%1") + if answer=="" and default then + return default + elseif not options then + return answer + else + for k=1,#options do + if options[k]==answer then + return answer + end + end + local pattern="^"..answer + for k=1,#options do + local v=options[k] + if find(v,pattern) then + return v + end + end + end + end +end +local function readnumber(f,n,m) + if m then + f:seek("set",n) + n=m + end + if n==1 then + return byte(f:read(1)) + elseif n==2 then + local a,b=byte(f:read(2),1,2) + return 256*a+b + elseif n==3 then + local a,b,c=byte(f:read(3),1,3) + return 256*256*a+256*b+c + elseif n==4 then + local a,b,c,d=byte(f:read(4),1,4) + return 256*256*256*a+256*256*b+256*c+d + elseif n==8 then + local a,b=readnumber(f,4),readnumber(f,4) + return 256*a+b + elseif n==12 then + local a,b,c=readnumber(f,4),readnumber(f,4),readnumber(f,4) + return 256*256*a+256*b+c + elseif n==-2 then + local b,a=byte(f:read(2),1,2) + return 256*a+b + elseif n==-3 then + local c,b,a=byte(f:read(3),1,3) + return 256*256*a+256*b+c + elseif n==-4 then + local d,c,b,a=byte(f:read(4),1,4) + return 256*256*256*a+256*256*b+256*c+d + elseif n==-8 then + local h,g,f,e,d,c,b,a=byte(f:read(8),1,8) + return 256*256*256*256*256*256*256*a+256*256*256*256*256*256*b+256*256*256*256*256*c+256*256*256*256*d+256*256*256*e+256*256*f+256*g+h + else + return 0 + end +end +io.readnumber=readnumber +function io.readstring(f,n,m) + if m then + f:seek("set",n) + n=m + end + local str=gsub(f:read(n),"\000","") + return str +end +if not io.i_limiter then function io.i_limiter() end end +if not io.o_limiter then function io.o_limiter() end end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['l-file']={ + version=1.001, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +file=file or {} +local file=file +if not lfs then + lfs=optionalrequire("lfs") +end +if not lfs then + lfs={ 
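+ -- minimal fallback stub, only used when the lfs library itself cannot be loaded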
+ getcurrentdir=function() + return "." + end, + attributes=function() + return nil + end, + isfile=function(name) + local f=io.open(name,'rb') + if f then + f:close() + return true + end + end, + isdir=function(name) + print("you need to load lfs") + return false + end + } +elseif not lfs.isfile then + local attributes=lfs.attributes + function lfs.isdir(name) + return attributes(name,"mode")=="directory" + end + function lfs.isfile(name) + return attributes(name,"mode")=="file" + end +end +local insert,concat=table.insert,table.concat +local match,find,gmatch=string.match,string.find,string.gmatch +local lpegmatch=lpeg.match +local getcurrentdir,attributes=lfs.currentdir,lfs.attributes +local checkedsplit=string.checkedsplit +local P,R,S,C,Cs,Cp,Cc,Ct=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Cs,lpeg.Cp,lpeg.Cc,lpeg.Ct +local colon=P(":") +local period=P(".") +local periods=P("..") +local fwslash=P("/") +local bwslash=P("\\") +local slashes=S("\\/") +local noperiod=1-period +local noslashes=1-slashes +local name=noperiod^1 +local suffix=period/""*(1-period-slashes)^1*-1 +local pattern=C((1-(slashes^1*noslashes^1*-1))^1)*P(1) +local function pathpart(name,default) + return name and lpegmatch(pattern,name) or default or "" +end +local pattern=(noslashes^0*slashes)^1*C(noslashes^1)*-1 +local function basename(name) + return name and lpegmatch(pattern,name) or name +end +local pattern=(noslashes^0*slashes^1)^0*Cs((1-suffix)^1)*suffix^0 +local function nameonly(name) + return name and lpegmatch(pattern,name) or name +end +local pattern=(noslashes^0*slashes)^0*(noperiod^1*period)^1*C(noperiod^1)*-1 +local function suffixonly(name) + return name and lpegmatch(pattern,name) or "" +end +local pattern=(noslashes^0*slashes)^0*noperiod^1*((period*C(noperiod^1))^1)*-1+Cc("") +local function suffixesonly(name) + if name then + return lpegmatch(pattern,name) + else + return "" + end +end +file.pathpart=pathpart +file.basename=basename +file.nameonly=nameonly +file.suffixonly=suffixonly +file.suffix=suffixonly +file.suffixesonly=suffixesonly +file.suffixes=suffixesonly +file.dirname=pathpart +file.extname=suffixonly +local drive=C(R("az","AZ"))*colon +local path=C((noslashes^0*slashes)^0) +local suffix=period*C(P(1-period)^0*P(-1)) +local base=C((1-suffix)^0) +local rest=C(P(1)^0) +drive=drive+Cc("") +path=path+Cc("") +base=base+Cc("") +suffix=suffix+Cc("") +local pattern_a=drive*path*base*suffix +local pattern_b=path*base*suffix +local pattern_c=C(drive*path)*C(base*suffix) +local pattern_d=path*rest +function file.splitname(str,splitdrive) + if not str then + elseif splitdrive then + return lpegmatch(pattern_a,str) + else + return lpegmatch(pattern_b,str) + end +end +function file.splitbase(str) + if str then + return lpegmatch(pattern_d,str) + else + return "",str + end +end +function file.nametotable(str,splitdrive) + if str then + local path,drive,subpath,name,base,suffix=lpegmatch(pattern_c,str) + if splitdrive then + return { + path=path, + drive=drive, + subpath=subpath, + name=name, + base=base, + suffix=suffix, + } + else + return { + path=path, + name=name, + base=base, + suffix=suffix, + } + end + end +end +local pattern=Cs(((period*(1-period-slashes)^1*-1)/""+1)^1) +function file.removesuffix(name) + return name and lpegmatch(pattern,name) +end +local suffix=period/""*(1-period-slashes)^1*-1 +local pattern=Cs((noslashes^0*slashes^1)^0*((1-suffix)^1))*Cs(suffix) +function file.addsuffix(filename,suffix,criterium) + if not filename or not suffix or suffix=="" then + return filename + elseif 
criterium==true then + return filename.."."..suffix + elseif not criterium then + local n,s=lpegmatch(pattern,filename) + if not s or s=="" then + return filename.."."..suffix + else + return filename + end + else + local n,s=lpegmatch(pattern,filename) + if s and s~="" then + local t=type(criterium) + if t=="table" then + for i=1,#criterium do + if s==criterium[i] then + return filename + end + end + elseif t=="string" then + if s==criterium then + return filename + end + end + end + return (n or filename).."."..suffix + end +end +local suffix=period*(1-period-slashes)^1*-1 +local pattern=Cs((1-suffix)^0) +function file.replacesuffix(name,suffix) + if name and suffix and suffix~="" then + return lpegmatch(pattern,name).."."..suffix + else + return name + end +end +local reslasher=lpeg.replacer(P("\\"),"/") +function file.reslash(str) + return str and lpegmatch(reslasher,str) +end +function file.is_writable(name) + if not name then + elseif lfs.isdir(name) then + name=name.."/m_t_x_t_e_s_t.tmp" + local f=io.open(name,"wb") + if f then + f:close() + os.remove(name) + return true + end + elseif lfs.isfile(name) then + local f=io.open(name,"ab") + if f then + f:close() + return true + end + else + local f=io.open(name,"ab") + if f then + f:close() + os.remove(name) + return true + end + end + return false +end +local readable=P("r")*Cc(true) +function file.is_readable(name) + if name then + local a=attributes(name) + return a and lpegmatch(readable,a.permissions) or false + else + return false + end +end +file.isreadable=file.is_readable +file.iswritable=file.is_writable +function file.size(name) + if name then + local a=attributes(name) + return a and a.size or 0 + else + return 0 + end +end +function file.splitpath(str,separator) + return str and checkedsplit(lpegmatch(reslasher,str),separator or io.pathseparator) +end +function file.joinpath(tab,separator) + return tab and concat(tab,separator or io.pathseparator) +end +local someslash=S("\\/") +local stripper=Cs(P(fwslash)^0/""*reslasher) +local isnetwork=someslash*someslash*(1-someslash)+(1-fwslash-colon)^1*colon +local isroot=fwslash^1*-1 +local hasroot=fwslash^1 +local reslasher=lpeg.replacer(S("\\/"),"/") +local deslasher=lpeg.replacer(S("\\/")^1,"/") +function file.join(one,two,three,...) + if not two then + return one=="" and one or lpegmatch(stripper,one) + end + if one=="" then + return lpegmatch(stripper,three and concat({ two,three,... },"/") or two) + end + if lpegmatch(isnetwork,one) then + local one=lpegmatch(reslasher,one) + local two=lpegmatch(deslasher,three and concat({ two,three,... },"/") or two) + if lpegmatch(hasroot,two) then + return one..two + else + return one.."/"..two + end + elseif lpegmatch(isroot,one) then + local two=lpegmatch(deslasher,three and concat({ two,three,... },"/") or two) + if lpegmatch(hasroot,two) then + return two + else + return "/"..two + end + else + return lpegmatch(deslasher,concat({ one,two,three,... },"/")) + end +end +local drivespec=R("az","AZ")^1*colon +local anchors=fwslash+drivespec +local untouched=periods+(1-period)^1*P(-1) +local mswindrive=Cs(drivespec*(bwslash/"/"+fwslash)^0) +local mswinuncpath=(bwslash+fwslash)*(bwslash+fwslash)*Cc("//") +local splitstarter=(mswindrive+mswinuncpath+Cc(false))*Ct(lpeg.splitat(S("/\\")^1)) +local absolute=fwslash +function file.collapsepath(str,anchor) + if not str then + return + end + if anchor==true and not lpegmatch(anchors,str) then + str=getcurrentdir().."/"..str + end + if str=="" or str=="." then + return "." 
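+  -- paths matching 'untouched' need no collapsing and are only reslashed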
+ elseif lpegmatch(untouched,str) then + return lpegmatch(reslasher,str) + end + local starter,oldelements=lpegmatch(splitstarter,str) + local newelements={} + local i=#oldelements + while i>0 do + local element=oldelements[i] + if element=='.' then + elseif element=='..' then + local n=i-1 + while n>0 do + local element=oldelements[n] + if element~='..' and element~='.' then + oldelements[n]='.' + break + else + n=n-1 + end + end + if n<1 then + insert(newelements,1,'..') + end + elseif element~="" then + insert(newelements,1,element) + end + i=i-1 + end + if #newelements==0 then + return starter or "." + elseif starter then + return starter..concat(newelements,'/') + elseif lpegmatch(absolute,str) then + return "/"..concat(newelements,'/') + else + newelements=concat(newelements,'/') + if anchor=="." and find(str,"^%./") then + return "./"..newelements + else + return newelements + end + end +end +local tricky=S("/\\")*P(-1) +local attributes=lfs.attributes +function lfs.isdir(name) + if lpegmatch(tricky,name) then + return attributes(name,"mode")=="directory" + else + return attributes(name.."/.","mode")=="directory" + end +end +function lfs.isfile(name) + return attributes(name,"mode")=="file" +end +local validchars=R("az","09","AZ","--","..") +local pattern_a=lpeg.replacer(1-validchars) +local pattern_a=Cs((validchars+P(1)/"-")^1) +local whatever=P("-")^0/"" +local pattern_b=Cs(whatever*(1-whatever*-1)^1) +function file.robustname(str,strict) + if str then + str=lpegmatch(pattern_a,str) or str + if strict then + return lpegmatch(pattern_b,str) or str + else + return str + end + end +end +file.readdata=io.loaddata +file.savedata=io.savedata +function file.copy(oldname,newname) + if oldname and newname then + local data=io.loaddata(oldname) + if data and data~="" then + file.savedata(newname,data) + end + end +end +local letter=R("az","AZ")+S("_-+") +local separator=P("://") +local qualified=period^0*fwslash+letter*colon+letter^1*separator+letter^1*fwslash +local rootbased=fwslash+letter*colon +lpeg.patterns.qualified=qualified +lpeg.patterns.rootbased=rootbased +function file.is_qualified_path(filename) + return filename and lpegmatch(qualified,filename)~=nil +end +function file.is_rootbased_path(filename) + return filename and lpegmatch(rootbased,filename)~=nil +end +function file.strip(name,dir) + if name then + local b,a=match(name,"^(.-)"..dir.."(.*)$") + return a~="" and a or name + end +end +function lfs.mkdirs(path) + local full="" + for sub in gmatch(path,"(/*[^\\/]+)") do + full=full..sub + lfs.mkdir(full) + end +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['l-boolean']={ + version=1.001, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local type,tonumber=type,tonumber +boolean=boolean or {} +local boolean=boolean +function boolean.tonumber(b) + if b then return 1 else return 0 end +end +function toboolean(str,tolerant) + if str==nil then + return false + elseif str==false then + return false + elseif str==true then + return true + elseif str=="true" then + return true + elseif str=="false" then + return false + elseif not tolerant then + return false + elseif str==0 then + return false + elseif (tonumber(str) or 0)>0 then + return true + else + return str=="yes" or str=="on" or str=="t" + end +end +string.toboolean=toboolean +function 
string.booleanstring(str) + if str=="0" then + return false + elseif str=="1" then + return true + elseif str=="" then + return false + elseif str=="false" then + return false + elseif str=="true" then + return true + elseif (tonumber(str) or 0)>0 then + return true + else + return str=="yes" or str=="on" or str=="t" + end +end +function string.is_boolean(str,default,strict) + if type(str)=="string" then + if str=="true" or str=="yes" or str=="on" or str=="t" or (not strict and str=="1") then + return true + elseif str=="false" or str=="no" or str=="off" or str=="f" or (not strict and str=="0") then + return false + end + end + return default +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['l-math']={ + version=1.001, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local floor,sin,cos,tan=math.floor,math.sin,math.cos,math.tan +if not math.round then + function math.round(x) return floor(x+0.5) end +end +if not math.div then + function math.div(n,m) return floor(n/m) end +end +if not math.mod then + function math.mod(n,m) return n%m end +end +local pipi=2*math.pi/360 +if not math.sind then + function math.sind(d) return sin(d*pipi) end + function math.cosd(d) return cos(d*pipi) end + function math.tand(d) return tan(d*pipi) end +end +if not math.odd then + function math.odd (n) return n%2~=0 end + function math.even(n) return n%2==0 end +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['util-str']={ + version=1.001, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +utilities=utilities or {} +utilities.strings=utilities.strings or {} +local strings=utilities.strings +local format,gsub,rep,sub=string.format,string.gsub,string.rep,string.sub +local load,dump=load,string.dump +local tonumber,type,tostring=tonumber,type,tostring +local unpack,concat=table.unpack,table.concat +local P,V,C,S,R,Ct,Cs,Cp,Carg,Cc=lpeg.P,lpeg.V,lpeg.C,lpeg.S,lpeg.R,lpeg.Ct,lpeg.Cs,lpeg.Cp,lpeg.Carg,lpeg.Cc +local patterns,lpegmatch=lpeg.patterns,lpeg.match +local utfchar,utfbyte=utf.char,utf.byte +local loadstripped=nil +if _LUAVERSION<5.2 then + loadstripped=function(str,shortcuts) + return load(str) + end +else + loadstripped=function(str,shortcuts) + if shortcuts then + return load(dump(load(str),true),nil,nil,shortcuts) + else + return load(dump(load(str),true)) + end + end +end +if not number then number={} end +local stripper=patterns.stripzeros +local function points(n) + n=tonumber(n) + return (not n or n==0) and "0pt" or lpegmatch(stripper,format("%.5fpt",n/65536)) +end +local function basepoints(n) + n=tonumber(n) + return (not n or n==0) and "0bp" or lpegmatch(stripper,format("%.5fbp",n*(7200/7227)/65536)) +end +number.points=points +number.basepoints=basepoints +local rubish=patterns.spaceortab^0*patterns.newline +local anyrubish=patterns.spaceortab+patterns.newline +local anything=patterns.anything +local stripped=(patterns.spaceortab^1/"")*patterns.newline +local leading=rubish^0/"" +local trailing=(anyrubish^1*patterns.endofstring)/"" +local redundant=rubish^3/"\n" +local pattern=Cs(leading*(trailing+redundant+stripped+anything)^0) +function 
strings.collapsecrlf(str) + return lpegmatch(pattern,str) +end +local repeaters={} +function strings.newrepeater(str,offset) + offset=offset or 0 + local s=repeaters[str] + if not s then + s={} + repeaters[str]=s + end + local t=s[offset] + if t then + return t + end + t={} + setmetatable(t,{ __index=function(t,k) + if not k then + return "" + end + local n=k+offset + local s=n>0 and rep(str,n) or "" + t[k]=s + return s + end }) + s[offset]=t + return t +end +local extra,tab,start=0,0,4,0 +local nspaces=strings.newrepeater(" ") +string.nspaces=nspaces +local pattern=Carg(1)/function(t) + extra,tab,start=0,t or 7,1 + end*Cs(( + Cp()*patterns.tab/function(position) + local current=(position-start+1)+extra + local spaces=tab-(current-1)%tab + if spaces>0 then + extra=extra+spaces-1 + return nspaces[spaces] + else + return "" + end + end+patterns.newline*Cp()/function(position) + extra,start=0,position + end+patterns.anything + )^1) +function strings.tabtospace(str,tab) + return lpegmatch(pattern,str,1,tab or 7) +end +local newline=patterns.newline +local endofstring=patterns.endofstring +local whitespace=patterns.whitespace +local spacer=patterns.spacer +local space=spacer^0 +local nospace=space/"" +local endofline=nospace*newline +local stripend=(whitespace^1*endofstring)/"" +local normalline=(nospace*((1-space*(newline+endofstring))^1)*nospace) +local stripempty=endofline^1/"" +local normalempty=endofline^1 +local singleempty=endofline*(endofline^0/"") +local doubleempty=endofline*endofline^-1*(endofline^0/"") +local stripstart=stripempty^0 +local p_prune_normal=Cs (stripstart*(stripend+normalline+normalempty )^0 ) +local p_prune_collapse=Cs (stripstart*(stripend+normalline+doubleempty )^0 ) +local p_prune_noempty=Cs (stripstart*(stripend+normalline+singleempty )^0 ) +local p_retain_normal=Cs ((normalline+normalempty )^0 ) +local p_retain_collapse=Cs ((normalline+doubleempty )^0 ) +local p_retain_noempty=Cs ((normalline+singleempty )^0 ) +local striplinepatterns={ + ["prune"]=p_prune_normal, + ["prune and collapse"]=p_prune_collapse, + ["prune and no empty"]=p_prune_noempty, + ["retain"]=p_retain_normal, + ["retain and collapse"]=p_retain_collapse, + ["retain and no empty"]=p_retain_noempty, + ["collapse"]=patterns.collapser, +} +strings.striplinepatterns=striplinepatterns +function strings.striplines(str,how) + return str and lpegmatch(how and striplinepatterns[how] or p_prune_collapse,str) or str +end +strings.striplong=strings.striplines +function strings.nice(str) + str=gsub(str,"[:%-+_]+"," ") + return str +end +local n=0 +local sequenced=table.sequenced +function string.autodouble(s,sep) + if s==nil then + return '""' + end + local t=type(s) + if t=="number" then + return tostring(s) + end + if t=="table" then + return ('"'..sequenced(s,sep or ",")..'"') + end + return ('"'..tostring(s)..'"') +end +function string.autosingle(s,sep) + if s==nil then + return "''" + end + local t=type(s) + if t=="number" then + return tostring(s) + end + if t=="table" then + return ("'"..sequenced(s,sep or ",").."'") + end + return ("'"..tostring(s).."'") +end +local tracedchars={} +string.tracedchars=tracedchars +strings.tracers=tracedchars +function string.tracedchar(b) + if type(b)=="number" then + return tracedchars[b] or (utfchar(b).." (U+"..format('%05X',b)..")") + else + local c=utfbyte(b) + return tracedchars[c] or (b.." 
(U+"..format('%05X',c)..")") + end +end +function number.signed(i) + if i>0 then + return "+",i + else + return "-",-i + end +end +local zero=P("0")^1/"" +local plus=P("+")/"" +local minus=P("-") +local separator=S(".") +local digit=R("09") +local trailing=zero^1*#S("eE") +local exponent=(S("eE")*(plus+Cs((minus*zero^0*P(-1))/"")+minus)*zero^0*(P(-1)*Cc("0")+P(1)^1)) +local pattern_a=Cs(minus^0*digit^1*(separator/""*trailing+separator*(trailing+digit)^0)*exponent) +local pattern_b=Cs((exponent+P(1))^0) +function number.sparseexponent(f,n) + if not n then + n=f + f="%e" + end + local tn=type(n) + if tn=="string" then + local m=tonumber(n) + if m then + return lpegmatch((f=="%e" or f=="%E") and pattern_a or pattern_b,format(f,m)) + end + elseif tn=="number" then + return lpegmatch((f=="%e" or f=="%E") and pattern_a or pattern_b,format(f,n)) + end + return tostring(n) +end +local template=[[ +%s +%s +return function(%s) return %s end +]] +local preamble,environment="",{} +if _LUAVERSION<5.2 then + preamble=[[ +local lpeg=lpeg +local type=type +local tostring=tostring +local tonumber=tonumber +local format=string.format +local concat=table.concat +local signed=number.signed +local points=number.points +local basepoints= number.basepoints +local utfchar=utf.char +local utfbyte=utf.byte +local lpegmatch=lpeg.match +local nspaces=string.nspaces +local tracedchar=string.tracedchar +local autosingle=string.autosingle +local autodouble=string.autodouble +local sequenced=table.sequenced +local formattednumber=number.formatted +local sparseexponent=number.sparseexponent + ]] +else + environment={ + global=global or _G, + lpeg=lpeg, + type=type, + tostring=tostring, + tonumber=tonumber, + format=string.format, + concat=table.concat, + signed=number.signed, + points=number.points, + basepoints=number.basepoints, + utfchar=utf.char, + utfbyte=utf.byte, + lpegmatch=lpeg.match, + nspaces=string.nspaces, + tracedchar=string.tracedchar, + autosingle=string.autosingle, + autodouble=string.autodouble, + sequenced=table.sequenced, + formattednumber=number.formatted, + sparseexponent=number.sparseexponent, + } +end +local arguments={ "a1" } +setmetatable(arguments,{ __index=function(t,k) + local v=t[k-1]..",a"..k + t[k]=v + return v + end +}) +local prefix_any=C((S("+- .")+R("09"))^0) +local prefix_tab=P("{")*C((1-P("}"))^0)*P("}")+C((1-R("az","AZ","09","%%"))^0) +local format_s=function(f) + n=n+1 + if f and f~="" then + return format("format('%%%ss',a%s)",f,n) + else + return format("(a%s or '')",n) + end +end +local format_S=function(f) + n=n+1 + if f and f~="" then + return format("format('%%%ss',tostring(a%s))",f,n) + else + return format("tostring(a%s)",n) + end +end +local format_q=function() + n=n+1 + return format("(a%s and format('%%q',a%s) or '')",n,n) +end +local format_Q=function() + n=n+1 + return format("format('%%q',tostring(a%s))",n) +end +local format_i=function(f) + n=n+1 + if f and f~="" then + return format("format('%%%si',a%s)",f,n) + else + return format("format('%%i',a%s)",n) + end +end +local format_d=format_i +local format_I=function(f) + n=n+1 + return format("format('%%s%%%si',signed(a%s))",f,n) +end +local format_f=function(f) + n=n+1 + return format("format('%%%sf',a%s)",f,n) +end +local format_F=function(f) + n=n+1 + if not f or f=="" then + return format("(((a%s > -0.0000000005 and a%s < 0.0000000005) and '0') or format((a%s %% 1 == 0) and '%%i' or '%%.9f',a%s))",n,n,n,n) + else + return format("format((a%s %% 1 == 0) and '%%i' or '%%%sf',a%s)",n,f,n) + end +end +local 
format_g=function(f) + n=n+1 + return format("format('%%%sg',a%s)",f,n) +end +local format_G=function(f) + n=n+1 + return format("format('%%%sG',a%s)",f,n) +end +local format_e=function(f) + n=n+1 + return format("format('%%%se',a%s)",f,n) +end +local format_E=function(f) + n=n+1 + return format("format('%%%sE',a%s)",f,n) +end +local format_j=function(f) + n=n+1 + return format("sparseexponent('%%%se',a%s)",f,n) +end +local format_J=function(f) + n=n+1 + return format("sparseexponent('%%%sE',a%s)",f,n) +end +local format_x=function(f) + n=n+1 + return format("format('%%%sx',a%s)",f,n) +end +local format_X=function(f) + n=n+1 + return format("format('%%%sX',a%s)",f,n) +end +local format_o=function(f) + n=n+1 + return format("format('%%%so',a%s)",f,n) +end +local format_c=function() + n=n+1 + return format("utfchar(a%s)",n) +end +local format_C=function() + n=n+1 + return format("tracedchar(a%s)",n) +end +local format_r=function(f) + n=n+1 + return format("format('%%%s.0f',a%s)",f,n) +end +local format_h=function(f) + n=n+1 + if f=="-" then + f=sub(f,2) + return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) + else + return format("format('0x%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) + end +end +local format_H=function(f) + n=n+1 + if f=="-" then + f=sub(f,2) + return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) + else + return format("format('0x%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) + end +end +local format_u=function(f) + n=n+1 + if f=="-" then + f=sub(f,2) + return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) + else + return format("format('u+%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) + end +end +local format_U=function(f) + n=n+1 + if f=="-" then + f=sub(f,2) + return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) + else + return format("format('U+%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) + end +end +local format_p=function() + n=n+1 + return format("points(a%s)",n) +end +local format_b=function() + n=n+1 + return format("basepoints(a%s)",n) +end +local format_t=function(f) + n=n+1 + if f and f~="" then + return format("concat(a%s,%q)",n,f) + else + return format("concat(a%s)",n) + end +end +local format_T=function(f) + n=n+1 + if f and f~="" then + return format("sequenced(a%s,%q)",n,f) + else + return format("sequenced(a%s)",n) + end +end +local format_l=function() + n=n+1 + return format("(a%s and 'true' or 'false')",n) +end +local format_L=function() + n=n+1 + return format("(a%s and 'TRUE' or 'FALSE')",n) +end +local format_N=function() + n=n+1 + return format("tostring(tonumber(a%s) or a%s)",n,n) +end +local format_a=function(f) + n=n+1 + if f and f~="" then + return format("autosingle(a%s,%q)",n,f) + else + return format("autosingle(a%s)",n) + end +end +local format_A=function(f) + n=n+1 + if f and f~="" then + return format("autodouble(a%s,%q)",n,f) + else + return format("autodouble(a%s)",n) + end +end +local format_w=function(f) + n=n+1 + f=tonumber(f) + if f then + return format("nspaces[%s+a%s]",f,n) + else + return format("nspaces[a%s]",n) + end +end +local format_W=function(f) + return format("nspaces[%s]",tonumber(f) or 0) +end +local digit=patterns.digit +local period=patterns.period +local 
three=digit*digit*digit +local splitter=Cs ( + (((1-(three^1*period))^1+C(three))*(Carg(1)*three)^1+C((1-period)^1))*(P(1)/""*Carg(2))*C(2) +) +patterns.formattednumber=splitter +function number.formatted(n,sep1,sep2) + local s=type(s)=="string" and n or format("%0.2f",n) + if sep1==true then + return lpegmatch(splitter,s,1,".",",") + elseif sep1=="." then + return lpegmatch(splitter,s,1,sep1,sep2 or ",") + elseif sep1=="," then + return lpegmatch(splitter,s,1,sep1,sep2 or ".") + else + return lpegmatch(splitter,s,1,sep1 or ",",sep2 or ".") + end +end +local format_m=function(f) + n=n+1 + if not f or f=="" then + f="," + end + return format([[formattednumber(a%s,%q,".")]],n,f) +end +local format_M=function(f) + n=n+1 + if not f or f=="" then + f="." + end + return format([[formattednumber(a%s,%q,",")]],n,f) +end +local format_z=function(f) + n=n+(tonumber(f) or 1) + return "''" +end +local format_rest=function(s) + return format("%q",s) +end +local format_extension=function(extensions,f,name) + local extension=extensions[name] or "tostring(%s)" + local f=tonumber(f) or 1 + if f==0 then + return extension + elseif f==1 then + n=n+1 + local a="a"..n + return format(extension,a,a) + elseif f<0 then + local a="a"..(n+f+1) + return format(extension,a,a) + else + local t={} + for i=1,f do + n=n+1 + t[#t+1]="a"..n + end + return format(extension,unpack(t)) + end +end +local builder=Cs { "start", + start=( + ( + P("%")/""*( + V("!") ++V("s")+V("q")+V("i")+V("d")+V("f")+V("F")+V("g")+V("G")+V("e")+V("E")+V("x")+V("X")+V("o") ++V("c")+V("C")+V("S") ++V("Q") ++V("N") ++V("r")+V("h")+V("H")+V("u")+V("U")+V("p")+V("b")+V("t")+V("T")+V("l")+V("L")+V("I")+V("w") ++V("W") ++V("a") ++V("A") ++V("j")+V("J") ++V("m")+V("M") ++V("z") + )+V("*") + )*(P(-1)+Carg(1)) + )^0, + ["s"]=(prefix_any*P("s"))/format_s, + ["q"]=(prefix_any*P("q"))/format_q, + ["i"]=(prefix_any*P("i"))/format_i, + ["d"]=(prefix_any*P("d"))/format_d, + ["f"]=(prefix_any*P("f"))/format_f, + ["F"]=(prefix_any*P("F"))/format_F, + ["g"]=(prefix_any*P("g"))/format_g, + ["G"]=(prefix_any*P("G"))/format_G, + ["e"]=(prefix_any*P("e"))/format_e, + ["E"]=(prefix_any*P("E"))/format_E, + ["x"]=(prefix_any*P("x"))/format_x, + ["X"]=(prefix_any*P("X"))/format_X, + ["o"]=(prefix_any*P("o"))/format_o, + ["S"]=(prefix_any*P("S"))/format_S, + ["Q"]=(prefix_any*P("Q"))/format_S, + ["N"]=(prefix_any*P("N"))/format_N, + ["c"]=(prefix_any*P("c"))/format_c, + ["C"]=(prefix_any*P("C"))/format_C, + ["r"]=(prefix_any*P("r"))/format_r, + ["h"]=(prefix_any*P("h"))/format_h, + ["H"]=(prefix_any*P("H"))/format_H, + ["u"]=(prefix_any*P("u"))/format_u, + ["U"]=(prefix_any*P("U"))/format_U, + ["p"]=(prefix_any*P("p"))/format_p, + ["b"]=(prefix_any*P("b"))/format_b, + ["t"]=(prefix_tab*P("t"))/format_t, + ["T"]=(prefix_tab*P("T"))/format_T, + ["l"]=(prefix_any*P("l"))/format_l, + ["L"]=(prefix_any*P("L"))/format_L, + ["I"]=(prefix_any*P("I"))/format_I, + ["w"]=(prefix_any*P("w"))/format_w, + ["W"]=(prefix_any*P("W"))/format_W, + ["j"]=(prefix_any*P("j"))/format_j, + ["J"]=(prefix_any*P("J"))/format_J, + ["m"]=(prefix_tab*P("m"))/format_m, + ["M"]=(prefix_tab*P("M"))/format_M, + ["z"]=(prefix_any*P("z"))/format_z, + ["a"]=(prefix_any*P("a"))/format_a, + ["A"]=(prefix_any*P("A"))/format_A, + ["*"]=Cs(((1-P("%"))^1+P("%%")/"%%")^1)/format_rest, + ["?"]=Cs(((1-P("%"))^1 )^1)/format_rest, + ["!"]=Carg(2)*prefix_any*P("!")*C((1-P("!"))^1)*P("!")/format_extension, +} +local direct=Cs ( + P("%")*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*P(-1)/[[local format = string.format return 
function(str) return format("%0",str) end]] +) +local function make(t,str) + local f + local p + local p=lpegmatch(direct,str) + if p then + f=loadstripped(p)() + else + n=0 + p=lpegmatch(builder,str,1,t._connector_,t._extensions_) + if n>0 then + p=format(template,preamble,t._preamble_,arguments[n],p) + f=loadstripped(p,t._environment_)() + else + f=function() return str end + end + end + t[str]=f + return f +end +local function use(t,fmt,...) + return t[fmt](...) +end +strings.formatters={} +if _LUAVERSION<5.2 then + function strings.formatters.new(noconcat) + local t={ _type_="formatter",_connector_=noconcat and "," or "..",_extensions_={},_preamble_=preamble,_environment_={} } + setmetatable(t,{ __index=make,__call=use }) + return t + end +else + function strings.formatters.new(noconcat) + local e={} + for k,v in next,environment do + e[k]=v + end + local t={ _type_="formatter",_connector_=noconcat and "," or "..",_extensions_={},_preamble_="",_environment_=e } + setmetatable(t,{ __index=make,__call=use }) + return t + end +end +local formatters=strings.formatters.new() +string.formatters=formatters +string.formatter=function(str,...) return formatters[str](...) end +local function add(t,name,template,preamble) + if type(t)=="table" and t._type_=="formatter" then + t._extensions_[name]=template or "%s" + if type(preamble)=="string" then + t._preamble_=preamble.."\n"..t._preamble_ + elseif type(preamble)=="table" then + for k,v in next,preamble do + t._environment_[k]=v + end + end + end +end +strings.formatters.add=add +patterns.xmlescape=Cs((P("<")/"<"+P(">")/">"+P("&")/"&"+P('"')/"""+P(1))^0) +patterns.texescape=Cs((C(S("#$%\\{}"))/"\\%1"+P(1))^0) +patterns.luaescape=Cs(((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0) +patterns.luaquoted=Cs(Cc('"')*((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0*Cc('"')) +if _LUAVERSION<5.2 then + add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],"local xmlescape = lpeg.patterns.xmlescape") + add(formatters,"tex",[[lpegmatch(texescape,%s)]],"local texescape = lpeg.patterns.texescape") + add(formatters,"lua",[[lpegmatch(luaescape,%s)]],"local luaescape = lpeg.patterns.luaescape") +else + add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],{ xmlescape=lpeg.patterns.xmlescape }) + add(formatters,"tex",[[lpegmatch(texescape,%s)]],{ texescape=lpeg.patterns.texescape }) + add(formatters,"lua",[[lpegmatch(luaescape,%s)]],{ luaescape=lpeg.patterns.luaescape }) +end +local dquote=patterns.dquote +local equote=patterns.escaped+dquote/'\\"'+1 +local space=patterns.space +local cquote=Cc('"') +local pattern=Cs(dquote*(equote-P(-2))^0*dquote) ++Cs(cquote*(equote-space)^0*space*equote^0*cquote) +function string.optionalquoted(str) + return lpegmatch(pattern,str) or str +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['luat-basics-gen']={ + version=1.100, + comment="companion to luatex-*.tex", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end +local dummyfunction=function() +end +local dummyreporter=function(c) + return function(...) + (texio.reporter or texio.write_nl)(c.." 
: "..string.formatters(...)) + end +end +statistics={ + register=dummyfunction, + starttiming=dummyfunction, + stoptiming=dummyfunction, + elapsedtime=nil, +} +directives={ + register=dummyfunction, + enable=dummyfunction, + disable=dummyfunction, +} +trackers={ + register=dummyfunction, + enable=dummyfunction, + disable=dummyfunction, +} +experiments={ + register=dummyfunction, + enable=dummyfunction, + disable=dummyfunction, +} +storage={ + register=dummyfunction, + shared={}, +} +logs={ + new=dummyreporter, + reporter=dummyreporter, + messenger=dummyreporter, + report=dummyfunction, +} +callbacks={ + register=function(n,f) return callback.register(n,f) end, +} +utilities={ + storage={ + allocate=function(t) return t or {} end, + mark=function(t) return t or {} end, + }, +} +characters=characters or { + data={} +} +texconfig.kpse_init=true +resolvers=resolvers or {} +local remapper={ + otf="opentype fonts", + ttf="truetype fonts", + ttc="truetype fonts", + dfont="truetype fonts", + cid="cid maps", + cidmap="cid maps", + fea="font feature files", + pfa="type1 fonts", + pfb="type1 fonts", + afm="afm", +} +function resolvers.findfile(name,fileformat) + name=string.gsub(name,"\\","/") + if not fileformat or fileformat=="" then + fileformat=file.suffix(name) + if fileformat=="" then + fileformat="tex" + end + end + fileformat=string.lower(fileformat) + fileformat=remapper[fileformat] or fileformat + local found=kpse.find_file(name,fileformat) + if not found or found=="" then + found=kpse.find_file(name,"other text files") + end + return found +end +resolvers.findbinfile=resolvers.findfile +function resolvers.loadbinfile(filename,filetype) + local data=io.loaddata(filename) + return true,data,#data +end +function resolvers.resolve(s) + return s +end +function resolvers.unresolve(s) + return s +end +caches={} +local writable=nil +local readables={} +local usingjit=jit +if not caches.namespace or caches.namespace=="" or caches.namespace=="context" then + caches.namespace='generic' +end +do + local cachepaths=kpse.expand_var('$TEXMFCACHE') or "" + if cachepaths=="" or cachepaths=="$TEXMFCACHE" then + cachepaths=kpse.expand_var('$TEXMFVAR') or "" + end + if cachepaths=="" or cachepaths=="$TEXMFVAR" then + cachepaths=kpse.expand_var('$VARTEXMF') or "" + end + if cachepaths=="" then + local fallbacks={ "TMPDIR","TEMPDIR","TMP","TEMP","HOME","HOMEPATH" } + for i=1,#fallbacks do + cachepaths=os.getenv(fallbacks[i]) or "" + if cachepath~="" and lfs.isdir(cachepath) then + break + end + end + end + if cachepaths=="" then + cachepaths="." 
+ end + cachepaths=string.split(cachepaths,os.type=="windows" and ";" or ":") + for i=1,#cachepaths do + local cachepath=cachepaths[i] + if not lfs.isdir(cachepath) then + lfs.mkdirs(cachepath) + if lfs.isdir(cachepath) then + texio.write(string.format("(created cache path: %s)",cachepath)) + end + end + if file.is_writable(cachepath) then + writable=file.join(cachepath,"luatex-cache") + lfs.mkdir(writable) + writable=file.join(writable,caches.namespace) + lfs.mkdir(writable) + break + end + end + for i=1,#cachepaths do + if file.is_readable(cachepaths[i]) then + readables[#readables+1]=file.join(cachepaths[i],"luatex-cache",caches.namespace) + end + end + if not writable then + texio.write_nl("quiting: fix your writable cache path") + os.exit() + elseif #readables==0 then + texio.write_nl("quiting: fix your readable cache path") + os.exit() + elseif #readables==1 and readables[1]==writable then + texio.write(string.format("(using cache: %s)",writable)) + else + texio.write(string.format("(using write cache: %s)",writable)) + texio.write(string.format("(using read cache: %s)",table.concat(readables," "))) + end +end +function caches.getwritablepath(category,subcategory) + local path=file.join(writable,category) + lfs.mkdir(path) + path=file.join(path,subcategory) + lfs.mkdir(path) + return path +end +function caches.getreadablepaths(category,subcategory) + local t={} + for i=1,#readables do + t[i]=file.join(readables[i],category,subcategory) + end + return t +end +local function makefullname(path,name) + if path and path~="" then + return file.addsuffix(file.join(path,name),"lua"),file.addsuffix(file.join(path,name),usingjit and "lub" or "luc") + end +end +function caches.is_writable(path,name) + local fullname=makefullname(path,name) + return fullname and file.is_writable(fullname) +end +function caches.loaddata(paths,name) + for i=1,#paths do + local data=false + local luaname,lucname=makefullname(paths[i],name) + if lucname and not lfs.isfile(lucname) and type(caches.compile)=="function" then + texio.write(string.format("(compiling luc: %s)",lucname)) + data=loadfile(luaname) + if data then + data=data() + end + if data then + caches.compile(data,luaname,lucname) + return data + end + end + if lucname and lfs.isfile(lucname) then + texio.write(string.format("(load luc: %s)",lucname)) + data=loadfile(lucname) + if data then + data=data() + end + if data then + return data + else + texio.write(string.format("(loading failed: %s)",lucname)) + end + end + if luaname and lfs.isfile(luaname) then + texio.write(string.format("(load lua: %s)",luaname)) + data=loadfile(luaname) + if data then + data=data() + end + if data then + return data + end + end + end +end +function caches.savedata(path,name,data) + local luaname,lucname=makefullname(path,name) + if luaname then + texio.write(string.format("(save: %s)",luaname)) + table.tofile(luaname,data,true) + if lucname and type(caches.compile)=="function" then + os.remove(lucname) + texio.write(string.format("(save: %s)",lucname)) + caches.compile(data,luaname,lucname) + end + end +end +function caches.compile(data,luaname,lucname) + local d=io.loaddata(luaname) + if not d or d=="" then + d=table.serialize(data,true) + end + if d and d~="" then + local f=io.open(lucname,'wb') + if f then + local s=loadstring(d) + if s then + f:write(string.dump(s,true)) + end + f:close() + end + end +end +function table.setmetatableindex(t,f) + if type(t)~="table" then + f=f or t + t={} + end + setmetatable(t,{ __index=f }) + return t +end +arguments={} +if arg then 
+ for i=1,#arg do + local k,v=string.match(arg[i],"^%-%-([^=]+)=?(.-)$") + if k and v then + arguments[k]=v + end + end +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['data-con']={ + version=1.100, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local format,lower,gsub=string.format,string.lower,string.gsub +local trace_cache=false trackers.register("resolvers.cache",function(v) trace_cache=v end) +local trace_containers=false trackers.register("resolvers.containers",function(v) trace_containers=v end) +local trace_storage=false trackers.register("resolvers.storage",function(v) trace_storage=v end) +containers=containers or {} +local containers=containers +containers.usecache=true +local report_containers=logs.reporter("resolvers","containers") +local allocated={} +local mt={ + __index=function(t,k) + if k=="writable" then + local writable=caches.getwritablepath(t.category,t.subcategory) or { "." } + t.writable=writable + return writable + elseif k=="readables" then + local readables=caches.getreadablepaths(t.category,t.subcategory) or { "." } + t.readables=readables + return readables + end + end, + __storage__=true +} +function containers.define(category,subcategory,version,enabled) + if category and subcategory then + local c=allocated[category] + if not c then + c={} + allocated[category]=c + end + local s=c[subcategory] + if not s then + s={ + category=category, + subcategory=subcategory, + storage={}, + enabled=enabled, + version=version or math.pi, + trace=false, + } + setmetatable(s,mt) + c[subcategory]=s + end + return s + end +end +function containers.is_usable(container,name) + return container.enabled and caches and caches.is_writable(container.writable,name) +end +function containers.is_valid(container,name) + if name and name~="" then + local storage=container.storage[name] + return storage and storage.cache_version==container.version + else + return false + end +end +function containers.read(container,name) + local storage=container.storage + local stored=storage[name] + if not stored and container.enabled and caches and containers.usecache then + stored=caches.loaddata(container.readables,name) + if stored and stored.cache_version==container.version then + if trace_cache or trace_containers then + report_containers("action %a, category %a, name %a","load",container.subcategory,name) + end + else + stored=nil + end + storage[name]=stored + elseif stored then + if trace_cache or trace_containers then + report_containers("action %a, category %a, name %a","reuse",container.subcategory,name) + end + end + return stored +end +function containers.write(container,name,data) + if data then + data.cache_version=container.version + if container.enabled and caches then + local unique,shared=data.unique,data.shared + data.unique,data.shared=nil,nil + caches.savedata(container.writable,name,data) + if trace_cache or trace_containers then + report_containers("action %a, category %a, name %a","save",container.subcategory,name) + end + data.unique,data.shared=unique,shared + end + if trace_cache or trace_containers then + report_containers("action %a, category %a, name %a","store",container.subcategory,name) + end + container.storage[name]=data + end + return data +end +function containers.content(container,name) + return container.storage[name] +end +function 
containers.cleanname(name) + return (gsub(lower(name),"[^%w\128-\255]+","-")) +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['luatex-fonts-nod']={ + version=1.001, + comment="companion to luatex-fonts.lua", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end +if tex.attribute[0]~=0 then + texio.write_nl("log","!") + texio.write_nl("log","! Attribute 0 is reserved for ConTeXt's font feature management and has to be") + texio.write_nl("log","! set to zero. Also, some attributes in the range 1-255 are used for special") + texio.write_nl("log","! purposes so setting them at the TeX end might break the font handler.") + texio.write_nl("log","!") + tex.attribute[0]=0 +end +attributes=attributes or {} +attributes.unsetvalue=-0x7FFFFFFF +local numbers,last={},127 +attributes.private=attributes.private or function(name) + local number=numbers[name] + if not number then + if last<255 then + last=last+1 + end + number=last + numbers[name]=number + end + return number +end +nodes={} +nodes.pool={} +nodes.handlers={} +local nodecodes={} for k,v in next,node.types () do nodecodes[string.gsub(v,"_","")]=k end +local whatcodes={} for k,v in next,node.whatsits() do whatcodes[string.gsub(v,"_","")]=k end +local glyphcodes={ [0]="character","glyph","ligature","ghost","left","right" } +local disccodes={ [0]="discretionary","explicit","automatic","regular","first","second" } +nodes.nodecodes=nodecodes +nodes.whatcodes=whatcodes +nodes.whatsitcodes=whatcodes +nodes.glyphcodes=glyphcodes +nodes.disccodes=disccodes +local free_node=node.free +local remove_node=node.remove +local new_node=node.new +local traverse_id=node.traverse_id +nodes.handlers.protectglyphs=node.protect_glyphs +nodes.handlers.unprotectglyphs=node.unprotect_glyphs +local math_code=nodecodes.math +local end_of_math=node.end_of_math +function node.end_of_math(n) + if n.id==math_code and n.subtype==1 then + return n + else + return end_of_math(n) + end +end +function nodes.remove(head,current,free_too) + local t=current + head,current=remove_node(head,current) + if t then + if free_too then + free_node(t) + t=nil + else + t.next,t.prev=nil,nil + end + end + return head,current,t +end +function nodes.delete(head,current) + return nodes.remove(head,current,true) +end +function nodes.pool.kern(k) + local n=new_node("kern",1) + n.kern=k + return n +end +local getfield=node.getfield +local setfield=node.setfield +nodes.getfield=getfield +nodes.setfield=setfield +nodes.getattr=getfield +nodes.setattr=setfield +nodes.tostring=node.tostring or tostring +nodes.copy=node.copy +nodes.copy_list=node.copy_list +nodes.delete=node.delete +nodes.dimensions=node.dimensions +nodes.end_of_math=node.end_of_math +nodes.flush_list=node.flush_list +nodes.flush_node=node.flush_node +nodes.free=node.free +nodes.insert_after=node.insert_after +nodes.insert_before=node.insert_before +nodes.hpack=node.hpack +nodes.new=node.new +nodes.tail=node.tail +nodes.traverse=node.traverse +nodes.traverse_id=node.traverse_id +nodes.slide=node.slide +nodes.vpack=node.vpack +nodes.first_glyph=node.first_glyph +nodes.first_character=node.first_character +nodes.has_glyph=node.has_glyph or node.first_glyph +nodes.current_attr=node.current_attr +nodes.do_ligature_n=node.do_ligature_n +nodes.has_field=node.has_field 
+nodes.last_node=node.last_node +nodes.usedlist=node.usedlist +nodes.protrusion_skippable=node.protrusion_skippable +nodes.write=node.write +nodes.has_attribute=node.has_attribute +nodes.set_attribute=node.set_attribute +nodes.unset_attribute=node.unset_attribute +nodes.protect_glyphs=node.protect_glyphs +nodes.unprotect_glyphs=node.unprotect_glyphs +nodes.kerning=node.kerning +nodes.ligaturing=node.ligaturing +nodes.mlist_to_hlist=node.mlist_to_hlist +local direct=node.direct +local nuts={} +nodes.nuts=nuts +local tonode=direct.tonode +local tonut=direct.todirect +nodes.tonode=tonode +nodes.tonut=tonut +nuts.tonode=tonode +nuts.tonut=tonut +local getfield=direct.getfield +local setfield=direct.setfield +nuts.getfield=getfield +nuts.setfield=setfield +nuts.getnext=direct.getnext +nuts.getprev=direct.getprev +nuts.getid=direct.getid +nuts.getattr=getfield +nuts.setattr=setfield +nuts.getfont=direct.getfont +nuts.getsubtype=direct.getsubtype +nuts.getchar=direct.getchar +nuts.insert_before=direct.insert_before +nuts.insert_after=direct.insert_after +nuts.delete=direct.delete +nuts.copy=direct.copy +nuts.tail=direct.tail +nuts.flush_list=direct.flush_list +nuts.end_of_math=direct.end_of_math +nuts.traverse=direct.traverse +nuts.traverse_id=direct.traverse_id +nuts.getprop=nuts.getattr +nuts.setprop=nuts.setattr +local new_nut=direct.new +nuts.new=new_nut +nuts.pool={} +function nuts.pool.kern(k) + local n=new_nut("kern",1) + setfield(n,"kern",k) + return n +end +local propertydata=direct.get_properties_table() +nodes.properties={ data=propertydata } +direct.set_properties_mode(true,true) +function direct.set_properties_mode() end +nuts.getprop=function(n,k) + local p=propertydata[n] + if p then + return p[k] + end +end +nuts.setprop=function(n,k,v) + if v then + local p=propertydata[n] + if p then + p[k]=v + else + propertydata[n]={ [k]=v } + end + end +end +nodes.setprop=nodes.setproperty +nodes.getprop=nodes.getproperty + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-ini']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local allocate=utilities.storage.allocate +local report_defining=logs.reporter("fonts","defining") +fonts=fonts or {} +local fonts=fonts +fonts.hashes={ identifiers=allocate() } +fonts.tables=fonts.tables or {} +fonts.helpers=fonts.helpers or {} +fonts.tracers=fonts.tracers or {} +fonts.specifiers=fonts.specifiers or {} +fonts.analyzers={} +fonts.readers={} +fonts.definers={ methods={} } +fonts.loggers={ register=function() end } +fontloader.totable=fontloader.to_table + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-con']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local next,tostring,rawget=next,tostring,rawget +local format,match,lower,gsub=string.format,string.match,string.lower,string.gsub +local utfbyte=utf.byte +local sort,insert,concat,sortedkeys,serialize,fastcopy=table.sort,table.insert,table.concat,table.sortedkeys,table.serialize,table.fastcopy +local derivetable=table.derive +local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end) 
+local trace_scaling=false trackers.register("fonts.scaling",function(v) trace_scaling=v end) +local report_defining=logs.reporter("fonts","defining") +local fonts=fonts +local constructors=fonts.constructors or {} +fonts.constructors=constructors +local handlers=fonts.handlers or {} +fonts.handlers=handlers +local allocate=utilities.storage.allocate +local setmetatableindex=table.setmetatableindex +constructors.dontembed=allocate() +constructors.autocleanup=true +constructors.namemode="fullpath" +constructors.version=1.01 +constructors.cache=containers.define("fonts","constructors",constructors.version,false) +constructors.privateoffset=0xF0000 +constructors.cacheintex=true +constructors.keys={ + properties={ + encodingbytes="number", + embedding="number", + cidinfo={}, + format="string", + fontname="string", + fullname="string", + filename="filename", + psname="string", + name="string", + virtualized="boolean", + hasitalics="boolean", + autoitalicamount="basepoints", + nostackmath="boolean", + noglyphnames="boolean", + mode="string", + hasmath="boolean", + mathitalics="boolean", + textitalics="boolean", + finalized="boolean", + }, + parameters={ + mathsize="number", + scriptpercentage="float", + scriptscriptpercentage="float", + units="cardinal", + designsize="scaledpoints", + expansion={ + stretch="integerscale", + shrink="integerscale", + step="integerscale", + auto="boolean", + }, + protrusion={ + auto="boolean", + }, + slantfactor="float", + extendfactor="float", + factor="float", + hfactor="float", + vfactor="float", + size="scaledpoints", + units="scaledpoints", + scaledpoints="scaledpoints", + slantperpoint="scaledpoints", + spacing={ + width="scaledpoints", + stretch="scaledpoints", + shrink="scaledpoints", + extra="scaledpoints", + }, + xheight="scaledpoints", + quad="scaledpoints", + ascender="scaledpoints", + descender="scaledpoints", + synonyms={ + space="spacing.width", + spacestretch="spacing.stretch", + spaceshrink="spacing.shrink", + extraspace="spacing.extra", + x_height="xheight", + space_stretch="spacing.stretch", + space_shrink="spacing.shrink", + extra_space="spacing.extra", + em="quad", + ex="xheight", + slant="slantperpoint", + }, + }, + description={ + width="basepoints", + height="basepoints", + depth="basepoints", + boundingbox={}, + }, + character={ + width="scaledpoints", + height="scaledpoints", + depth="scaledpoints", + italic="scaledpoints", + }, +} +local designsizes=allocate() +constructors.designsizes=designsizes +local loadedfonts=allocate() +constructors.loadedfonts=loadedfonts +local factors={ + pt=65536.0, + bp=65781.8, +} +function constructors.setfactor(f) + constructors.factor=factors[f or 'pt'] or factors.pt +end +constructors.setfactor() +function constructors.scaled(scaledpoints,designsize) + if scaledpoints<0 then + if designsize then + local factor=constructors.factor + if designsize>factor then + return (- scaledpoints/1000)*designsize + else + return (- scaledpoints/1000)*designsize*factor + end + else + return (- scaledpoints/1000)*10*factor + end + else + return scaledpoints + end +end +function constructors.cleanuptable(tfmdata) + if constructors.autocleanup and tfmdata.properties.virtualized then + for k,v in next,tfmdata.characters do + if v.commands then v.commands=nil end + end + end +end +function constructors.calculatescale(tfmdata,scaledpoints) + local parameters=tfmdata.parameters + if scaledpoints<0 then + scaledpoints=(- scaledpoints/1000)*(tfmdata.designsize or parameters.designsize) + end + return 
scaledpoints,scaledpoints/(parameters.units or 1000) +end +local unscaled={ + ScriptPercentScaleDown=true, + ScriptScriptPercentScaleDown=true, + RadicalDegreeBottomRaisePercent=true +} +function constructors.assignmathparameters(target,original) + local mathparameters=original.mathparameters + if mathparameters and next(mathparameters) then + local targetparameters=target.parameters + local targetproperties=target.properties + local targetmathparameters={} + local factor=targetproperties.math_is_scaled and 1 or targetparameters.factor + for name,value in next,mathparameters do + if unscaled[name] then + targetmathparameters[name]=value + else + targetmathparameters[name]=value*factor + end + end + if not targetmathparameters.FractionDelimiterSize then + targetmathparameters.FractionDelimiterSize=1.01*targetparameters.size + end + if not mathparameters.FractionDelimiterDisplayStyleSize then + targetmathparameters.FractionDelimiterDisplayStyleSize=2.40*targetparameters.size + end + target.mathparameters=targetmathparameters + end +end +function constructors.beforecopyingcharacters(target,original) +end +function constructors.aftercopyingcharacters(target,original) +end +constructors.sharefonts=false +constructors.nofsharedfonts=0 +local sharednames={} +function constructors.trytosharefont(target,tfmdata) + if constructors.sharefonts then + local characters=target.characters + local n=1 + local t={ target.psname } + local u=sortedkeys(characters) + for i=1,#u do + local k=u[i] + n=n+1;t[n]=k + n=n+1;t[n]=characters[k].index or k + end + local h=md5.HEX(concat(t," ")) + local s=sharednames[h] + if s then + if trace_defining then + report_defining("font %a uses backend resources of font %a",target.fullname,s) + end + target.fullname=s + constructors.nofsharedfonts=constructors.nofsharedfonts+1 + target.properties.sharedwith=s + else + sharednames[h]=target.fullname + end + end +end +function constructors.enhanceparameters(parameters) + local xheight=parameters.x_height + local quad=parameters.quad + local space=parameters.space + local stretch=parameters.space_stretch + local shrink=parameters.space_shrink + local extra=parameters.extra_space + local slant=parameters.slant + parameters.xheight=xheight + parameters.spacestretch=stretch + parameters.spaceshrink=shrink + parameters.extraspace=extra + parameters.em=quad + parameters.ex=xheight + parameters.slantperpoint=slant + parameters.spacing={ + width=space, + stretch=stretch, + shrink=shrink, + extra=extra, + } +end +function constructors.scale(tfmdata,specification) + local target={} + if tonumber(specification) then + specification={ size=specification } + end + target.specification=specification + local scaledpoints=specification.size + local relativeid=specification.relativeid + local properties=tfmdata.properties or {} + local goodies=tfmdata.goodies or {} + local resources=tfmdata.resources or {} + local descriptions=tfmdata.descriptions or {} + local characters=tfmdata.characters or {} + local changed=tfmdata.changed or {} + local shared=tfmdata.shared or {} + local parameters=tfmdata.parameters or {} + local mathparameters=tfmdata.mathparameters or {} + local targetcharacters={} + local targetdescriptions=derivetable(descriptions) + local targetparameters=derivetable(parameters) + local targetproperties=derivetable(properties) + local targetgoodies=goodies + target.characters=targetcharacters + target.descriptions=targetdescriptions + target.parameters=targetparameters + target.properties=targetproperties + 
target.goodies=targetgoodies + target.shared=shared + target.resources=resources + target.unscaled=tfmdata + local mathsize=tonumber(specification.mathsize) or 0 + local textsize=tonumber(specification.textsize) or scaledpoints + local forcedsize=tonumber(parameters.mathsize ) or 0 + local extrafactor=tonumber(specification.factor ) or 1 + if (mathsize==2 or forcedsize==2) and parameters.scriptpercentage then + scaledpoints=parameters.scriptpercentage*textsize/100 + elseif (mathsize==3 or forcedsize==3) and parameters.scriptscriptpercentage then + scaledpoints=parameters.scriptscriptpercentage*textsize/100 + elseif forcedsize>1000 then + scaledpoints=forcedsize + end + targetparameters.mathsize=mathsize + targetparameters.textsize=textsize + targetparameters.forcedsize=forcedsize + targetparameters.extrafactor=extrafactor + local tounicode=fonts.mappings.tounicode + local defaultwidth=resources.defaultwidth or 0 + local defaultheight=resources.defaultheight or 0 + local defaultdepth=resources.defaultdepth or 0 + local units=parameters.units or 1000 + if target.fonts then + target.fonts=fastcopy(target.fonts) + end + targetproperties.language=properties.language or "dflt" + targetproperties.script=properties.script or "dflt" + targetproperties.mode=properties.mode or "base" + local askedscaledpoints=scaledpoints + local scaledpoints,delta=constructors.calculatescale(tfmdata,scaledpoints,nil,specification) + local hdelta=delta + local vdelta=delta + target.designsize=parameters.designsize + target.units_per_em=units + local direction=properties.direction or tfmdata.direction or 0 + target.direction=direction + properties.direction=direction + target.size=scaledpoints + target.encodingbytes=properties.encodingbytes or 1 + target.embedding=properties.embedding or "subset" + target.tounicode=1 + target.cidinfo=properties.cidinfo + target.format=properties.format + target.cache=constructors.cacheintex and "yes" or "renew" + local fontname=properties.fontname or tfmdata.fontname + local fullname=properties.fullname or tfmdata.fullname + local filename=properties.filename or tfmdata.filename + local psname=properties.psname or tfmdata.psname + local name=properties.name or tfmdata.name + if not psname or psname=="" then + psname=fontname or (fullname and fonts.names.cleanname(fullname)) + end + target.fontname=fontname + target.fullname=fullname + target.filename=filename + target.psname=psname + target.name=name + properties.fontname=fontname + properties.fullname=fullname + properties.filename=filename + properties.psname=psname + properties.name=name + local expansion=parameters.expansion + if expansion then + target.stretch=expansion.stretch + target.shrink=expansion.shrink + target.step=expansion.step + target.auto_expand=expansion.auto + end + local protrusion=parameters.protrusion + if protrusion then + target.auto_protrude=protrusion.auto + end + local extendfactor=parameters.extendfactor or 0 + if extendfactor~=0 and extendfactor~=1 then + hdelta=hdelta*extendfactor + target.extend=extendfactor*1000 + else + target.extend=1000 + end + local slantfactor=parameters.slantfactor or 0 + if slantfactor~=0 then + target.slant=slantfactor*1000 + else + target.slant=0 + end + targetparameters.factor=delta + targetparameters.hfactor=hdelta + targetparameters.vfactor=vdelta + targetparameters.size=scaledpoints + targetparameters.units=units + targetparameters.scaledpoints=askedscaledpoints + local isvirtual=properties.virtualized or tfmdata.type=="virtual" + local hasquality=target.auto_expand or 
target.auto_protrude + local hasitalics=properties.hasitalics + local autoitalicamount=properties.autoitalicamount + local stackmath=not properties.nostackmath + local nonames=properties.noglyphnames + local haskerns=properties.haskerns or properties.mode=="base" + local hasligatures=properties.hasligatures or properties.mode=="base" + if changed and not next(changed) then + changed=false + end + target.type=isvirtual and "virtual" or "real" + target.postprocessors=tfmdata.postprocessors + local targetslant=(parameters.slant or parameters[1] or 0)*factors.pt + local targetspace=(parameters.space or parameters[2] or 0)*hdelta + local targetspace_stretch=(parameters.space_stretch or parameters[3] or 0)*hdelta + local targetspace_shrink=(parameters.space_shrink or parameters[4] or 0)*hdelta + local targetx_height=(parameters.x_height or parameters[5] or 0)*vdelta + local targetquad=(parameters.quad or parameters[6] or 0)*hdelta + local targetextra_space=(parameters.extra_space or parameters[7] or 0)*hdelta + targetparameters.slant=targetslant + targetparameters.space=targetspace + targetparameters.space_stretch=targetspace_stretch + targetparameters.space_shrink=targetspace_shrink + targetparameters.x_height=targetx_height + targetparameters.quad=targetquad + targetparameters.extra_space=targetextra_space + local ascender=parameters.ascender + if ascender then + targetparameters.ascender=delta*ascender + end + local descender=parameters.descender + if descender then + targetparameters.descender=delta*descender + end + constructors.enhanceparameters(targetparameters) + local protrusionfactor=(targetquad~=0 and 1000/targetquad) or 0 + local scaledwidth=defaultwidth*hdelta + local scaledheight=defaultheight*vdelta + local scaleddepth=defaultdepth*vdelta + local hasmath=(properties.hasmath or next(mathparameters)) and true + if hasmath then + constructors.assignmathparameters(target,tfmdata) + properties.hasmath=true + target.nomath=false + target.MathConstants=target.mathparameters + else + properties.hasmath=false + target.nomath=true + target.mathparameters=nil + end + local italickey="italic" + local useitalics=true + if hasmath then + autoitalicamount=false + elseif properties.textitalics then + italickey="italic_correction" + useitalics=false + if properties.delaytextitalics then + autoitalicamount=false + end + end + if trace_defining then + report_defining("defining tfm, name %a, fullname %a, filename %a, hscale %a, vscale %a, math %a, italics %a", + name,fullname,filename,hdelta,vdelta, + hasmath and "enabled" or "disabled",useitalics and "enabled" or "disabled") + end + constructors.beforecopyingcharacters(target,tfmdata) + local sharedkerns={} + for unicode,character in next,characters do + local chr,description,index + if changed then + local c=changed[unicode] + if c then + description=descriptions[c] or descriptions[unicode] or character + character=characters[c] or character + index=description.index or c + else + description=descriptions[unicode] or character + index=description.index or unicode + end + else + description=descriptions[unicode] or character + index=description.index or unicode + end + local width=description.width + local height=description.height + local depth=description.depth + if width then width=hdelta*width else width=scaledwidth end + if height then height=vdelta*height else height=scaledheight end + if depth and depth~=0 then + depth=delta*depth + if nonames then + chr={ + index=index, + height=height, + depth=depth, + width=width, + } + else + chr={ + 
name=description.name, + index=index, + height=height, + depth=depth, + width=width, + } + end + else + if nonames then + chr={ + index=index, + height=height, + width=width, + } + else + chr={ + name=description.name, + index=index, + height=height, + width=width, + } + end + end + local isunicode=description.unicode + if isunicode then + chr.unicode=isunicode + chr.tounicode=tounicode(isunicode) + end + if hasquality then + local ve=character.expansion_factor + if ve then + chr.expansion_factor=ve*1000 + end + local vl=character.left_protruding + if vl then + chr.left_protruding=protrusionfactor*width*vl + end + local vr=character.right_protruding + if vr then + chr.right_protruding=protrusionfactor*width*vr + end + end + if autoitalicamount then + local vi=description.italic + if not vi then + local vi=description.boundingbox[3]-description.width+autoitalicamount + if vi>0 then + chr[italickey]=vi*hdelta + end + elseif vi~=0 then + chr[italickey]=vi*hdelta + end + elseif hasitalics then + local vi=description.italic + if vi and vi~=0 then + chr[italickey]=vi*hdelta + end + end + if hasmath then + local vn=character.next + if vn then + chr.next=vn + else + local vv=character.vert_variants + if vv then + local t={} + for i=1,#vv do + local vvi=vv[i] + t[i]={ + ["start"]=(vvi["start"] or 0)*vdelta, + ["end"]=(vvi["end"] or 0)*vdelta, + ["advance"]=(vvi["advance"] or 0)*vdelta, + ["extender"]=vvi["extender"], + ["glyph"]=vvi["glyph"], + } + end + chr.vert_variants=t + else + local hv=character.horiz_variants + if hv then + local t={} + for i=1,#hv do + local hvi=hv[i] + t[i]={ + ["start"]=(hvi["start"] or 0)*hdelta, + ["end"]=(hvi["end"] or 0)*hdelta, + ["advance"]=(hvi["advance"] or 0)*hdelta, + ["extender"]=hvi["extender"], + ["glyph"]=hvi["glyph"], + } + end + chr.horiz_variants=t + end + end + end + local va=character.top_accent + if va then + chr.top_accent=vdelta*va + end + if stackmath then + local mk=character.mathkerns + if mk then + local kerns={} + local v=mk.top_right if v then local k={} for i=1,#v do local vi=v[i] + k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern } + end kerns.top_right=k end + local v=mk.top_left if v then local k={} for i=1,#v do local vi=v[i] + k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern } + end kerns.top_left=k end + local v=mk.bottom_left if v then local k={} for i=1,#v do local vi=v[i] + k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern } + end kerns.bottom_left=k end + local v=mk.bottom_right if v then local k={} for i=1,#v do local vi=v[i] + k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern } + end kerns.bottom_right=k end + chr.mathkern=kerns + end + end + end + if haskerns then + local vk=character.kerns + if vk then + local s=sharedkerns[vk] + if not s then + s={} + for k,v in next,vk do s[k]=v*hdelta end + sharedkerns[vk]=s + end + chr.kerns=s + end + end + if hasligatures then + local vl=character.ligatures + if vl then + if true then + chr.ligatures=vl + else + local tt={} + for i,l in next,vl do + tt[i]=l + end + chr.ligatures=tt + end + end + end + if isvirtual then + local vc=character.commands + if vc then + local ok=false + for i=1,#vc do + local key=vc[i][1] + if key=="right" or key=="down" then + ok=true + break + end + end + if ok then + local tt={} + for i=1,#vc do + local ivc=vc[i] + local key=ivc[1] + if key=="right" then + tt[i]={ key,ivc[2]*hdelta } + elseif key=="down" then + tt[i]={ key,ivc[2]*vdelta } + elseif key=="rule" then + tt[i]={ key,ivc[2]*vdelta,ivc[3]*hdelta } + else + tt[i]=ivc + end + end + chr.commands=tt 
+ else + chr.commands=vc + end + chr.index=nil + end + end + targetcharacters[unicode]=chr + end + constructors.aftercopyingcharacters(target,tfmdata) + constructors.trytosharefont(target,tfmdata) + return target +end +function constructors.finalize(tfmdata) + if tfmdata.properties and tfmdata.properties.finalized then + return + end + if not tfmdata.characters then + return nil + end + if not tfmdata.goodies then + tfmdata.goodies={} + end + local parameters=tfmdata.parameters + if not parameters then + return nil + end + if not parameters.expansion then + parameters.expansion={ + stretch=tfmdata.stretch or 0, + shrink=tfmdata.shrink or 0, + step=tfmdata.step or 0, + auto=tfmdata.auto_expand or false, + } + end + if not parameters.protrusion then + parameters.protrusion={ + auto=auto_protrude + } + end + if not parameters.size then + parameters.size=tfmdata.size + end + if not parameters.extendfactor then + parameters.extendfactor=tfmdata.extend or 0 + end + if not parameters.slantfactor then + parameters.slantfactor=tfmdata.slant or 0 + end + if not parameters.designsize then + parameters.designsize=tfmdata.designsize or (factors.pt*10) + end + if not parameters.units then + parameters.units=tfmdata.units_per_em or 1000 + end + if not tfmdata.descriptions then + local descriptions={} + setmetatableindex(descriptions,function(t,k) local v={} t[k]=v return v end) + tfmdata.descriptions=descriptions + end + local properties=tfmdata.properties + if not properties then + properties={} + tfmdata.properties=properties + end + if not properties.virtualized then + properties.virtualized=tfmdata.type=="virtual" + end + if not tfmdata.properties then + tfmdata.properties={ + fontname=tfmdata.fontname, + filename=tfmdata.filename, + fullname=tfmdata.fullname, + name=tfmdata.name, + psname=tfmdata.psname, + encodingbytes=tfmdata.encodingbytes or 1, + embedding=tfmdata.embedding or "subset", + tounicode=tfmdata.tounicode or 1, + cidinfo=tfmdata.cidinfo or nil, + format=tfmdata.format or "type1", + direction=tfmdata.direction or 0, + } + end + if not tfmdata.resources then + tfmdata.resources={} + end + if not tfmdata.shared then + tfmdata.shared={} + end + if not properties.hasmath then + properties.hasmath=not tfmdata.nomath + end + tfmdata.MathConstants=nil + tfmdata.postprocessors=nil + tfmdata.fontname=nil + tfmdata.filename=nil + tfmdata.fullname=nil + tfmdata.name=nil + tfmdata.psname=nil + tfmdata.encodingbytes=nil + tfmdata.embedding=nil + tfmdata.tounicode=nil + tfmdata.cidinfo=nil + tfmdata.format=nil + tfmdata.direction=nil + tfmdata.type=nil + tfmdata.nomath=nil + tfmdata.designsize=nil + tfmdata.size=nil + tfmdata.stretch=nil + tfmdata.shrink=nil + tfmdata.step=nil + tfmdata.auto_expand=nil + tfmdata.auto_protrude=nil + tfmdata.extend=nil + tfmdata.slant=nil + tfmdata.units_per_em=nil + tfmdata.cache=nil + properties.finalized=true + return tfmdata +end +local hashmethods={} +constructors.hashmethods=hashmethods +function constructors.hashfeatures(specification) + local features=specification.features + if features then + local t,tn={},0 + for category,list in next,features do + if next(list) then + local hasher=hashmethods[category] + if hasher then + local hash=hasher(list) + if hash then + tn=tn+1 + t[tn]=category..":"..hash + end + end + end + end + if tn>0 then + return concat(t," & ") + end + end + return "unknown" +end +hashmethods.normal=function(list) + local s={} + local n=0 + for k,v in next,list do + if not k then + elseif k=="number" or k=="features" then + else + n=n+1 + 
s[n]=k + end + end + if n>0 then + sort(s) + for i=1,n do + local k=s[i] + s[i]=k..'='..tostring(list[k]) + end + return concat(s,"+") + end +end +function constructors.hashinstance(specification,force) + local hash,size,fallbacks=specification.hash,specification.size,specification.fallbacks + if force or not hash then + hash=constructors.hashfeatures(specification) + specification.hash=hash + end + if size<1000 and designsizes[hash] then + size=math.round(constructors.scaled(size,designsizes[hash])) + specification.size=size + end + if fallbacks then + return hash..' @ '..tostring(size)..' @ '..fallbacks + else + return hash..' @ '..tostring(size) + end +end +function constructors.setname(tfmdata,specification) + if constructors.namemode=="specification" then + local specname=specification.specification + if specname then + tfmdata.properties.name=specname + if trace_defining then + report_otf("overloaded fontname %a",specname) + end + end + end +end +function constructors.checkedfilename(data) + local foundfilename=data.foundfilename + if not foundfilename then + local askedfilename=data.filename or "" + if askedfilename~="" then + askedfilename=resolvers.resolve(askedfilename) + foundfilename=resolvers.findbinfile(askedfilename,"") or "" + if foundfilename=="" then + report_defining("source file %a is not found",askedfilename) + foundfilename=resolvers.findbinfile(file.basename(askedfilename),"") or "" + if foundfilename~="" then + report_defining("using source file %a due to cache mismatch",foundfilename) + end + end + end + data.foundfilename=foundfilename + end + return foundfilename +end +local formats=allocate() +fonts.formats=formats +setmetatableindex(formats,function(t,k) + local l=lower(k) + if rawget(t,k) then + t[k]=l + return l + end + return rawget(t,file.suffix(l)) +end) +local locations={} +local function setindeed(mode,target,group,name,action,position) + local t=target[mode] + if not t then + report_defining("fatal error in setting feature %a, group %a, mode %a",name,group,mode) + os.exit() + elseif position then + insert(t,position,{ name=name,action=action }) + else + for i=1,#t do + local ti=t[i] + if ti.name==name then + ti.action=action + return + end + end + insert(t,{ name=name,action=action }) + end +end +local function set(group,name,target,source) + target=target[group] + if not target then + report_defining("fatal target error in setting feature %a, group %a",name,group) + os.exit() + end + local source=source[group] + if not source then + report_defining("fatal source error in setting feature %a, group %a",name,group) + os.exit() + end + local node=source.node + local base=source.base + local position=source.position + if node then + setindeed("node",target,group,name,node,position) + end + if base then + setindeed("base",target,group,name,base,position) + end +end +local function register(where,specification) + local name=specification.name + if name and name~="" then + local default=specification.default + local description=specification.description + local initializers=specification.initializers + local processors=specification.processors + local manipulators=specification.manipulators + local modechecker=specification.modechecker + if default then + where.defaults[name]=default + end + if description and description~="" then + where.descriptions[name]=description + end + if initializers then + set('initializers',name,where,specification) + end + if processors then + set('processors',name,where,specification) + end + if manipulators then + 
set('manipulators',name,where,specification) + end + if modechecker then + where.modechecker=modechecker + end + end +end +constructors.registerfeature=register +function constructors.getfeatureaction(what,where,mode,name) + what=handlers[what].features + if what then + where=what[where] + if where then + mode=where[mode] + if mode then + for i=1,#mode do + local m=mode[i] + if m.name==name then + return m.action + end + end + end + end + end +end +function constructors.newhandler(what) + local handler=handlers[what] + if not handler then + handler={} + handlers[what]=handler + end + return handler +end +function constructors.newfeatures(what) + local handler=handlers[what] + local features=handler.features + if not features then + local tables=handler.tables + local statistics=handler.statistics + features=allocate { + defaults={}, + descriptions=tables and tables.features or {}, + used=statistics and statistics.usedfeatures or {}, + initializers={ base={},node={} }, + processors={ base={},node={} }, + manipulators={ base={},node={} }, + } + features.register=function(specification) return register(features,specification) end + handler.features=features + end + return features +end +function constructors.checkedfeatures(what,features) + local defaults=handlers[what].features.defaults + if features and next(features) then + features=fastcopy(features) + for key,value in next,defaults do + if features[key]==nil then + features[key]=value + end + end + return features + else + return fastcopy(defaults) + end +end +function constructors.initializefeatures(what,tfmdata,features,trace,report) + if features and next(features) then + local properties=tfmdata.properties or {} + local whathandler=handlers[what] + local whatfeatures=whathandler.features + local whatinitializers=whatfeatures.initializers + local whatmodechecker=whatfeatures.modechecker + local mode=properties.mode or (whatmodechecker and whatmodechecker(tfmdata,features,features.mode)) or features.mode or "base" + properties.mode=mode + features.mode=mode + local done={} + while true do + local redo=false + local initializers=whatfeatures.initializers[mode] + if initializers then + for i=1,#initializers do + local step=initializers[i] + local feature=step.name + local value=features[feature] + if not value then + elseif done[feature] then + else + local action=step.action + if trace then + report("initializing feature %a to %a for mode %a for font %a",feature, + value,mode,tfmdata.properties.fullname) + end + action(tfmdata,value,features) + if mode~=properties.mode or mode~=features.mode then + if whatmodechecker then + properties.mode=whatmodechecker(tfmdata,features,properties.mode) + features.mode=properties.mode + end + if mode~=properties.mode then + mode=properties.mode + redo=true + end + end + done[feature]=true + end + if redo then + break + end + end + if not redo then + break + end + else + break + end + end + properties.mode=mode + return true + else + return false + end +end +function constructors.collectprocessors(what,tfmdata,features,trace,report) + local processes,nofprocesses={},0 + if features and next(features) then + local properties=tfmdata.properties + local whathandler=handlers[what] + local whatfeatures=whathandler.features + local whatprocessors=whatfeatures.processors + local mode=properties.mode + local processors=whatprocessors[mode] + if processors then + for i=1,#processors do + local step=processors[i] + local feature=step.name + if features[feature] then + local action=step.action + if trace then + 
report("installing feature processor %a for mode %a for font %a",feature,mode,tfmdata.properties.fullname) + end + if action then + nofprocesses=nofprocesses+1 + processes[nofprocesses]=action + end + end + end + elseif trace then + report("no feature processors for mode %a for font %a",mode,properties.fullname) + end + end + return processes +end +function constructors.applymanipulators(what,tfmdata,features,trace,report) + if features and next(features) then + local properties=tfmdata.properties + local whathandler=handlers[what] + local whatfeatures=whathandler.features + local whatmanipulators=whatfeatures.manipulators + local mode=properties.mode + local manipulators=whatmanipulators[mode] + if manipulators then + for i=1,#manipulators do + local step=manipulators[i] + local feature=step.name + local value=features[feature] + if value then + local action=step.action + if trace then + report("applying feature manipulator %a for mode %a for font %a",feature,mode,properties.fullname) + end + if action then + action(tfmdata,feature,value) + end + end + end + end + end +end +function constructors.addcoreunicodes(unicodes) + if not unicodes then + unicodes={} + end + unicodes.space=0x0020 + unicodes.hyphen=0x002D + unicodes.zwj=0x200D + unicodes.zwnj=0x200C + return unicodes +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['luatex-font-enc']={ + version=1.001, + comment="companion to luatex-*.tex", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end +local fonts=fonts +fonts.encodings={} +fonts.encodings.agl={} +fonts.encodings.known={} +setmetatable(fonts.encodings.agl,{ __index=function(t,k) + if k=="unicodes" then + texio.write(" ") + local unicodes=dofile(resolvers.findfile("font-age.lua")) + fonts.encodings.agl={ unicodes=unicodes } + return unicodes + else + return nil + end +end }) + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-cid']={ + version=1.001, + comment="companion to font-otf.lua (cidmaps)", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local format,match,lower=string.format,string.match,string.lower +local tonumber=tonumber +local P,S,R,C,V,lpegmatch=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.V,lpeg.match +local fonts,logs,trackers=fonts,logs,trackers +local trace_loading=false trackers.register("otf.loading",function(v) trace_loading=v end) +local report_otf=logs.reporter("fonts","otf loading") +local cid={} +fonts.cid=cid +local cidmap={} +local cidmax=10 +local number=C(R("09","af","AF")^1) +local space=S(" \n\r\t") +local spaces=space^0 +local period=P(".") +local periods=period*period +local name=P("/")*C((1-space)^1) +local unicodes,names={},{} +local function do_one(a,b) + unicodes[tonumber(a)]=tonumber(b,16) +end +local function do_range(a,b,c) + c=tonumber(c,16) + for i=tonumber(a),tonumber(b) do + unicodes[i]=c + c=c+1 + end +end +local function do_name(a,b) + names[tonumber(a)]=b +end +local grammar=P { "start", + start=number*spaces*number*V("series"), + series=(spaces*(V("one")+V("range")+V("named")))^1, + one=(number*spaces*number)/do_one, + range=(number*periods*number*spaces*number)/do_range, + 
named=(number*spaces*name)/do_name +} +local function loadcidfile(filename) + local data=io.loaddata(filename) + if data then + unicodes,names={},{} + lpegmatch(grammar,data) + local supplement,registry,ordering=match(filename,"^(.-)%-(.-)%-()%.(.-)$") + return { + supplement=supplement, + registry=registry, + ordering=ordering, + filename=filename, + unicodes=unicodes, + names=names, + } + end +end +cid.loadfile=loadcidfile +local template="%s-%s-%s.cidmap" +local function locate(registry,ordering,supplement) + local filename=format(template,registry,ordering,supplement) + local hashname=lower(filename) + local found=cidmap[hashname] + if not found then + if trace_loading then + report_otf("checking cidmap, registry %a, ordering %a, supplement %a, filename %a",registry,ordering,supplement,filename) + end + local fullname=resolvers.findfile(filename,'cid') or "" + if fullname~="" then + found=loadcidfile(fullname) + if found then + if trace_loading then + report_otf("using cidmap file %a",filename) + end + cidmap[hashname]=found + found.usedname=file.basename(filename) + end + end + end + return found +end +function cid.getmap(specification) + if not specification then + report_otf("invalid cidinfo specification, table expected") + return + end + local registry=specification.registry + local ordering=specification.ordering + local supplement=specification.supplement + local filename=format(registry,ordering,supplement) + local lowername=lower(filename) + local found=cidmap[lowername] + if found then + return found + end + if ordering=="Identity" then + local found={ + supplement=supplement, + registry=registry, + ordering=ordering, + filename=filename, + unicodes={}, + names={}, + } + cidmap[lowername]=found + return found + end + if trace_loading then + report_otf("cidmap needed, registry %a, ordering %a, supplement %a",registry,ordering,supplement) + end + found=locate(registry,ordering,supplement) + if not found then + local supnum=tonumber(supplement) + local cidnum=nil + if supnum<cidmax then + for s=supnum+1,cidmax do + local c=locate(registry,ordering,s) + if c then + found,cidnum=c,s + break + end + end + end + if not found and supnum>0 then + for s=supnum-1,0,-1 do + local c=locate(registry,ordering,s) + if c then + found,cidnum=c,s + break + end + end + end + registry=lower(registry) + ordering=lower(ordering) + if found and cidnum>0 then + for s=0,cidnum-1 do + local filename=format(template,registry,ordering,s) + if not cidmap[filename] then + cidmap[filename]=found + end + end + end + end + return found +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-map']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local tonumber,next,type=tonumber,next,type +local match,format,find,concat,gsub,lower=string.match,string.format,string.find,table.concat,string.gsub,string.lower +local P,R,S,C,Ct,Cc,lpegmatch=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cc,lpeg.match +local utfbyte=utf.byte +local floor=math.floor +local formatters=string.formatters +local trace_loading=false trackers.register("fonts.loading",function(v) trace_loading=v end) +local trace_mapping=false trackers.register("fonts.mapping",function(v) trace_unimapping=v end) +local report_fonts=logs.reporter("fonts","loading") +local fonts=fonts or {} +local mappings=fonts.mappings or {} +fonts.mappings=mappings +local function loadlumtable(filename) + local lumname=file.replacesuffix(file.basename(filename),"lum") + local 
lumfile=resolvers.findfile(lumname,"map") or "" + if lumfile~="" and lfs.isfile(lumfile) then + if trace_loading or trace_mapping then + report_fonts("loading map table %a",lumfile) + end + lumunic=dofile(lumfile) + return lumunic,lumfile + end +end +local hex=R("AF","09") +local hexfour=(hex*hex*hex*hex)/function(s) return tonumber(s,16) end +local hexsix=(hex*hex*hex*hex*hex*hex)/function(s) return tonumber(s,16) end +local dec=(R("09")^1)/tonumber +local period=P(".") +local unicode=P("uni")*(hexfour*(period+P(-1))*Cc(false)+Ct(hexfour^1)*Cc(true)) +local ucode=P("u")*(hexsix*(period+P(-1))*Cc(false)+Ct(hexsix^1)*Cc(true)) +local index=P("index")*dec*Cc(false) +local parser=unicode+ucode+index +local parsers={} +local function makenameparser(str) + if not str or str=="" then + return parser + else + local p=parsers[str] + if not p then + p=P(str)*period*dec*Cc(false) + parsers[str]=p + end + return p + end +end +local f_single=formatters["%04X"] +local f_double=formatters["%04X%04X"] +local function tounicode16(unicode,name) + if unicode<0x10000 then + return f_single(unicode) + elseif unicode<0x1FFFFFFFFF then + return f_double(floor(unicode/1024),unicode%1024+0xDC00) + else + report_fonts("can't convert %a in %a into tounicode",unicode,name) + end +end +local function tounicode16sequence(unicodes,name) + local t={} + for l=1,#unicodes do + local u=unicodes[l] + if u<0x10000 then + t[l]=f_single(u) + elseif unicode<0x1FFFFFFFFF then + t[l]=f_double(floor(u/1024),u%1024+0xDC00) + else + report_fonts ("can't convert %a in %a into tounicode",u,name) + return + end + end + return concat(t) +end +local function tounicode(unicode,name) + if type(unicode)=="table" then + local t={} + for l=1,#unicode do + local u=unicode[l] + if u<0x10000 then + t[l]=f_single(u) + elseif u<0x1FFFFFFFFF then + t[l]=f_double(floor(u/1024),u%1024+0xDC00) + else + report_fonts ("can't convert %a in %a into tounicode",u,name) + return + end + end + return concat(t) + else + if unicode<0x10000 then + return f_single(unicode) + elseif unicode<0x1FFFFFFFFF then + return f_double(floor(unicode/1024),unicode%1024+0xDC00) + else + report_fonts("can't convert %a in %a into tounicode",unicode,name) + end + end +end +local function fromunicode16(str) + if #str==4 then + return tonumber(str,16) + else + local l,r=match(str,"(....)(....)") + return (tonumber(l,16))*0x400+tonumber(r,16)-0xDC00 + end +end +mappings.loadlumtable=loadlumtable +mappings.makenameparser=makenameparser +mappings.tounicode=tounicode +mappings.tounicode16=tounicode16 +mappings.tounicode16sequence=tounicode16sequence +mappings.fromunicode16=fromunicode16 +local ligseparator=P("_") +local varseparator=P(".") +local namesplitter=Ct(C((1-ligseparator-varseparator)^1)*(ligseparator*C((1-ligseparator-varseparator)^1))^0) +local overloads={ + IJ={ name="I_J",unicode={ 0x49,0x4A },mess=0x0132 }, + ij={ name="i_j",unicode={ 0x69,0x6A },mess=0x0133 }, + ff={ name="f_f",unicode={ 0x66,0x66 },mess=0xFB00 }, + fi={ name="f_i",unicode={ 0x66,0x69 },mess=0xFB01 }, + fl={ name="f_l",unicode={ 0x66,0x6C },mess=0xFB02 }, + ffi={ name="f_f_i",unicode={ 0x66,0x66,0x69 },mess=0xFB03 }, + ffl={ name="f_f_l",unicode={ 0x66,0x66,0x6C },mess=0xFB04 }, + fj={ name="f_j",unicode={ 0x66,0x6A } }, + fk={ name="f_k",unicode={ 0x66,0x6B } }, +} +for k,v in next,overloads do + local name=v.name + local mess=v.mess + if name then + overloads[name]=v + end + if mess then + overloads[mess]=v + end +end +mappings.overloads=overloads +function mappings.addtounicode(data,filename) + local 
resources=data.resources + local properties=data.properties + local descriptions=data.descriptions + local unicodes=resources.unicodes + local lookuptypes=resources.lookuptypes + if not unicodes then + return + end + unicodes['space']=unicodes['space'] or 32 + unicodes['hyphen']=unicodes['hyphen'] or 45 + unicodes['zwj']=unicodes['zwj'] or 0x200D + unicodes['zwnj']=unicodes['zwnj'] or 0x200C + local private=fonts.constructors.privateoffset + local unicodevector=fonts.encodings.agl.unicodes + local missing={} + local lumunic,uparser,oparser + local cidinfo,cidnames,cidcodes,usedmap + cidinfo=properties.cidinfo + usedmap=cidinfo and fonts.cid.getmap(cidinfo) + if usedmap then + oparser=usedmap and makenameparser(cidinfo.ordering) + cidnames=usedmap.names + cidcodes=usedmap.unicodes + end + uparser=makenameparser() + local ns,nl=0,0 + for unic,glyph in next,descriptions do + local index=glyph.index + local name=glyph.name + local r=overloads[name] + if r then + glyph.unicode=r.unicode + elseif unic==-1 or unic>=private or (unic>=0xE000 and unic<=0xF8FF) or unic==0xFFFE or unic==0xFFFF then + local unicode=lumunic and lumunic[name] or unicodevector[name] + if unicode then + glyph.unicode=unicode + ns=ns+1 + end + if (not unicode) and usedmap then + local foundindex=lpegmatch(oparser,name) + if foundindex then + unicode=cidcodes[foundindex] + if unicode then + glyph.unicode=unicode + ns=ns+1 + else + local reference=cidnames[foundindex] + if reference then + local foundindex=lpegmatch(oparser,reference) + if foundindex then + unicode=cidcodes[foundindex] + if unicode then + glyph.unicode=unicode + ns=ns+1 + end + end + if not unicode or unicode=="" then + local foundcodes,multiple=lpegmatch(uparser,reference) + if foundcodes then + glyph.unicode=foundcodes + if multiple then + nl=nl+1 + unicode=true + else + ns=ns+1 + unicode=foundcodes + end + end + end + end + end + end + end + if not unicode or unicode=="" then + local split=lpegmatch(namesplitter,name) + local nsplit=split and #split or 0 + local t,n={},0 + unicode=true + for l=1,nsplit do + local base=split[l] + local u=unicodes[base] or unicodevector[base] + if not u then + break + elseif type(u)=="table" then + if u[1]>=private then + unicode=false + break + end + n=n+1 + t[n]=u[1] + else + if u>=private then + unicode=false + break + end + n=n+1 + t[n]=u + end + end + if n==0 then + elseif n==1 then + glyph.unicode=t[1] + else + glyph.unicode=t + end + nl=nl+1 + end + if not unicode or unicode=="" then + local foundcodes,multiple=lpegmatch(uparser,name) + if foundcodes then + glyph.unicode=foundcodes + if multiple then + nl=nl+1 + unicode=true + else + ns=ns+1 + unicode=foundcodes + end + end + end + local r=overloads[unicode] + if r then + unicode=r.unicode + glyph.unicode=unicode + end + if not unicode then + missing[name]=true + end + end + end + if next(missing) then + local guess={} + local function check(gname,code,unicode) + local description=descriptions[code] + local variant=description.name + if variant==gname then + return + end + local unic=unicodes[variant] + if unic==-1 or unic>=private or (unic>=0xE000 and unic<=0xF8FF) or unic==0xFFFE or unic==0xFFFF then + else + return + end + if descriptions[code].unicode then + return + end + local g=guess[variant] + if g then + g[gname]=unicode + else + guess[variant]={ [gname]=unicode } + end + end + for unicode,description in next,descriptions do + local slookups=description.slookups + if slookups then + local gname=description.name + for tag,data in next,slookups do + local 
lookuptype=lookuptypes[tag] + if lookuptype=="alternate" then + for i=1,#data do + check(gname,data[i],unicode) + end + elseif lookuptype=="substitution" then + check(gname,data,unicode) + end + end + end + local mlookups=description.mlookups + if mlookups then + local gname=description.name + for tag,list in next,mlookups do + local lookuptype=lookuptypes[tag] + if lookuptype=="alternate" then + for i=1,#list do + local data=list[i] + for i=1,#data do + check(gname,data[i],unicode) + end + end + elseif lookuptype=="substitution" then + for i=1,#list do + check(gname,list[i],unicode) + end + end + end + end + end + local done=true + while done do + done=false + for k,v in next,guess do + if type(v)~="number" then + for kk,vv in next,v do + if vv==-1 or vv>=private or (vv>=0xE000 and vv<=0xF8FF) or vv==0xFFFE or vv==0xFFFF then + local uu=guess[kk] + if type(uu)=="number" then + guess[k]=uu + done=true + end + else + guess[k]=vv + done=true + end + end + end + end + end + local orphans=0 + local guessed=0 + for k,v in next,guess do + if type(v)=="number" then + descriptions[unicodes[k]].unicode=descriptions[v].unicode or v + guessed=guessed+1 + else + local t=nil + local l=lower(k) + local u=unicodes[l] + if not u then + orphans=orphans+1 + elseif u==-1 or u>=private or (u>=0xE000 and u<=0xF8FF) or u==0xFFFE or u==0xFFFF then + local unicode=descriptions[u].unicode + if unicode then + descriptions[unicodes[k]].unicode=unicode + guessed=guessed+1 + else + orphans=orphans+1 + end + else + orphans=orphans+1 + end + end + end + if trace_loading and orphans>0 or guessed>0 then + report_fonts("%s glyphs with no related unicode, %s guessed, %s orphans",guessed+orphans,guessed,orphans) + end + end + if trace_mapping then + for unic,glyph in table.sortedhash(descriptions) do + local name=glyph.name + local index=glyph.index + local unicode=glyph.unicode + if unicode then + if type(unicode)=="table" then + local unicodes={} + for i=1,#unicode do + unicodes[i]=formatters("%U",unicode[i]) + end + report_fonts("internal slot %U, name %a, unicode %U, tounicode % t",index,name,unic,unicodes) + else + report_fonts("internal slot %U, name %a, unicode %U, tounicode %U",index,name,unic,unicode) + end + else + report_fonts("internal slot %U, name %a, unicode %U",index,name,unic) + end + end + end + if trace_loading and (ns>0 or nl>0) then + report_fonts("%s tounicode entries added, ligatures %s",nl+ns,ns) + end +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['luatex-fonts-syn']={ + version=1.001, + comment="companion to luatex-*.tex", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end +local fonts=fonts +fonts.names=fonts.names or {} +fonts.names.version=1.001 +fonts.names.basename="luatex-fonts-names" +fonts.names.new_to_old={} +fonts.names.old_to_new={} +fonts.names.cache=containers.define("fonts","data",fonts.names.version,true) +local data,loaded=nil,false +local fileformats={ "lua","tex","other text files" } +function fonts.names.reportmissingbase() + texio.write("") + fonts.names.reportmissingbase=nil +end +function fonts.names.reportmissingname() + texio.write("") + fonts.names.reportmissingname=nil +end +function fonts.names.resolve(name,sub) + if not loaded then + local basename=fonts.names.basename + if basename and 
basename~="" then + data=containers.read(fonts.names.cache,basename) + if not data then + basename=file.addsuffix(basename,"lua") + for i=1,#fileformats do + local format=fileformats[i] + local foundname=resolvers.findfile(basename,format) or "" + if foundname~="" then + data=dofile(foundname) + texio.write("") + break + end + end + end + end + loaded=true + end + if type(data)=="table" and data.version==fonts.names.version then + local condensed=string.gsub(string.lower(name),"[^%a%d]","") + local found=data.mappings and data.mappings[condensed] + if found then + local fontname,filename,subfont=found[1],found[2],found[3] + if subfont then + return filename,fontname + else + return filename,false + end + elseif fonts.names.reportmissingname then + fonts.names.reportmissingname() + return name,false + end + elseif fonts.names.reportmissingbase then + fonts.names.reportmissingbase() + end +end +fonts.names.resolvespec=fonts.names.resolve +function fonts.names.getfilename(askedname,suffix) + return "" +end +function fonts.names.ignoredfile(filename) + return false +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-tfm']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local next=next +local match=string.match +local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end) +local trace_features=false trackers.register("tfm.features",function(v) trace_features=v end) +local report_defining=logs.reporter("fonts","defining") +local report_tfm=logs.reporter("fonts","tfm loading") +local findbinfile=resolvers.findbinfile +local fonts=fonts +local handlers=fonts.handlers +local readers=fonts.readers +local constructors=fonts.constructors +local encodings=fonts.encodings +local tfm=constructors.newhandler("tfm") +local tfmfeatures=constructors.newfeatures("tfm") +local registertfmfeature=tfmfeatures.register +constructors.resolvevirtualtoo=false +fonts.formats.tfm="type1" +function tfm.setfeatures(tfmdata,features) + local okay=constructors.initializefeatures("tfm",tfmdata,features,trace_features,report_tfm) + if okay then + return constructors.collectprocessors("tfm",tfmdata,features,trace_features,report_tfm) + else + return {} + end +end +local function read_from_tfm(specification) + local filename=specification.filename + local size=specification.size + if trace_defining then + report_defining("loading tfm file %a at size %s",filename,size) + end + local tfmdata=font.read_tfm(filename,size) + if tfmdata then + local features=specification.features and specification.features.normal or {} + local resources=tfmdata.resources or {} + local properties=tfmdata.properties or {} + local parameters=tfmdata.parameters or {} + local shared=tfmdata.shared or {} + properties.name=tfmdata.name + properties.fontname=tfmdata.fontname + properties.psname=tfmdata.psname + properties.filename=specification.filename + properties.format=fonts.formats.tfm + parameters.size=size + shared.rawdata={} + shared.features=features + shared.processes=next(features) and tfm.setfeatures(tfmdata,features) or nil + tfmdata.properties=properties + tfmdata.resources=resources + tfmdata.parameters=parameters + tfmdata.shared=shared + parameters.slant=parameters.slant or parameters[1] or 0 + parameters.space=parameters.space or parameters[2] or 0 + 
parameters.space_stretch=parameters.space_stretch or parameters[3] or 0 + parameters.space_shrink=parameters.space_shrink or parameters[4] or 0 + parameters.x_height=parameters.x_height or parameters[5] or 0 + parameters.quad=parameters.quad or parameters[6] or 0 + parameters.extra_space=parameters.extra_space or parameters[7] or 0 + constructors.enhanceparameters(parameters) + if constructors.resolvevirtualtoo then + fonts.loggers.register(tfmdata,file.suffix(filename),specification) + local vfname=findbinfile(specification.name,'ovf') + if vfname and vfname~="" then + local vfdata=font.read_vf(vfname,size) + if vfdata then + local chars=tfmdata.characters + for k,v in next,vfdata.characters do + chars[k].commands=v.commands + end + properties.virtualized=true + tfmdata.fonts=vfdata.fonts + end + end + end + local allfeatures=tfmdata.shared.features or specification.features.normal + constructors.applymanipulators("tfm",tfmdata,allfeatures.normal,trace_features,report_tfm) + if not features.encoding then + local encoding,filename=match(properties.filename,"^(.-)%-(.*)$") + if filename and encoding and encodings.known and encodings.known[encoding] then + features.encoding=encoding + end + end + properties.haskerns=true + properties.haslogatures=true + resources.unicodes={} + resources.lookuptags={} + return tfmdata + end +end +local function check_tfm(specification,fullname) + local foundname=findbinfile(fullname,'tfm') or "" + if foundname=="" then + foundname=findbinfile(fullname,'ofm') or "" + end + if foundname=="" then + foundname=fonts.names.getfilename(fullname,"tfm") or "" + end + if foundname~="" then + specification.filename=foundname + specification.format="ofm" + return read_from_tfm(specification) + elseif trace_defining then + report_defining("loading tfm with name %a fails",specification.name) + end +end +readers.check_tfm=check_tfm +function readers.tfm(specification) + local fullname=specification.filename or "" + if fullname=="" then + local forced=specification.forced or "" + if forced~="" then + fullname=specification.name.."."..forced + else + fullname=specification.name + end + end + return check_tfm(specification,fullname) +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-afm']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local fonts,logs,trackers,containers,resolvers=fonts,logs,trackers,containers,resolvers +local next,type,tonumber=next,type,tonumber +local format,match,gmatch,lower,gsub,strip=string.format,string.match,string.gmatch,string.lower,string.gsub,string.strip +local abs=math.abs +local P,S,C,R,lpegmatch,patterns=lpeg.P,lpeg.S,lpeg.C,lpeg.R,lpeg.match,lpeg.patterns +local derivetable=table.derive +local trace_features=false trackers.register("afm.features",function(v) trace_features=v end) +local trace_indexing=false trackers.register("afm.indexing",function(v) trace_indexing=v end) +local trace_loading=false trackers.register("afm.loading",function(v) trace_loading=v end) +local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end) +local report_afm=logs.reporter("fonts","afm loading") +local setmetatableindex=table.setmetatableindex +local findbinfile=resolvers.findbinfile +local definers=fonts.definers +local readers=fonts.readers +local 
constructors=fonts.constructors +local afm=constructors.newhandler("afm") +local pfb=constructors.newhandler("pfb") +local afmfeatures=constructors.newfeatures("afm") +local registerafmfeature=afmfeatures.register +afm.version=1.500 +afm.cache=containers.define("fonts","afm",afm.version,true) +afm.autoprefixed=true +afm.helpdata={} +afm.syncspace=true +afm.addligatures=true +afm.addtexligatures=true +afm.addkerns=true +local overloads=fonts.mappings.overloads +local applyruntimefixes=fonts.treatments and fonts.treatments.applyfixes +local function setmode(tfmdata,value) + if value then + tfmdata.properties.mode=lower(value) + end +end +registerafmfeature { + name="mode", + description="mode", + initializers={ + base=setmode, + node=setmode, + } +} +local comment=P("Comment") +local spacing=patterns.spacer +local lineend=patterns.newline +local words=C((1-lineend)^1) +local number=C((R("09")+S("."))^1)/tonumber*spacing^0 +local data=lpeg.Carg(1) +local pattern=( + comment*spacing*( + data*( + ("CODINGSCHEME"*spacing*words )/function(fd,a) end+("DESIGNSIZE"*spacing*number*words )/function(fd,a) fd[ 1]=a end+("CHECKSUM"*spacing*number*words )/function(fd,a) fd[ 2]=a end+("SPACE"*spacing*number*"plus"*number*"minus"*number)/function(fd,a,b,c) fd[ 3],fd[ 4],fd[ 5]=a,b,c end+("QUAD"*spacing*number )/function(fd,a) fd[ 6]=a end+("EXTRASPACE"*spacing*number )/function(fd,a) fd[ 7]=a end+("NUM"*spacing*number*number*number )/function(fd,a,b,c) fd[ 8],fd[ 9],fd[10]=a,b,c end+("DENOM"*spacing*number*number )/function(fd,a,b ) fd[11],fd[12]=a,b end+("SUP"*spacing*number*number*number )/function(fd,a,b,c) fd[13],fd[14],fd[15]=a,b,c end+("SUB"*spacing*number*number )/function(fd,a,b) fd[16],fd[17]=a,b end+("SUPDROP"*spacing*number )/function(fd,a) fd[18]=a end+("SUBDROP"*spacing*number )/function(fd,a) fd[19]=a end+("DELIM"*spacing*number*number )/function(fd,a,b) fd[20],fd[21]=a,b end+("AXISHEIGHT"*spacing*number )/function(fd,a) fd[22]=a end + )+(1-lineend)^0 + )+(1-comment)^1 +)^0 +local function scan_comment(str) + local fd={} + lpegmatch(pattern,str,1,fd) + return fd +end +local keys={} +function keys.FontName (data,line) data.metadata.fontname=strip (line) + data.metadata.fullname=strip (line) end +function keys.ItalicAngle (data,line) data.metadata.italicangle=tonumber (line) end +function keys.IsFixedPitch(data,line) data.metadata.isfixedpitch=toboolean(line,true) end +function keys.CharWidth (data,line) data.metadata.charwidth=tonumber (line) end +function keys.XHeight (data,line) data.metadata.xheight=tonumber (line) end +function keys.Descender (data,line) data.metadata.descender=tonumber (line) end +function keys.Ascender (data,line) data.metadata.ascender=tonumber (line) end +function keys.Comment (data,line) + line=lower(line) + local designsize=match(line,"designsize[^%d]*(%d+)") + if designsize then data.metadata.designsize=tonumber(designsize) end +end +local function get_charmetrics(data,charmetrics,vector) + local characters=data.characters + local chr,ind={},0 + for k,v in gmatch(charmetrics,"([%a]+) +(.-) *;") do + if k=='C' then + v=tonumber(v) + if v<0 then + ind=ind+1 + else + ind=v + end + chr={ + index=ind + } + elseif k=='WX' then + chr.width=tonumber(v) + elseif k=='N' then + characters[v]=chr + elseif k=='B' then + local llx,lly,urx,ury=match(v,"^ *(.-) +(.-) +(.-) +(.-)$") + chr.boundingbox={ tonumber(llx),tonumber(lly),tonumber(urx),tonumber(ury) } + elseif k=='L' then + local plus,becomes=match(v,"^(.-) +(.-)$") + local ligatures=chr.ligatures + if ligatures then + 
ligatures[plus]=becomes + else + chr.ligatures={ [plus]=becomes } + end + end + end +end +local function get_kernpairs(data,kernpairs) + local characters=data.characters + for one,two,value in gmatch(kernpairs,"KPX +(.-) +(.-) +(.-)\n") do + local chr=characters[one] + if chr then + local kerns=chr.kerns + if kerns then + kerns[two]=tonumber(value) + else + chr.kerns={ [two]=tonumber(value) } + end + end + end +end +local function get_variables(data,fontmetrics) + for key,rest in gmatch(fontmetrics,"(%a+) *(.-)[\n\r]") do + local keyhandler=keys[key] + if keyhandler then + keyhandler(data,rest) + end + end +end +local function get_indexes(data,pfbname) + data.resources.filename=resolvers.unresolve(pfbname) + local pfbblob=fontloader.open(pfbname) + if pfbblob then + local characters=data.characters + local pfbdata=fontloader.to_table(pfbblob) + if pfbdata then + local glyphs=pfbdata.glyphs + if glyphs then + if trace_loading then + report_afm("getting index data from %a",pfbname) + end + for index,glyph in next,glyphs do + local name=glyph.name + if name then + local char=characters[name] + if char then + if trace_indexing then + report_afm("glyph %a has index %a",name,index) + end + char.index=index + end + end + end + elseif trace_loading then + report_afm("no glyph data in pfb file %a",pfbname) + end + elseif trace_loading then + report_afm("no data in pfb file %a",pfbname) + end + fontloader.close(pfbblob) + elseif trace_loading then + report_afm("invalid pfb file %a",pfbname) + end +end +local function readafm(filename) + local ok,afmblob,size=resolvers.loadbinfile(filename) + if ok and afmblob then + local data={ + resources={ + filename=resolvers.unresolve(filename), + version=afm.version, + creator="context mkiv", + }, + properties={ + hasitalics=false, + }, + goodies={}, + metadata={ + filename=file.removesuffix(file.basename(filename)) + }, + characters={ + }, + descriptions={ + }, + } + afmblob=gsub(afmblob,"StartCharMetrics(.-)EndCharMetrics",function(charmetrics) + if trace_loading then + report_afm("loading char metrics") + end + get_charmetrics(data,charmetrics,vector) + return "" + end) + afmblob=gsub(afmblob,"StartKernPairs(.-)EndKernPairs",function(kernpairs) + if trace_loading then + report_afm("loading kern pairs") + end + get_kernpairs(data,kernpairs) + return "" + end) + afmblob=gsub(afmblob,"StartFontMetrics%s+([%d%.]+)(.-)EndFontMetrics",function(version,fontmetrics) + if trace_loading then + report_afm("loading variables") + end + data.afmversion=version + get_variables(data,fontmetrics) + data.fontdimens=scan_comment(fontmetrics) + return "" + end) + return data + else + if trace_loading then + report_afm("no valid afm file %a",filename) + end + return nil + end +end +local addkerns,addligatures,addtexligatures,unify,normalize,fixnames +function afm.load(filename) + filename=resolvers.findfile(filename,'afm') or "" + if filename~="" and not fonts.names.ignoredfile(filename) then + local name=file.removesuffix(file.basename(filename)) + local data=containers.read(afm.cache,name) + local attr=lfs.attributes(filename) + local size,time=attr.size or 0,attr.modification or 0 + local pfbfile=file.replacesuffix(name,"pfb") + local pfbname=resolvers.findfile(pfbfile,"pfb") or "" + if pfbname=="" then + pfbname=resolvers.findfile(file.basename(pfbfile),"pfb") or "" + end + local pfbsize,pfbtime=0,0 + if pfbname~="" then + local attr=lfs.attributes(pfbname) + pfbsize=attr.size or 0 + pfbtime=attr.modification or 0 + end + if not data or data.size~=size or data.time~=time or 
data.pfbsize~=pfbsize or data.pfbtime~=pfbtime then + report_afm("reading %a",filename) + data=readafm(filename) + if data then + if pfbname~="" then + get_indexes(data,pfbname) + elseif trace_loading then + report_afm("no pfb file for %a",filename) + end + report_afm("unifying %a",filename) + unify(data,filename) + if afm.addligatures then + report_afm("add ligatures") + addligatures(data) + end + if afm.addtexligatures then + report_afm("add tex ligatures") + addtexligatures(data) + end + if afm.addkerns then + report_afm("add extra kerns") + addkerns(data) + end + normalize(data) + fixnames(data) + report_afm("add tounicode data") + fonts.mappings.addtounicode(data,filename) + data.size=size + data.time=time + data.pfbsize=pfbsize + data.pfbtime=pfbtime + report_afm("saving %a in cache",name) + data.resources.unicodes=nil + data=containers.write(afm.cache,name,data) + data=containers.read(afm.cache,name) + end + if applyruntimefixes and data then + applyruntimefixes(filename,data) + end + end + return data + else + return nil + end +end +local uparser=fonts.mappings.makenameparser() +unify=function(data,filename) + local unicodevector=fonts.encodings.agl.unicodes + local unicodes,names={},{} + local private=constructors.privateoffset + local descriptions=data.descriptions + for name,blob in next,data.characters do + local code=unicodevector[name] + if not code then + code=lpegmatch(uparser,name) + if not code then + code=private + private=private+1 + report_afm("assigning private slot %U for unknown glyph name %a",code,name) + end + end + local index=blob.index + unicodes[name]=code + names[name]=index + blob.name=name + descriptions[code]={ + boundingbox=blob.boundingbox, + width=blob.width, + kerns=blob.kerns, + index=index, + name=name, + } + end + for unicode,description in next,descriptions do + local kerns=description.kerns + if kerns then + local krn={} + for name,kern in next,kerns do + local unicode=unicodes[name] + if unicode then + krn[unicode]=kern + else + end + end + description.kerns=krn + end + end + data.characters=nil + local resources=data.resources + local filename=resources.filename or file.removesuffix(file.basename(filename)) + resources.filename=resolvers.unresolve(filename) + resources.unicodes=unicodes + resources.marks={} + resources.private=private +end +normalize=function(data) +end +fixnames=function(data) + for k,v in next,data.descriptions do + local n=v.name + local r=overloads[n] + if r then + local name=r.name + if trace_indexing then + report_afm("renaming characters %a to %a",n,name) + end + v.name=name + v.unicode=r.unicode + end + end +end +local addthem=function(rawdata,ligatures) + if ligatures then + local descriptions=rawdata.descriptions + local resources=rawdata.resources + local unicodes=resources.unicodes + for ligname,ligdata in next,ligatures do + local one=descriptions[unicodes[ligname]] + if one then + for _,pair in next,ligdata do + local two,three=unicodes[pair[1]],unicodes[pair[2]] + if two and three then + local ol=one.ligatures + if ol then + if not ol[two] then + ol[two]=three + end + else + one.ligatures={ [two]=three } + end + end + end + end + end + end +end +addligatures=function(rawdata) addthem(rawdata,afm.helpdata.ligatures ) end +addtexligatures=function(rawdata) addthem(rawdata,afm.helpdata.texligatures) end +addkerns=function(rawdata) + local descriptions=rawdata.descriptions + local resources=rawdata.resources + local unicodes=resources.unicodes + local function do_it_left(what) + if what then + for unicode,description 
in next,descriptions do + local kerns=description.kerns + if kerns then + local extrakerns + for complex,simple in next,what do + complex=unicodes[complex] + simple=unicodes[simple] + if complex and simple then + local ks=kerns[simple] + if ks and not kerns[complex] then + if extrakerns then + extrakerns[complex]=ks + else + extrakerns={ [complex]=ks } + end + end + end + end + if extrakerns then + description.extrakerns=extrakerns + end + end + end + end + end + local function do_it_copy(what) + if what then + for complex,simple in next,what do + complex=unicodes[complex] + simple=unicodes[simple] + if complex and simple then + local complexdescription=descriptions[complex] + if complexdescription then + local simpledescription=descriptions[complex] + if simpledescription then + local extrakerns + local kerns=simpledescription.kerns + if kerns then + for unicode,kern in next,kerns do + if extrakerns then + extrakerns[unicode]=kern + else + extrakerns={ [unicode]=kern } + end + end + end + local extrakerns=simpledescription.extrakerns + if extrakerns then + for unicode,kern in next,extrakerns do + if extrakerns then + extrakerns[unicode]=kern + else + extrakerns={ [unicode]=kern } + end + end + end + if extrakerns then + complexdescription.extrakerns=extrakerns + end + end + end + end + end + end + end + do_it_left(afm.helpdata.leftkerned) + do_it_left(afm.helpdata.bothkerned) + do_it_copy(afm.helpdata.bothkerned) + do_it_copy(afm.helpdata.rightkerned) +end +local function adddimensions(data) + if data then + for unicode,description in next,data.descriptions do + local bb=description.boundingbox + if bb then + local ht,dp=bb[4],-bb[2] + if ht==0 or ht<0 then + else + description.height=ht + end + if dp==0 or dp<0 then + else + description.depth=dp + end + end + end + end +end +local function copytotfm(data) + if data and data.descriptions then + local metadata=data.metadata + local resources=data.resources + local properties=derivetable(data.properties) + local descriptions=derivetable(data.descriptions) + local goodies=derivetable(data.goodies) + local characters={} + local parameters={} + local unicodes=resources.unicodes + for unicode,description in next,data.descriptions do + characters[unicode]={} + end + local filename=constructors.checkedfilename(resources) + local fontname=metadata.fontname or metadata.fullname + local fullname=metadata.fullname or metadata.fontname + local endash=0x0020 + local emdash=0x2014 + local spacer="space" + local spaceunits=500 + local monospaced=metadata.isfixedpitch + local charwidth=metadata.charwidth + local italicangle=metadata.italicangle + local charxheight=metadata.xheight and metadata.xheight>0 and metadata.xheight + properties.monospaced=monospaced + parameters.italicangle=italicangle + parameters.charwidth=charwidth + parameters.charxheight=charxheight + if properties.monospaced then + if descriptions[endash] then + spaceunits,spacer=descriptions[endash].width,"space" + end + if not spaceunits and descriptions[emdash] then + spaceunits,spacer=descriptions[emdash].width,"emdash" + end + if not spaceunits and charwidth then + spaceunits,spacer=charwidth,"charwidth" + end + else + if descriptions[endash] then + spaceunits,spacer=descriptions[endash].width,"space" + end + if not spaceunits and charwidth then + spaceunits,spacer=charwidth,"charwidth" + end + end + spaceunits=tonumber(spaceunits) + if spaceunits<200 then + end + parameters.slant=0 + parameters.space=spaceunits + parameters.space_stretch=500 + parameters.space_shrink=333 + 
parameters.x_height=400 + parameters.quad=1000 + if italicangle and italicangle~=0 then + parameters.italicangle=italicangle + parameters.italicfactor=math.cos(math.rad(90+italicangle)) + parameters.slant=- math.tan(italicangle*math.pi/180) + end + if monospaced then + parameters.space_stretch=0 + parameters.space_shrink=0 + elseif afm.syncspace then + parameters.space_stretch=spaceunits/2 + parameters.space_shrink=spaceunits/3 + end + parameters.extra_space=parameters.space_shrink + if charxheight then + parameters.x_height=charxheight + else + local x=0x0078 + if x then + local x=descriptions[x] + if x then + parameters.x_height=x.height + end + end + end + local fd=data.fontdimens + if fd and fd[8] and fd[9] and fd[10] then + for k,v in next,fd do + parameters[k]=v + end + end + parameters.designsize=(metadata.designsize or 10)*65536 + parameters.ascender=abs(metadata.ascender or 0) + parameters.descender=abs(metadata.descender or 0) + parameters.units=1000 + properties.spacer=spacer + properties.encodingbytes=2 + properties.format=fonts.formats[filename] or "type1" + properties.filename=filename + properties.fontname=fontname + properties.fullname=fullname + properties.psname=fullname + properties.name=filename or fullname or fontname + if next(characters) then + return { + characters=characters, + descriptions=descriptions, + parameters=parameters, + resources=resources, + properties=properties, + goodies=goodies, + } + end + end + return nil +end +function afm.setfeatures(tfmdata,features) + local okay=constructors.initializefeatures("afm",tfmdata,features,trace_features,report_afm) + if okay then + return constructors.collectprocessors("afm",tfmdata,features,trace_features,report_afm) + else + return {} + end +end +local function addtables(data) + local resources=data.resources + local lookuptags=resources.lookuptags + local unicodes=resources.unicodes + if not lookuptags then + lookuptags={} + resources.lookuptags=lookuptags + end + setmetatableindex(lookuptags,function(t,k) + local v=type(k)=="number" and ("lookup "..k) or k + t[k]=v + return v + end) + if not unicodes then + unicodes={} + resources.unicodes=unicodes + setmetatableindex(unicodes,function(t,k) + setmetatableindex(unicodes,nil) + for u,d in next,data.descriptions do + local n=d.name + if n then + t[n]=u + end + end + return rawget(t,k) + end) + end + constructors.addcoreunicodes(unicodes) +end +local function afmtotfm(specification) + local afmname=specification.filename or specification.name + if specification.forced=="afm" or specification.format=="afm" then + if trace_loading then + report_afm("forcing afm format for %a",afmname) + end + else + local tfmname=findbinfile(afmname,"ofm") or "" + if tfmname~="" then + if trace_loading then + report_afm("fallback from afm to tfm for %a",afmname) + end + return + end + end + if afmname~="" then + local features=constructors.checkedfeatures("afm",specification.features.normal) + specification.features.normal=features + constructors.hashinstance(specification,true) + specification=definers.resolve(specification) + local cache_id=specification.hash + local tfmdata=containers.read(constructors.cache,cache_id) + if not tfmdata then + local rawdata=afm.load(afmname) + if rawdata and next(rawdata) then + addtables(rawdata) + adddimensions(rawdata) + tfmdata=copytotfm(rawdata) + if tfmdata and next(tfmdata) then + local shared=tfmdata.shared + if not shared then + shared={} + tfmdata.shared=shared + end + shared.rawdata=rawdata + shared.features=features + 
shared.processes=afm.setfeatures(tfmdata,features) + end + elseif trace_loading then + report_afm("no (valid) afm file found with name %a",afmname) + end + tfmdata=containers.write(constructors.cache,cache_id,tfmdata) + end + return tfmdata + end +end +local function read_from_afm(specification) + local tfmdata=afmtotfm(specification) + if tfmdata then + tfmdata.properties.name=specification.name + tfmdata=constructors.scale(tfmdata,specification) + local allfeatures=tfmdata.shared.features or specification.features.normal + constructors.applymanipulators("afm",tfmdata,allfeatures,trace_features,report_afm) + fonts.loggers.register(tfmdata,'afm',specification) + end + return tfmdata +end +local function prepareligatures(tfmdata,ligatures,value) + if value then + local descriptions=tfmdata.descriptions + local hasligatures=false + for unicode,character in next,tfmdata.characters do + local description=descriptions[unicode] + local dligatures=description.ligatures + if dligatures then + local cligatures=character.ligatures + if not cligatures then + cligatures={} + character.ligatures=cligatures + end + for unicode,ligature in next,dligatures do + cligatures[unicode]={ + char=ligature, + type=0 + } + end + hasligatures=true + end + end + tfmdata.properties.hasligatures=hasligatures + end +end +local function preparekerns(tfmdata,kerns,value) + if value then + local rawdata=tfmdata.shared.rawdata + local resources=rawdata.resources + local unicodes=resources.unicodes + local descriptions=tfmdata.descriptions + local haskerns=false + for u,chr in next,tfmdata.characters do + local d=descriptions[u] + local newkerns=d[kerns] + if newkerns then + local kerns=chr.kerns + if not kerns then + kerns={} + chr.kerns=kerns + end + for k,v in next,newkerns do + local uk=unicodes[k] + if uk then + kerns[uk]=v + end + end + haskerns=true + end + end + tfmdata.properties.haskerns=haskerns + end +end +local list={ + [0x0027]=0x2019, +} +local function texreplacements(tfmdata,value) + local descriptions=tfmdata.descriptions + local characters=tfmdata.characters + for k,v in next,list do + characters [k]=characters [v] + descriptions[k]=descriptions[v] + end +end +local function ligatures (tfmdata,value) prepareligatures(tfmdata,'ligatures',value) end +local function texligatures(tfmdata,value) prepareligatures(tfmdata,'texligatures',value) end +local function kerns (tfmdata,value) preparekerns (tfmdata,'kerns',value) end +local function extrakerns (tfmdata,value) preparekerns (tfmdata,'extrakerns',value) end +registerafmfeature { + name="liga", + description="traditional ligatures", + initializers={ + base=ligatures, + node=ligatures, + } +} +registerafmfeature { + name="kern", + description="intercharacter kerning", + initializers={ + base=kerns, + node=kerns, + } +} +registerafmfeature { + name="extrakerns", + description="additional intercharacter kerning", + initializers={ + base=extrakerns, + node=extrakerns, + } +} +registerafmfeature { + name='tlig', + description='tex ligatures', + initializers={ + base=texligatures, + node=texligatures, + } +} +registerafmfeature { + name='trep', + description='tex replacements', + initializers={ + base=texreplacements, + node=texreplacements, + } +} +local check_tfm=readers.check_tfm +fonts.formats.afm="type1" +fonts.formats.pfb="type1" +local function check_afm(specification,fullname) + local foundname=findbinfile(fullname,'afm') or "" + if foundname=="" then + foundname=fonts.names.getfilename(fullname,"afm") or "" + end + if foundname=="" and afm.autoprefixed 
then + local encoding,shortname=match(fullname,"^(.-)%-(.*)$") + if encoding and shortname and fonts.encodings.known[encoding] then + shortname=findbinfile(shortname,'afm') or "" + if shortname~="" then + foundname=shortname + if trace_defining then + report_afm("stripping encoding prefix from filename %a",afmname) + end + end + end + end + if foundname~="" then + specification.filename=foundname + specification.format="afm" + return read_from_afm(specification) + end +end +function readers.afm(specification,method) + local fullname,tfmdata=specification.filename or "",nil + if fullname=="" then + local forced=specification.forced or "" + if forced~="" then + tfmdata=check_afm(specification,specification.name.."."..forced) + end + if not tfmdata then + method=method or definers.method or "afm or tfm" + if method=="tfm" then + tfmdata=check_tfm(specification,specification.name) + elseif method=="afm" then + tfmdata=check_afm(specification,specification.name) + elseif method=="tfm or afm" then + tfmdata=check_tfm(specification,specification.name) or check_afm(specification,specification.name) + else + tfmdata=check_afm(specification,specification.name) or check_tfm(specification,specification.name) + end + end + else + tfmdata=check_afm(specification,fullname) + end + return tfmdata +end +function readers.pfb(specification,method) + local original=specification.specification + if trace_defining then + report_afm("using afm reader for %a",original) + end + specification.specification=gsub(original,"%.pfb",".afm") + specification.forced="afm" + return readers.afm(specification,method) +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-afk']={ + version=1.001, + comment="companion to font-afm.lua", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files", + dataonly=true, +} +local allocate=utilities.storage.allocate +fonts.handlers.afm.helpdata={ + ligatures=allocate { + ['f']={ + { 'f','ff' }, + { 'i','fi' }, + { 'l','fl' }, + }, + ['ff']={ + { 'i','ffi' } + }, + ['fi']={ + { 'i','fii' } + }, + ['fl']={ + { 'i','fli' } + }, + ['s']={ + { 't','st' } + }, + ['i']={ + { 'j','ij' } + }, + }, + texligatures=allocate { + ['quoteleft']={ + { 'quoteleft','quotedblleft' } + }, + ['quoteright']={ + { 'quoteright','quotedblright' } + }, + ['hyphen']={ + { 'hyphen','endash' } + }, + ['endash']={ + { 'hyphen','emdash' } + } + }, + leftkerned=allocate { + AEligature="A",aeligature="a", + OEligature="O",oeligature="o", + IJligature="I",ijligature="i", + AE="A",ae="a", + OE="O",oe="o", + IJ="I",ij="i", + Ssharp="S",ssharp="s", + }, + rightkerned=allocate { + AEligature="E",aeligature="e", + OEligature="E",oeligature="e", + IJligature="J",ijligature="j", + AE="E",ae="e", + OE="E",oe="e", + IJ="J",ij="j", + Ssharp="S",ssharp="s", + }, + bothkerned=allocate { + Acircumflex="A",acircumflex="a", + Ccircumflex="C",ccircumflex="c", + Ecircumflex="E",ecircumflex="e", + Gcircumflex="G",gcircumflex="g", + Hcircumflex="H",hcircumflex="h", + Icircumflex="I",icircumflex="i", + Jcircumflex="J",jcircumflex="j", + Ocircumflex="O",ocircumflex="o", + Scircumflex="S",scircumflex="s", + Ucircumflex="U",ucircumflex="u", + Wcircumflex="W",wcircumflex="w", + Ycircumflex="Y",ycircumflex="y", + Agrave="A",agrave="a", + Egrave="E",egrave="e", + Igrave="I",igrave="i", + Ograve="O",ograve="o", + Ugrave="U",ugrave="u", + Ygrave="Y",ygrave="y", + 
Atilde="A",atilde="a", + Itilde="I",itilde="i", + Otilde="O",otilde="o", + Utilde="U",utilde="u", + Ntilde="N",ntilde="n", + Adiaeresis="A",adiaeresis="a",Adieresis="A",adieresis="a", + Ediaeresis="E",ediaeresis="e",Edieresis="E",edieresis="e", + Idiaeresis="I",idiaeresis="i",Idieresis="I",idieresis="i", + Odiaeresis="O",odiaeresis="o",Odieresis="O",odieresis="o", + Udiaeresis="U",udiaeresis="u",Udieresis="U",udieresis="u", + Ydiaeresis="Y",ydiaeresis="y",Ydieresis="Y",ydieresis="y", + Aacute="A",aacute="a", + Cacute="C",cacute="c", + Eacute="E",eacute="e", + Iacute="I",iacute="i", + Lacute="L",lacute="l", + Nacute="N",nacute="n", + Oacute="O",oacute="o", + Racute="R",racute="r", + Sacute="S",sacute="s", + Uacute="U",uacute="u", + Yacute="Y",yacute="y", + Zacute="Z",zacute="z", + Dstroke="D",dstroke="d", + Hstroke="H",hstroke="h", + Tstroke="T",tstroke="t", + Cdotaccent="C",cdotaccent="c", + Edotaccent="E",edotaccent="e", + Gdotaccent="G",gdotaccent="g", + Idotaccent="I",idotaccent="i", + Zdotaccent="Z",zdotaccent="z", + Amacron="A",amacron="a", + Emacron="E",emacron="e", + Imacron="I",imacron="i", + Omacron="O",omacron="o", + Umacron="U",umacron="u", + Ccedilla="C",ccedilla="c", + Kcedilla="K",kcedilla="k", + Lcedilla="L",lcedilla="l", + Ncedilla="N",ncedilla="n", + Rcedilla="R",rcedilla="r", + Scedilla="S",scedilla="s", + Tcedilla="T",tcedilla="t", + Ohungarumlaut="O",ohungarumlaut="o", + Uhungarumlaut="U",uhungarumlaut="u", + Aogonek="A",aogonek="a", + Eogonek="E",eogonek="e", + Iogonek="I",iogonek="i", + Uogonek="U",uogonek="u", + Aring="A",aring="a", + Uring="U",uring="u", + Abreve="A",abreve="a", + Ebreve="E",ebreve="e", + Gbreve="G",gbreve="g", + Ibreve="I",ibreve="i", + Obreve="O",obreve="o", + Ubreve="U",ubreve="u", + Ccaron="C",ccaron="c", + Dcaron="D",dcaron="d", + Ecaron="E",ecaron="e", + Lcaron="L",lcaron="l", + Ncaron="N",ncaron="n", + Rcaron="R",rcaron="r", + Scaron="S",scaron="s", + Tcaron="T",tcaron="t", + Zcaron="Z",zcaron="z", + dotlessI="I",dotlessi="i", + dotlessJ="J",dotlessj="j", + AEligature="AE",aeligature="ae",AE="AE",ae="ae", + OEligature="OE",oeligature="oe",OE="OE",oe="oe", + IJligature="IJ",ijligature="ij",IJ="IJ",ij="ij", + Lstroke="L",lstroke="l",Lslash="L",lslash="l", + Ostroke="O",ostroke="o",Oslash="O",oslash="o", + Ssharp="SS",ssharp="ss", + Aumlaut="A",aumlaut="a", + Eumlaut="E",eumlaut="e", + Iumlaut="I",iumlaut="i", + Oumlaut="O",oumlaut="o", + Uumlaut="U",uumlaut="u", + } +} + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['luatex-fonts-tfm']={ + version=1.001, + comment="companion to luatex-*.tex", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end +local fonts=fonts +local tfm={} +fonts.handlers.tfm=tfm +fonts.formats.tfm="type1" +function fonts.readers.tfm(specification) + local fullname=specification.filename or "" + if fullname=="" then + local forced=specification.forced or "" + if forced~="" then + fullname=specification.name.."."..forced + else + fullname=specification.name + end + end + local foundname=resolvers.findbinfile(fullname,'tfm') or "" + if foundname=="" then + foundname=resolvers.findbinfile(fullname,'ofm') or "" + end + if foundname~="" then + specification.filename=foundname + specification.format="ofm" + return 
font.read_tfm(specification.filename,specification.size) + end +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-oti']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local lower=string.lower +local fonts=fonts +local constructors=fonts.constructors +local otf=constructors.newhandler("otf") +local otffeatures=constructors.newfeatures("otf") +local otftables=otf.tables +local registerotffeature=otffeatures.register +local allocate=utilities.storage.allocate +registerotffeature { + name="features", + description="initialization of feature handler", + default=true, +} +local function setmode(tfmdata,value) + if value then + tfmdata.properties.mode=lower(value) + end +end +local function setlanguage(tfmdata,value) + if value then + local cleanvalue=lower(value) + local languages=otftables and otftables.languages + local properties=tfmdata.properties + if not languages then + properties.language=cleanvalue + elseif languages[value] then + properties.language=cleanvalue + else + properties.language="dflt" + end + end +end +local function setscript(tfmdata,value) + if value then + local cleanvalue=lower(value) + local scripts=otftables and otftables.scripts + local properties=tfmdata.properties + if not scripts then + properties.script=cleanvalue + elseif scripts[value] then + properties.script=cleanvalue + else + properties.script="dflt" + end + end +end +registerotffeature { + name="mode", + description="mode", + initializers={ + base=setmode, + node=setmode, + } +} +registerotffeature { + name="language", + description="language", + initializers={ + base=setlanguage, + node=setlanguage, + } +} +registerotffeature { + name="script", + description="script", + initializers={ + base=setscript, + node=setscript, + } +} + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-otf']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local utfbyte=utf.byte +local format,gmatch,gsub,find,match,lower,strip=string.format,string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip +local type,next,tonumber,tostring=type,next,tonumber,tostring +local abs=math.abs +local insert=table.insert +local lpegmatch=lpeg.match +local reversed,concat,remove,sortedkeys=table.reversed,table.concat,table.remove,table.sortedkeys +local ioflush=io.flush +local fastcopy,tohash,derivetable=table.fastcopy,table.tohash,table.derive +local formatters=string.formatters +local P,R,S,C,Ct,lpegmatch=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Ct,lpeg.match +local setmetatableindex=table.setmetatableindex +local allocate=utilities.storage.allocate +local registertracker=trackers.register +local registerdirective=directives.register +local starttiming=statistics.starttiming +local stoptiming=statistics.stoptiming +local elapsedtime=statistics.elapsedtime +local findbinfile=resolvers.findbinfile +local trace_private=false registertracker("otf.private",function(v) trace_private=v end) +local trace_loading=false registertracker("otf.loading",function(v) trace_loading=v end) +local trace_features=false registertracker("otf.features",function(v) 
trace_features=v end) +local trace_dynamics=false registertracker("otf.dynamics",function(v) trace_dynamics=v end) +local trace_sequences=false registertracker("otf.sequences",function(v) trace_sequences=v end) +local trace_markwidth=false registertracker("otf.markwidth",function(v) trace_markwidth=v end) +local trace_defining=false registertracker("fonts.defining",function(v) trace_defining=v end) +local compact_lookups=true registertracker("otf.compactlookups",function(v) compact_lookups=v end) +local purge_names=true registertracker("otf.purgenames",function(v) purge_names=v end) +local report_otf=logs.reporter("fonts","otf loading") +local fonts=fonts +local otf=fonts.handlers.otf +otf.glists={ "gsub","gpos" } +otf.version=2.802 +otf.cache=containers.define("fonts","otf",otf.version,true) +local fontdata=fonts.hashes.identifiers +local chardata=characters and characters.data +local definers=fonts.definers +local readers=fonts.readers +local constructors=fonts.constructors +local otffeatures=constructors.newfeatures("otf") +local registerotffeature=otffeatures.register +local enhancers=allocate() +otf.enhancers=enhancers +local patches={} +enhancers.patches=patches +local forceload=false +local cleanup=0 +local packdata=true +local syncspace=true +local forcenotdef=false +local includesubfonts=false +local overloadkerns=false +local applyruntimefixes=fonts.treatments and fonts.treatments.applyfixes +local wildcard="*" +local default="dflt" +local fontloaderfields=fontloader.fields +local mainfields=nil +local glyphfields=nil +local formats=fonts.formats +formats.otf="opentype" +formats.ttf="truetype" +formats.ttc="truetype" +formats.dfont="truetype" +registerdirective("fonts.otf.loader.cleanup",function(v) cleanup=tonumber(v) or (v and 1) or 0 end) +registerdirective("fonts.otf.loader.force",function(v) forceload=v end) +registerdirective("fonts.otf.loader.pack",function(v) packdata=v end) +registerdirective("fonts.otf.loader.syncspace",function(v) syncspace=v end) +registerdirective("fonts.otf.loader.forcenotdef",function(v) forcenotdef=v end) +registerdirective("fonts.otf.loader.overloadkerns",function(v) overloadkerns=v end) +function otf.fileformat(filename) + local leader=lower(io.loadchunk(filename,4)) + local suffix=lower(file.suffix(filename)) + if leader=="otto" then + return formats.otf,suffix=="otf" + elseif leader=="ttcf" then + return formats.ttc,suffix=="ttc" + elseif suffix=="ttc" then + return formats.ttc,true + elseif suffix=="dfont" then + return formats.dfont,true + else + return formats.ttf,suffix=="ttf" + end +end +local function otf_format(filename) + local format,okay=otf.fileformat(filename) + if not okay then + report_otf("font %a is actually an %a file",filename,format) + end + return format +end +local function load_featurefile(raw,featurefile) + if featurefile and featurefile~="" then + if trace_loading then + report_otf("using featurefile %a",featurefile) + end + fontloader.apply_featurefile(raw,featurefile) + end +end +local function showfeatureorder(rawdata,filename) + local sequences=rawdata.resources.sequences + if sequences and #sequences>0 then + if trace_loading then + report_otf("font %a has %s sequences",filename,#sequences) + report_otf(" ") + end + for nos=1,#sequences do + local sequence=sequences[nos] + local typ=sequence.type or "no-type" + local name=sequence.name or "no-name" + local subtables=sequence.subtables or { "no-subtables" } + local features=sequence.features + if trace_loading then + report_otf("%3i %-15s %-20s [% 
t]",nos,name,typ,subtables) + end + if features then + for feature,scripts in next,features do + local tt={} + if type(scripts)=="table" then + for script,languages in next,scripts do + local ttt={} + for language,_ in next,languages do + ttt[#ttt+1]=language + end + tt[#tt+1]=formatters["[%s: % t]"](script,ttt) + end + if trace_loading then + report_otf(" %s: % t",feature,tt) + end + else + if trace_loading then + report_otf(" %s: %S",feature,scripts) + end + end + end + end + end + if trace_loading then + report_otf("\n") + end + elseif trace_loading then + report_otf("font %a has no sequences",filename) + end +end +local valid_fields=table.tohash { + "ascent", + "cidinfo", + "copyright", + "descent", + "design_range_bottom", + "design_range_top", + "design_size", + "encodingchanged", + "extrema_bound", + "familyname", + "fontname", + "fontstyle_id", + "fontstyle_name", + "fullname", + "hasvmetrics", + "horiz_base", + "issans", + "isserif", + "italicangle", + "macstyle", + "onlybitmaps", + "origname", + "os2_version", + "pfminfo", + "serifcheck", + "sfd_version", + "strokedfont", + "strokewidth", + "table_version", + "ttf_tables", + "uni_interp", + "uniqueid", + "units_per_em", + "upos", + "use_typo_metrics", + "uwidth", + "validation_state", + "version", + "vert_base", + "weight", + "weight_width_slope_only", +} +local ordered_enhancers={ + "prepare tables", + "prepare glyphs", + "prepare lookups", + "analyze glyphs", + "analyze math", + "reorganize lookups", + "reorganize mark classes", + "reorganize anchor classes", + "reorganize glyph kerns", + "reorganize glyph lookups", + "reorganize glyph anchors", + "merge kern classes", + "reorganize features", + "reorganize subtables", + "check glyphs", + "check metadata", + "check extra features", + "prepare tounicode", + "check encoding", + "add duplicates", + "cleanup tables", + "compact lookups", + "purge names", +} +local actions=allocate() +local before=allocate() +local after=allocate() +patches.before=before +patches.after=after +local function enhance(name,data,filename,raw) + local enhancer=actions[name] + if enhancer then + if trace_loading then + report_otf("apply enhancement %a to file %a",name,filename) + ioflush() + end + enhancer(data,filename,raw) + else + end +end +function enhancers.apply(data,filename,raw) + local basename=file.basename(lower(filename)) + if trace_loading then + report_otf("%s enhancing file %a","start",filename) + end + ioflush() + for e=1,#ordered_enhancers do + local enhancer=ordered_enhancers[e] + local b=before[enhancer] + if b then + for pattern,action in next,b do + if find(basename,pattern) then + action(data,filename,raw) + end + end + end + enhance(enhancer,data,filename,raw) + local a=after[enhancer] + if a then + for pattern,action in next,a do + if find(basename,pattern) then + action(data,filename,raw) + end + end + end + ioflush() + end + if trace_loading then + report_otf("%s enhancing file %a","stop",filename) + end + ioflush() +end +function patches.register(what,where,pattern,action) + local pw=patches[what] + if pw then + local ww=pw[where] + if ww then + ww[pattern]=action + else + pw[where]={ [pattern]=action} + end + end +end +function patches.report(fmt,...) 
+ if trace_loading then + report_otf("patching: %s",formatters[fmt](...)) + end +end +function enhancers.register(what,action) + actions[what]=action +end +function otf.load(filename,sub,featurefile) + local base=file.basename(file.removesuffix(filename)) + local name=file.removesuffix(base) + local attr=lfs.attributes(filename) + local size=attr and attr.size or 0 + local time=attr and attr.modification or 0 + if featurefile then + name=name.."@"..file.removesuffix(file.basename(featurefile)) + end + if sub=="" then + sub=false + end + local hash=name + if sub then + hash=hash.."-"..sub + end + hash=containers.cleanname(hash) + local featurefiles + if featurefile then + featurefiles={} + for s in gmatch(featurefile,"[^,]+") do + local name=resolvers.findfile(file.addsuffix(s,'fea'),'fea') or "" + if name=="" then + report_otf("loading error, no featurefile %a",s) + else + local attr=lfs.attributes(name) + featurefiles[#featurefiles+1]={ + name=name, + size=attr and attr.size or 0, + time=attr and attr.modification or 0, + } + end + end + if #featurefiles==0 then + featurefiles=nil + end + end + local data=containers.read(otf.cache,hash) + local reload=not data or data.size~=size or data.time~=time + if forceload then + report_otf("forced reload of %a due to hard coded flag",filename) + reload=true + end + if not reload then + local featuredata=data.featuredata + if featurefiles then + if not featuredata or #featuredata~=#featurefiles then + reload=true + else + for i=1,#featurefiles do + local fi,fd=featurefiles[i],featuredata[i] + if fi.name~=fd.name or fi.size~=fd.size or fi.time~=fd.time then + reload=true + break + end + end + end + elseif featuredata then + reload=true + end + if reload then + report_otf("loading: forced reload due to changed featurefile specification %a",featurefile) + end + end + if reload then + report_otf("loading %a, hash %a",filename,hash) + local fontdata,messages + if sub then + fontdata,messages=fontloader.open(filename,sub) + else + fontdata,messages=fontloader.open(filename) + end + if fontdata then + mainfields=mainfields or (fontloaderfields and fontloaderfields(fontdata)) + end + if trace_loading and messages and #messages>0 then + if type(messages)=="string" then + report_otf("warning: %s",messages) + else + for m=1,#messages do + report_otf("warning: %S",messages[m]) + end + end + else + report_otf("loading done") + end + if fontdata then + if featurefiles then + for i=1,#featurefiles do + load_featurefile(fontdata,featurefiles[i].name) + end + end + local unicodes={ + } + local splitter=lpeg.splitter(" ",unicodes) + data={ + size=size, + time=time, + format=otf_format(filename), + featuredata=featurefiles, + resources={ + filename=resolvers.unresolve(filename), + version=otf.version, + creator="context mkiv", + unicodes=unicodes, + indices={ + }, + duplicates={ + }, + variants={ + }, + lookuptypes={}, + }, + warnings={}, + metadata={ + }, + properties={ + }, + descriptions={}, + goodies={}, + helpers={ + tounicodelist=splitter, + tounicodetable=Ct(splitter), + }, + } + starttiming(data) + report_otf("file size: %s",size) + enhancers.apply(data,filename,fontdata) + local packtime={} + if packdata then + if cleanup>0 then + collectgarbage("collect") + end + starttiming(packtime) + enhance("pack",data,filename,nil) + stoptiming(packtime) + end + report_otf("saving %a in cache",filename) + data=containers.write(otf.cache,hash,data) + if cleanup>1 then + collectgarbage("collect") + end + stoptiming(data) + if elapsedtime then + report_otf("preprocessing 
and caching time %s, packtime %s", + elapsedtime(data),packdata and elapsedtime(packtime) or 0) + end + fontloader.close(fontdata) + if cleanup>3 then + collectgarbage("collect") + end + data=containers.read(otf.cache,hash) + if cleanup>2 then + collectgarbage("collect") + end + else + data=nil + report_otf("loading failed due to read error") + end + end + if data then + if trace_defining then + report_otf("loading from cache using hash %a",hash) + end + enhance("unpack",data,filename,nil,false) + local resources=data.resources + local lookuptags=resources.lookuptags + local unicodes=resources.unicodes + if not lookuptags then + lookuptags={} + resources.lookuptags=lookuptags + end + setmetatableindex(lookuptags,function(t,k) + local v=type(k)=="number" and ("lookup "..k) or k + t[k]=v + return v + end) + if not unicodes then + unicodes={} + resources.unicodes=unicodes + setmetatableindex(unicodes,function(t,k) + setmetatableindex(unicodes,nil) + for u,d in next,data.descriptions do + local n=d.name + if n then + t[n]=u + else + end + end + return rawget(t,k) + end) + end + constructors.addcoreunicodes(unicodes) + if applyruntimefixes then + applyruntimefixes(filename,data) + end + enhance("add dimensions",data,filename,nil,false) + if trace_sequences then + showfeatureorder(data,filename) + end + end + return data +end +local mt={ + __index=function(t,k) + if k=="height" then + local ht=t.boundingbox[4] + return ht<0 and 0 or ht + elseif k=="depth" then + local dp=-t.boundingbox[2] + return dp<0 and 0 or dp + elseif k=="width" then + return 0 + elseif k=="name" then + return forcenotdef and ".notdef" + end + end +} +actions["prepare tables"]=function(data,filename,raw) + data.properties.hasitalics=false +end +actions["add dimensions"]=function(data,filename) + if data then + local descriptions=data.descriptions + local resources=data.resources + local defaultwidth=resources.defaultwidth or 0 + local defaultheight=resources.defaultheight or 0 + local defaultdepth=resources.defaultdepth or 0 + local basename=trace_markwidth and file.basename(filename) + for _,d in next,descriptions do + local bb,wd=d.boundingbox,d.width + if not wd then + d.width=defaultwidth + elseif trace_markwidth and wd~=0 and d.class=="mark" then + report_otf("mark %a with width %b found in %a",d.name or "",wd,basename) + end + if bb then + local ht,dp=bb[4],-bb[2] + if ht==0 or ht<0 then + else + d.height=ht + end + if dp==0 or dp<0 then + else + d.depth=dp + end + end + end + end +end +local function somecopy(old) + if old then + local new={} + if type(old)=="table" then + for k,v in next,old do + if k=="glyphs" then + elseif type(v)=="table" then + new[k]=somecopy(v) + else + new[k]=v + end + end + else + for i=1,#mainfields do + local k=mainfields[i] + local v=old[k] + if k=="glyphs" then + elseif type(v)=="table" then + new[k]=somecopy(v) + else + new[k]=v + end + end + end + return new + else + return {} + end +end +actions["prepare glyphs"]=function(data,filename,raw) + local rawglyphs=raw.glyphs + local rawsubfonts=raw.subfonts + local rawcidinfo=raw.cidinfo + local criterium=constructors.privateoffset + local private=criterium + local resources=data.resources + local metadata=data.metadata + local properties=data.properties + local descriptions=data.descriptions + local unicodes=resources.unicodes + local indices=resources.indices + local duplicates=resources.duplicates + local variants=resources.variants + if rawsubfonts then + metadata.subfonts=includesubfonts and {} + properties.cidinfo=rawcidinfo + if 
rawcidinfo.registry then + local cidmap=fonts.cid.getmap(rawcidinfo) + if cidmap then + rawcidinfo.usedname=cidmap.usedname + local nofnames,nofunicodes=0,0 + local cidunicodes,cidnames=cidmap.unicodes,cidmap.names + for cidindex=1,#rawsubfonts do + local subfont=rawsubfonts[cidindex] + local cidglyphs=subfont.glyphs + if includesubfonts then + metadata.subfonts[cidindex]=somecopy(subfont) + end + for index=0,subfont.glyphcnt-1 do + local glyph=cidglyphs[index] + if glyph then + local unicode=glyph.unicode + if unicode>=0x00E000 and unicode<=0x00F8FF then + unicode=-1 + elseif unicode>=0x0F0000 and unicode<=0x0FFFFD then + unicode=-1 + elseif unicode>=0x100000 and unicode<=0x10FFFD then + unicode=-1 + end + local name=glyph.name or cidnames[index] + if not unicode or unicode==-1 then + unicode=cidunicodes[index] + end + if unicode and descriptions[unicode] then + if trace_private then + report_otf("preventing glyph %a at index %H to overload unicode %U",name or "noname",index,unicode) + end + unicode=-1 + end + if not unicode or unicode==-1 then + if not name then + name=format("u%06X.ctx",private) + end + unicode=private + unicodes[name]=private + if trace_private then + report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private) + end + private=private+1 + nofnames=nofnames+1 + else + if not name then + name=format("u%06X.ctx",unicode) + end + unicodes[name]=unicode + nofunicodes=nofunicodes+1 + end + indices[index]=unicode + local description={ + boundingbox=glyph.boundingbox, + name=glyph.name or name or "unknown", + cidindex=cidindex, + index=index, + glyph=glyph, + } + descriptions[unicode]=description + else + end + end + end + if trace_loading then + report_otf("cid font remapped, %s unicode points, %s symbolic names, %s glyphs",nofunicodes,nofnames,nofunicodes+nofnames) + end + elseif trace_loading then + report_otf("unable to remap cid font, missing cid file for %a",filename) + end + elseif trace_loading then + report_otf("font %a has no glyphs",filename) + end + else + for index=0,raw.glyphcnt-1 do + local glyph=rawglyphs[index] + if glyph then + local unicode=glyph.unicode + local name=glyph.name + if not unicode or unicode==-1 then + unicode=private + unicodes[name]=private + if trace_private then + report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private) + end + private=private+1 + else + if unicode>criterium then + local taken=descriptions[unicode] + if taken then + if unicode>=private then + private=unicode+1 + else + private=private+1 + end + descriptions[private]=taken + unicodes[taken.name]=private + indices[taken.index]=private + if trace_private then + report_otf("slot %U is moved to %U due to private in font",unicode) + end + else + if unicode>=private then + private=unicode+1 + end + end + end + unicodes[name]=unicode + end + indices[index]=unicode + descriptions[unicode]={ + boundingbox=glyph.boundingbox, + name=name, + index=index, + glyph=glyph, + } + local altuni=glyph.altuni + if altuni then + for i=1,#altuni do + local a=altuni[i] + local u=a.unicode + local v=a.variant + if v then + local vv=variants[v] + if vv then + vv[u]=unicode + else + vv={ [u]=unicode } + variants[v]=vv + end + end + end + end + else + report_otf("potential problem: glyph %U is used but empty",index) + end + end + end + resources.private=private +end +actions["check encoding"]=function(data,filename,raw) + local descriptions=data.descriptions + local resources=data.resources + local properties=data.properties + local 
unicodes=resources.unicodes + local indices=resources.indices + local duplicates=resources.duplicates + local mapdata=raw.map or {} + local unicodetoindex=mapdata and mapdata.map or {} + local indextounicode=mapdata and mapdata.backmap or {} + local encname=lower(data.enc_name or mapdata.enc_name or "") + local criterium=0xFFFF + local privateoffset=constructors.privateoffset + if find(encname,"unicode") then + if trace_loading then + report_otf("checking embedded unicode map %a",encname) + end + local reported={} + for maybeunicode,index in next,unicodetoindex do + if descriptions[maybeunicode] then + else + local unicode=indices[index] + if not unicode then + elseif maybeunicode==unicode then + elseif unicode>privateoffset then + else + local d=descriptions[unicode] + if d then + local c=d.copies + if c then + c[maybeunicode]=true + else + d.copies={ [maybeunicode]=true } + end + elseif index and not reported[index] then + report_otf("missing index %i",index) + reported[index]=true + end + end + end + end + for unicode,data in next,descriptions do + local d=data.copies + if d then + duplicates[unicode]=sortedkeys(d) + data.copies=nil + end + end + elseif properties.cidinfo then + report_otf("warning: no unicode map, used cidmap %a",properties.cidinfo.usedname) + else + report_otf("warning: non unicode map %a, only using glyph unicode data",encname or "whatever") + end + if mapdata then + mapdata.map={} + mapdata.backmap={} + end +end +actions["add duplicates"]=function(data,filename,raw) + local descriptions=data.descriptions + local resources=data.resources + local properties=data.properties + local unicodes=resources.unicodes + local indices=resources.indices + local duplicates=resources.duplicates + for unicode,d in next,duplicates do + local nofduplicates=#d + if nofduplicates>4 then + if trace_loading then + report_otf("ignoring excessive duplicates of %U (n=%s)",unicode,nofduplicates) + end + else + for i=1,nofduplicates do + local u=d[i] + if not descriptions[u] then + local description=descriptions[unicode] + local n=0 + for _,description in next,descriptions do + local kerns=description.kerns + if kerns then + for _,k in next,kerns do + local ku=k[unicode] + if ku then + k[u]=ku + n=n+1 + end + end + end + end + if u>0 then + local duplicate=table.copy(description) + duplicate.comment=format("copy of U+%05X",unicode) + descriptions[u]=duplicate + if trace_loading then + report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n) + end + end + end + end + end + end +end +actions["analyze glyphs"]=function(data,filename,raw) + local descriptions=data.descriptions + local resources=data.resources + local metadata=data.metadata + local properties=data.properties + local hasitalics=false + local widths={} + local marks={} + for unicode,description in next,descriptions do + local glyph=description.glyph + local italic=glyph.italic_correction + if not italic then + elseif italic==0 then + else + description.italic=italic + hasitalics=true + end + local width=glyph.width + widths[width]=(widths[width] or 0)+1 + local class=glyph.class + if class then + if class=="mark" then + marks[unicode]=true + end + description.class=class + end + end + properties.hasitalics=hasitalics + resources.marks=marks + local wd,most=0,1 + for k,v in next,widths do + if v>most then + wd,most=k,v + end + end + if most>1000 then + if trace_loading then + report_otf("most common width: %s (%s times), sharing (cjk font)",wd,most) + end + for unicode,description in next,descriptions 
do + if description.width==wd then + else + description.width=description.glyph.width + end + end + resources.defaultwidth=wd + else + for unicode,description in next,descriptions do + description.width=description.glyph.width + end + end +end +actions["reorganize mark classes"]=function(data,filename,raw) + local mark_classes=raw.mark_classes + if mark_classes then + local resources=data.resources + local unicodes=resources.unicodes + local markclasses={} + resources.markclasses=markclasses + for name,class in next,mark_classes do + local t={} + for s in gmatch(class,"[^ ]+") do + t[unicodes[s]]=true + end + markclasses[name]=t + end + end +end +actions["reorganize features"]=function(data,filename,raw) + local features={} + data.resources.features=features + for k,what in next,otf.glists do + local dw=raw[what] + if dw then + local f={} + features[what]=f + for i=1,#dw do + local d=dw[i] + local dfeatures=d.features + if dfeatures then + for i=1,#dfeatures do + local df=dfeatures[i] + local tag=strip(lower(df.tag)) + local ft=f[tag] + if not ft then + ft={} + f[tag]=ft + end + local dscripts=df.scripts + for i=1,#dscripts do + local d=dscripts[i] + local languages=d.langs + local script=strip(lower(d.script)) + local fts=ft[script] if not fts then fts={} ft[script]=fts end + for i=1,#languages do + fts[strip(lower(languages[i]))]=true + end + end + end + end + end + end + end +end +actions["reorganize anchor classes"]=function(data,filename,raw) + local resources=data.resources + local anchor_to_lookup={} + local lookup_to_anchor={} + resources.anchor_to_lookup=anchor_to_lookup + resources.lookup_to_anchor=lookup_to_anchor + local classes=raw.anchor_classes + if classes then + for c=1,#classes do + local class=classes[c] + local anchor=class.name + local lookups=class.lookup + if type(lookups)~="table" then + lookups={ lookups } + end + local a=anchor_to_lookup[anchor] + if not a then + a={} + anchor_to_lookup[anchor]=a + end + for l=1,#lookups do + local lookup=lookups[l] + local l=lookup_to_anchor[lookup] + if l then + l[anchor]=true + else + l={ [anchor]=true } + lookup_to_anchor[lookup]=l + end + a[lookup]=true + end + end + end +end +actions["prepare tounicode"]=function(data,filename,raw) + fonts.mappings.addtounicode(data,filename) +end +local g_directions={ + gsub_contextchain=1, + gpos_contextchain=1, + gsub_reversecontextchain=-1, + gpos_reversecontextchain=-1, +} +actions["reorganize subtables"]=function(data,filename,raw) + local resources=data.resources + local sequences={} + local lookups={} + local chainedfeatures={} + resources.sequences=sequences + resources.lookups=lookups + for _,what in next,otf.glists do + local dw=raw[what] + if dw then + for k=1,#dw do + local gk=dw[k] + local features=gk.features + local typ=gk.type + local chain=g_directions[typ] or 0 + local subtables=gk.subtables + if subtables then + local t={} + for s=1,#subtables do + t[s]=subtables[s].name + end + subtables=t + end + local flags,markclass=gk.flags,nil + if flags then + local t={ + (flags.ignorecombiningmarks and "mark") or false, + (flags.ignoreligatures and "ligature") or false, + (flags.ignorebaseglyphs and "base") or false, + flags.r2l or false, + } + markclass=flags.mark_class + if markclass then + markclass=resources.markclasses[markclass] + end + flags=t + end + local name=gk.name + if not name then + report_otf("skipping weird lookup number %s",k) + elseif features then + local f={} + local o={} + for i=1,#features do + local df=features[i] + local tag=strip(lower(df.tag)) + local 
ft=f[tag] + if not ft then + ft={} + f[tag]=ft + o[#o+1]=tag + end + local dscripts=df.scripts + for i=1,#dscripts do + local d=dscripts[i] + local languages=d.langs + local script=strip(lower(d.script)) + local fts=ft[script] if not fts then fts={} ft[script]=fts end + for i=1,#languages do + fts[strip(lower(languages[i]))]=true + end + end + end + sequences[#sequences+1]={ + type=typ, + chain=chain, + flags=flags, + name=name, + subtables=subtables, + markclass=markclass, + features=f, + order=o, + } + else + lookups[name]={ + type=typ, + chain=chain, + flags=flags, + subtables=subtables, + markclass=markclass, + } + end + end + end + end +end +actions["prepare lookups"]=function(data,filename,raw) + local lookups=raw.lookups + if lookups then + data.lookups=lookups + end +end +local function t_uncover(splitter,cache,covers) + local result={} + for n=1,#covers do + local cover=covers[n] + local uncovered=cache[cover] + if not uncovered then + uncovered=lpegmatch(splitter,cover) + cache[cover]=uncovered + end + result[n]=uncovered + end + return result +end +local function s_uncover(splitter,cache,cover) + if cover=="" then + return nil + else + local uncovered=cache[cover] + if not uncovered then + uncovered=lpegmatch(splitter,cover) + cache[cover]=uncovered + end + return { uncovered } + end +end +local function t_hashed(t,cache) + if t then + local ht={} + for i=1,#t do + local ti=t[i] + local tih=cache[ti] + if not tih then + local tn=#ti + if tn==1 then + tih={ [ti[1]]=true } + else + tih={} + for i=1,tn do + tih[ti[i]]=true + end + end + cache[ti]=tih + end + ht[i]=tih + end + return ht + else + return nil + end +end +local function s_hashed(t,cache) + if t then + local tf=t[1] + local nf=#tf + if nf==1 then + return { [tf[1]]=true } + else + local ht={} + for i=1,nf do + ht[i]={ [tf[i]]=true } + end + return ht + end + else + return nil + end +end +local function r_uncover(splitter,cache,cover,replacements) + if cover=="" then + return nil + else + local uncovered=cover[1] + local replaced=cache[replacements] + if not replaced then + replaced=lpegmatch(splitter,replacements) + cache[replacements]=replaced + end + local nu,nr=#uncovered,#replaced + local r={} + if nu==nr then + for i=1,nu do + r[uncovered[i]]=replaced[i] + end + end + return r + end +end +actions["reorganize lookups"]=function(data,filename,raw) + if data.lookups then + local splitter=data.helpers.tounicodetable + local t_u_cache={} + local s_u_cache=t_u_cache + local t_h_cache={} + local s_h_cache=t_h_cache + local r_u_cache={} + for _,lookup in next,data.lookups do + local rules=lookup.rules + if rules then + local format=lookup.format + if format=="class" then + local before_class=lookup.before_class + if before_class then + before_class=t_uncover(splitter,t_u_cache,reversed(before_class)) + end + local current_class=lookup.current_class + if current_class then + current_class=t_uncover(splitter,t_u_cache,current_class) + end + local after_class=lookup.after_class + if after_class then + after_class=t_uncover(splitter,t_u_cache,after_class) + end + for i=1,#rules do + local rule=rules[i] + local class=rule.class + local before=class.before + if before then + for i=1,#before do + before[i]=before_class[before[i]] or {} + end + rule.before=t_hashed(before,t_h_cache) + end + local current=class.current + local lookups=rule.lookups + if current then + for i=1,#current do + current[i]=current_class[current[i]] or {} + if lookups and not lookups[i] then + lookups[i]="" + end + end + 
rule.current=t_hashed(current,t_h_cache) + end + local after=class.after + if after then + for i=1,#after do + after[i]=after_class[after[i]] or {} + end + rule.after=t_hashed(after,t_h_cache) + end + rule.class=nil + end + lookup.before_class=nil + lookup.current_class=nil + lookup.after_class=nil + lookup.format="coverage" + elseif format=="coverage" then + for i=1,#rules do + local rule=rules[i] + local coverage=rule.coverage + if coverage then + local before=coverage.before + if before then + before=t_uncover(splitter,t_u_cache,reversed(before)) + rule.before=t_hashed(before,t_h_cache) + end + local current=coverage.current + if current then + current=t_uncover(splitter,t_u_cache,current) + local lookups=rule.lookups + if lookups then + for i=1,#current do + if not lookups[i] then + lookups[i]="" + end + end + end + rule.current=t_hashed(current,t_h_cache) + end + local after=coverage.after + if after then + after=t_uncover(splitter,t_u_cache,after) + rule.after=t_hashed(after,t_h_cache) + end + rule.coverage=nil + end + end + elseif format=="reversecoverage" then + for i=1,#rules do + local rule=rules[i] + local reversecoverage=rule.reversecoverage + if reversecoverage then + local before=reversecoverage.before + if before then + before=t_uncover(splitter,t_u_cache,reversed(before)) + rule.before=t_hashed(before,t_h_cache) + end + local current=reversecoverage.current + if current then + current=t_uncover(splitter,t_u_cache,current) + rule.current=t_hashed(current,t_h_cache) + end + local after=reversecoverage.after + if after then + after=t_uncover(splitter,t_u_cache,after) + rule.after=t_hashed(after,t_h_cache) + end + local replacements=reversecoverage.replacements + if replacements then + rule.replacements=r_uncover(splitter,r_u_cache,current,replacements) + end + rule.reversecoverage=nil + end + end + elseif format=="glyphs" then + for i=1,#rules do + local rule=rules[i] + local glyphs=rule.glyphs + if glyphs then + local fore=glyphs.fore + if fore and fore~="" then + fore=s_uncover(splitter,s_u_cache,fore) + rule.after=s_hashed(fore,s_h_cache) + end + local back=glyphs.back + if back then + back=s_uncover(splitter,s_u_cache,back) + rule.before=s_hashed(back,s_h_cache) + end + local names=glyphs.names + if names then + names=s_uncover(splitter,s_u_cache,names) + rule.current=s_hashed(names,s_h_cache) + end + rule.glyphs=nil + local lookups=rule.lookups + if lookups then + for i=1,#names do + if not lookups[i] then + lookups[i]="" + end + end + end + end + end + end + end + end + end +end +local function check_variants(unicode,the_variants,splitter,unicodes) + local variants=the_variants.variants + if variants then + local glyphs=lpegmatch(splitter,variants) + local done={ [unicode]=true } + local n=0 + for i=1,#glyphs do + local g=glyphs[i] + if done[g] then + if i>1 then + report_otf("skipping cyclic reference %U in math variant %U",g,unicode) + end + else + if n==0 then + n=1 + variants={ g } + else + n=n+1 + variants[n]=g + end + done[g]=true + end + end + if n==0 then + variants=nil + end + end + local parts=the_variants.parts + if parts then + local p=#parts + if p>0 then + for i=1,p do + local pi=parts[i] + pi.glyph=unicodes[pi.component] or 0 + pi.component=nil + end + else + parts=nil + end + end + local italic_correction=the_variants.italic_correction + if italic_correction and italic_correction==0 then + italic_correction=nil + end + return variants,parts,italic_correction +end +actions["analyze math"]=function(data,filename,raw) + if raw.math then + 
data.metadata.math=raw.math + local unicodes=data.resources.unicodes + local splitter=data.helpers.tounicodetable + for unicode,description in next,data.descriptions do + local glyph=description.glyph + local mathkerns=glyph.mathkern + local horiz_variants=glyph.horiz_variants + local vert_variants=glyph.vert_variants + local top_accent=glyph.top_accent + if mathkerns or horiz_variants or vert_variants or top_accent then + local math={} + if top_accent then + math.top_accent=top_accent + end + if mathkerns then + for k,v in next,mathkerns do + if not next(v) then + mathkerns[k]=nil + else + for k,v in next,v do + if v==0 then + k[v]=nil + end + end + end + end + math.kerns=mathkerns + end + if horiz_variants then + math.horiz_variants,math.horiz_parts,math.horiz_italic_correction=check_variants(unicode,horiz_variants,splitter,unicodes) + end + if vert_variants then + math.vert_variants,math.vert_parts,math.vert_italic_correction=check_variants(unicode,vert_variants,splitter,unicodes) + end + local italic_correction=description.italic + if italic_correction and italic_correction~=0 then + math.italic_correction=italic_correction + end + description.math=math + end + end + end +end +actions["reorganize glyph kerns"]=function(data,filename,raw) + local descriptions=data.descriptions + local resources=data.resources + local unicodes=resources.unicodes + for unicode,description in next,descriptions do + local kerns=description.glyph.kerns + if kerns then + local newkerns={} + for k,kern in next,kerns do + local name=kern.char + local offset=kern.off + local lookup=kern.lookup + if name and offset and lookup then + local unicode=unicodes[name] + if unicode then + if type(lookup)=="table" then + for l=1,#lookup do + local lookup=lookup[l] + local lookupkerns=newkerns[lookup] + if lookupkerns then + lookupkerns[unicode]=offset + else + newkerns[lookup]={ [unicode]=offset } + end + end + else + local lookupkerns=newkerns[lookup] + if lookupkerns then + lookupkerns[unicode]=offset + else + newkerns[lookup]={ [unicode]=offset } + end + end + elseif trace_loading then + report_otf("problems with unicode %a of kern %a of glyph %U",name,k,unicode) + end + end + end + description.kerns=newkerns + end + end +end +actions["merge kern classes"]=function(data,filename,raw) + local gposlist=raw.gpos + if gposlist then + local descriptions=data.descriptions + local resources=data.resources + local unicodes=resources.unicodes + local splitter=data.helpers.tounicodetable + local ignored=0 + local blocked=0 + for gp=1,#gposlist do + local gpos=gposlist[gp] + local subtables=gpos.subtables + if subtables then + local first_done={} + local split={} + for s=1,#subtables do + local subtable=subtables[s] + local kernclass=subtable.kernclass + local lookup=subtable.lookup or subtable.name + if kernclass then + if #kernclass>0 then + kernclass=kernclass[1] + lookup=type(kernclass.lookup)=="string" and kernclass.lookup or lookup + report_otf("fixing kernclass table of lookup %a",lookup) + end + local firsts=kernclass.firsts + local seconds=kernclass.seconds + local offsets=kernclass.offsets + for n,s in next,firsts do + split[s]=split[s] or lpegmatch(splitter,s) + end + local maxseconds=0 + for n,s in next,seconds do + if n>maxseconds then + maxseconds=n + end + split[s]=split[s] or lpegmatch(splitter,s) + end + for fk=1,#firsts do + local fv=firsts[fk] + local splt=split[fv] + if splt then + local extrakerns={} + local baseoffset=(fk-1)*maxseconds + for sk=2,maxseconds do + local sv=seconds[sk] + local splt=split[sv] + 
if splt then + local offset=offsets[baseoffset+sk] + if offset then + for i=1,#splt do + extrakerns[splt[i]]=offset + end + end + end + end + for i=1,#splt do + local first_unicode=splt[i] + if first_done[first_unicode] then + report_otf("lookup %a: ignoring further kerns of %C",lookup,first_unicode) + blocked=blocked+1 + else + first_done[first_unicode]=true + local description=descriptions[first_unicode] + if description then + local kerns=description.kerns + if not kerns then + kerns={} + description.kerns=kerns + end + local lookupkerns=kerns[lookup] + if not lookupkerns then + lookupkerns={} + kerns[lookup]=lookupkerns + end + if overloadkerns then + for second_unicode,kern in next,extrakerns do + lookupkerns[second_unicode]=kern + end + else + for second_unicode,kern in next,extrakerns do + local k=lookupkerns[second_unicode] + if not k then + lookupkerns[second_unicode]=kern + elseif k~=kern then + if trace_loading then + report_otf("lookup %a: ignoring overload of kern between %C and %C, rejecting %a, keeping %a",lookup,first_unicode,second_unicode,k,kern) + end + ignored=ignored+1 + end + end + end + elseif trace_loading then + report_otf("no glyph data for %U",first_unicode) + end + end + end + end + end + subtable.kernclass={} + end + end + end + end + if ignored>0 then + report_otf("%s kern overloads ignored",ignored) + end + if blocked>0 then + report_otf("%s succesive kerns blocked",blocked) + end + end +end +actions["check glyphs"]=function(data,filename,raw) + for unicode,description in next,data.descriptions do + description.glyph=nil + end +end +local valid=(R("\x00\x7E")-S("(){}[]<>%/ \n\r\f\v"))^0*P(-1) +local function valid_ps_name(str) + return str and str~="" and #str<64 and lpegmatch(valid,str) and true or false +end +actions["check metadata"]=function(data,filename,raw) + local metadata=data.metadata + for _,k in next,mainfields do + if valid_fields[k] then + local v=raw[k] + if not metadata[k] then + metadata[k]=v + end + end + end + local ttftables=metadata.ttf_tables + if ttftables then + for i=1,#ttftables do + ttftables[i].data="deleted" + end + end + if metadata.validation_state and table.contains(metadata.validation_state,"bad_ps_fontname") then + local function valid(what) + local names=raw.names + for i=1,#names do + local list=names[i] + local names=list.names + if names then + local name=names[what] + if name and valid_ps_name(name) then + return name + end + end + end + end + local function check(what) + local oldname=metadata[what] + if valid_ps_name(oldname) then + report_otf("ignoring warning %a because %s %a is proper ASCII","bad_ps_fontname",what,oldname) + else + local newname=valid(what) + if not newname then + newname=formatters["bad-%s-%s"](what,file.nameonly(filename)) + end + local warning=formatters["overloading %s from invalid ASCII name %a to %a"](what,oldname,newname) + data.warnings[#data.warnings+1]=warning + report_otf(warning) + metadata[what]=newname + end + end + check("fontname") + check("fullname") + end +end +actions["cleanup tables"]=function(data,filename,raw) + local duplicates=data.resources.duplicates + if duplicates then + for k,v in next,duplicates do + if #v==1 then + duplicates[k]=v[1] + end + end + end + data.resources.indices=nil + data.resources.unicodes=nil + data.helpers=nil +end +actions["reorganize glyph lookups"]=function(data,filename,raw) + local resources=data.resources + local unicodes=resources.unicodes + local descriptions=data.descriptions + local splitter=data.helpers.tounicodelist + local 
lookuptypes=resources.lookuptypes + for unicode,description in next,descriptions do + local lookups=description.glyph.lookups + if lookups then + for tag,lookuplist in next,lookups do + for l=1,#lookuplist do + local lookup=lookuplist[l] + local specification=lookup.specification + local lookuptype=lookup.type + local lt=lookuptypes[tag] + if not lt then + lookuptypes[tag]=lookuptype + elseif lt~=lookuptype then + report_otf("conflicting lookuptypes, %a points to %a and %a",tag,lt,lookuptype) + end + if lookuptype=="ligature" then + lookuplist[l]={ lpegmatch(splitter,specification.components) } + elseif lookuptype=="alternate" then + lookuplist[l]={ lpegmatch(splitter,specification.components) } + elseif lookuptype=="substitution" then + lookuplist[l]=unicodes[specification.variant] + elseif lookuptype=="multiple" then + lookuplist[l]={ lpegmatch(splitter,specification.components) } + elseif lookuptype=="position" then + lookuplist[l]={ + specification.x or 0, + specification.y or 0, + specification.h or 0, + specification.v or 0 + } + elseif lookuptype=="pair" then + local one=specification.offsets[1] + local two=specification.offsets[2] + local paired=unicodes[specification.paired] + if one then + if two then + lookuplist[l]={ paired,{ one.x or 0,one.y or 0,one.h or 0,one.v or 0 },{ two.x or 0,two.y or 0,two.h or 0,two.v or 0 } } + else + lookuplist[l]={ paired,{ one.x or 0,one.y or 0,one.h or 0,one.v or 0 } } + end + else + if two then + lookuplist[l]={ paired,{},{ two.x or 0,two.y or 0,two.h or 0,two.v or 0} } + else + lookuplist[l]={ paired } + end + end + end + end + end + local slookups,mlookups + for tag,lookuplist in next,lookups do + if #lookuplist==1 then + if slookups then + slookups[tag]=lookuplist[1] + else + slookups={ [tag]=lookuplist[1] } + end + else + if mlookups then + mlookups[tag]=lookuplist + else + mlookups={ [tag]=lookuplist } + end + end + end + if slookups then + description.slookups=slookups + end + if mlookups then + description.mlookups=mlookups + end + end + end +end +actions["reorganize glyph anchors"]=function(data,filename,raw) + local descriptions=data.descriptions + for unicode,description in next,descriptions do + local anchors=description.glyph.anchors + if anchors then + for class,data in next,anchors do + if class=="baselig" then + for tag,specification in next,data do + for i=1,#specification do + local si=specification[i] + specification[i]={ si.x or 0,si.y or 0 } + end + end + else + for tag,specification in next,data do + data[tag]={ specification.x or 0,specification.y or 0 } + end + end + end + description.anchors=anchors + end + end +end +local bogusname=(P("uni")+P("u"))*R("AF","09")^4+(P("index")+P("glyph")+S("Ii")*P("dentity")*P(".")^0)*R("09")^1 +local uselessname=(1-bogusname)^0*bogusname +actions["purge names"]=function(data,filename,raw) + if purge_names then + local n=0 + for u,d in next,data.descriptions do + if lpegmatch(uselessname,d.name) then + n=n+1 + d.name=nil + end + end + if n>0 then + report_otf("%s bogus names removed",n) + end + end +end +actions["compact lookups"]=function(data,filename,raw) + if not compact_lookups then + report_otf("not compacting") + return + end + local last=0 + local tags=table.setmetatableindex({}, + function(t,k) + last=last+1 + t[k]=last + return last + end + ) + local descriptions=data.descriptions + local resources=data.resources + for u,d in next,descriptions do + local slookups=d.slookups + if type(slookups)=="table" then + local s={} + for k,v in next,slookups do + s[tags[k]]=v + end + 
d.slookups=s + end + local mlookups=d.mlookups + if type(mlookups)=="table" then + local m={} + for k,v in next,mlookups do + m[tags[k]]=v + end + d.mlookups=m + end + local kerns=d.kerns + if type(kerns)=="table" then + local t={} + for k,v in next,kerns do + t[tags[k]]=v + end + d.kerns=t + end + end + local lookups=data.lookups + if lookups then + local l={} + for k,v in next,lookups do + local rules=v.rules + if rules then + for i=1,#rules do + local l=rules[i].lookups + if type(l)=="table" then + for i=1,#l do + l[i]=tags[l[i]] + end + end + end + end + l[tags[k]]=v + end + data.lookups=l + end + local lookups=resources.lookups + if lookups then + local l={} + for k,v in next,lookups do + local s=v.subtables + if type(s)=="table" then + for i=1,#s do + s[i]=tags[s[i]] + end + end + l[tags[k]]=v + end + resources.lookups=l + end + local sequences=resources.sequences + if sequences then + for i=1,#sequences do + local s=sequences[i] + local n=s.name + if n then + s.name=tags[n] + end + local t=s.subtables + if type(t)=="table" then + for i=1,#t do + t[i]=tags[t[i]] + end + end + end + end + local lookuptypes=resources.lookuptypes + if lookuptypes then + local l={} + for k,v in next,lookuptypes do + l[tags[k]]=v + end + resources.lookuptypes=l + end + local anchor_to_lookup=resources.anchor_to_lookup + if anchor_to_lookup then + for anchor,lookups in next,anchor_to_lookup do + local l={} + for lookup,value in next,lookups do + l[tags[lookup]]=value + end + anchor_to_lookup[anchor]=l + end + end + local lookup_to_anchor=resources.lookup_to_anchor + if lookup_to_anchor then + local l={} + for lookup,value in next,lookup_to_anchor do + l[tags[lookup]]=value + end + resources.lookup_to_anchor=l + end + tags=table.swapped(tags) + report_otf("%s lookup tags compacted",#tags) + resources.lookuptags=tags +end +function otf.setfeatures(tfmdata,features) + local okay=constructors.initializefeatures("otf",tfmdata,features,trace_features,report_otf) + if okay then + return constructors.collectprocessors("otf",tfmdata,features,trace_features,report_otf) + else + return {} + end +end +local function copytotfm(data,cache_id) + if data then + local metadata=data.metadata + local warnings=data.warnings + local resources=data.resources + local properties=derivetable(data.properties) + local descriptions=derivetable(data.descriptions) + local goodies=derivetable(data.goodies) + local characters={} + local parameters={} + local mathparameters={} + local pfminfo=metadata.pfminfo or {} + local resources=data.resources + local unicodes=resources.unicodes + local spaceunits=500 + local spacer="space" + local designsize=metadata.designsize or metadata.design_size or 100 + local mathspecs=metadata.math + if designsize==0 then + designsize=100 + end + if mathspecs then + for name,value in next,mathspecs do + mathparameters[name]=value + end + end + for unicode,_ in next,data.descriptions do + characters[unicode]={} + end + if mathspecs then + for unicode,character in next,characters do + local d=descriptions[unicode] + local m=d.math + if m then + local variants=m.horiz_variants + local parts=m.horiz_parts + if variants then + local c=character + for i=1,#variants do + local un=variants[i] + c.next=un + c=characters[un] + end + c.horiz_variants=parts + elseif parts then + character.horiz_variants=parts + end + local variants=m.vert_variants + local parts=m.vert_parts + if variants then + local c=character + for i=1,#variants do + local un=variants[i] + c.next=un + c=characters[un] + end + c.vert_variants=parts + 
elseif parts then + character.vert_variants=parts + end + local italic_correction=m.vert_italic_correction + if italic_correction then + character.vert_italic_correction=italic_correction + end + local top_accent=m.top_accent + if top_accent then + character.top_accent=top_accent + end + local kerns=m.kerns + if kerns then + character.mathkerns=kerns + end + end + end + end + local filename=constructors.checkedfilename(resources) + local fontname=metadata.fontname + local fullname=metadata.fullname or fontname + local psname=fontname or fullname + local units=metadata.units_per_em or 1000 + if units==0 then + units=1000 + metadata.units_per_em=1000 + report_otf("changing %a units to %a",0,units) + end + local monospaced=metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose.proportion=="Monospaced") + local charwidth=pfminfo.avgwidth + local charxheight=pfminfo.os2_xheight and pfminfo.os2_xheight>0 and pfminfo.os2_xheight + local italicangle=metadata.italicangle + properties.monospaced=monospaced + parameters.italicangle=italicangle + parameters.charwidth=charwidth + parameters.charxheight=charxheight + local space=0x0020 + local emdash=0x2014 + if monospaced then + if descriptions[space] then + spaceunits,spacer=descriptions[space].width,"space" + end + if not spaceunits and descriptions[emdash] then + spaceunits,spacer=descriptions[emdash].width,"emdash" + end + if not spaceunits and charwidth then + spaceunits,spacer=charwidth,"charwidth" + end + else + if descriptions[space] then + spaceunits,spacer=descriptions[space].width,"space" + end + if not spaceunits and descriptions[emdash] then + spaceunits,spacer=descriptions[emdash].width/2,"emdash/2" + end + if not spaceunits and charwidth then + spaceunits,spacer=charwidth,"charwidth" + end + end + spaceunits=tonumber(spaceunits) or 500 + parameters.slant=0 + parameters.space=spaceunits + parameters.space_stretch=units/2 + parameters.space_shrink=1*units/3 + parameters.x_height=2*units/5 + parameters.quad=units + if spaceunits<2*units/5 then + end + if italicangle and italicangle~=0 then + parameters.italicangle=italicangle + parameters.italicfactor=math.cos(math.rad(90+italicangle)) + parameters.slant=- math.tan(italicangle*math.pi/180) + end + if monospaced then + parameters.space_stretch=0 + parameters.space_shrink=0 + elseif syncspace then + parameters.space_stretch=spaceunits/2 + parameters.space_shrink=spaceunits/3 + end + parameters.extra_space=parameters.space_shrink + if charxheight then + parameters.x_height=charxheight + else + local x=0x0078 + if x then + local x=descriptions[x] + if x then + parameters.x_height=x.height + end + end + end + parameters.designsize=(designsize/10)*65536 + parameters.ascender=abs(metadata.ascent or 0) + parameters.descender=abs(metadata.descent or 0) + parameters.units=units + properties.space=spacer + properties.encodingbytes=2 + properties.format=data.format or otf_format(filename) or formats.otf + properties.noglyphnames=true + properties.filename=filename + properties.fontname=fontname + properties.fullname=fullname + properties.psname=psname + properties.name=filename or fullname + if warnings and #warnings>0 then + report_otf("warnings for font: %s",filename) + report_otf() + for i=1,#warnings do + report_otf(" %s",warnings[i]) + end + report_otf() + end + return { + characters=characters, + descriptions=descriptions, + parameters=parameters, + mathparameters=mathparameters, + resources=resources, + properties=properties, + goodies=goodies, + warnings=warnings, + } + end +end +local 
function otftotfm(specification) + local cache_id=specification.hash + local tfmdata=containers.read(constructors.cache,cache_id) + if not tfmdata then + local name=specification.name + local sub=specification.sub + local filename=specification.filename + local features=specification.features.normal + local rawdata=otf.load(filename,sub,features and features.featurefile) + if rawdata and next(rawdata) then + local descriptions=rawdata.descriptions + local duplicates=rawdata.resources.duplicates + if duplicates then + local nofduplicates,nofduplicated=0,0 + for parent,list in next,duplicates do + if type(list)=="table" then + local n=#list + for i=1,n do + local unicode=list[i] + if not descriptions[unicode] then + descriptions[unicode]=descriptions[parent] + nofduplicated=nofduplicated+1 + end + end + nofduplicates=nofduplicates+n + else + if not descriptions[list] then + descriptions[list]=descriptions[parent] + nofduplicated=nofduplicated+1 + end + nofduplicates=nofduplicates+1 + end + end + if trace_otf and nofduplicated~=nofduplicates then + report_otf("%i extra duplicates copied out of %i",nofduplicated,nofduplicates) + end + end + rawdata.lookuphash={} + tfmdata=copytotfm(rawdata,cache_id) + if tfmdata and next(tfmdata) then + local features=constructors.checkedfeatures("otf",features) + local shared=tfmdata.shared + if not shared then + shared={} + tfmdata.shared=shared + end + shared.rawdata=rawdata + shared.dynamics={} + tfmdata.changed={} + shared.features=features + shared.processes=otf.setfeatures(tfmdata,features) + end + end + containers.write(constructors.cache,cache_id,tfmdata) + end + return tfmdata +end +local function read_from_otf(specification) + local tfmdata=otftotfm(specification) + if tfmdata then + tfmdata.properties.name=specification.name + tfmdata.properties.sub=specification.sub + tfmdata=constructors.scale(tfmdata,specification) + local allfeatures=tfmdata.shared.features or specification.features.normal + constructors.applymanipulators("otf",tfmdata,allfeatures,trace_features,report_otf) + constructors.setname(tfmdata,specification) + fonts.loggers.register(tfmdata,file.suffix(specification.filename),specification) + end + return tfmdata +end +local function checkmathsize(tfmdata,mathsize) + local mathdata=tfmdata.shared.rawdata.metadata.math + local mathsize=tonumber(mathsize) + if mathdata then + local parameters=tfmdata.parameters + parameters.scriptpercentage=mathdata.ScriptPercentScaleDown + parameters.scriptscriptpercentage=mathdata.ScriptScriptPercentScaleDown + parameters.mathsize=mathsize + end +end +registerotffeature { + name="mathsize", + description="apply mathsize specified in the font", + initializers={ + base=checkmathsize, + node=checkmathsize, + } +} +function otf.collectlookups(rawdata,kind,script,language) + local sequences=rawdata.resources.sequences + if sequences then + local featuremap,featurelist={},{} + for s=1,#sequences do + local sequence=sequences[s] + local features=sequence.features + features=features and features[kind] + features=features and (features[script] or features[default] or features[wildcard]) + features=features and (features[language] or features[default] or features[wildcard]) + if features then + local subtables=sequence.subtables + if subtables then + for s=1,#subtables do + local ss=subtables[s] + if not featuremap[s] then + featuremap[ss]=true + featurelist[#featurelist+1]=ss + end + end + end + end + end + if #featurelist>0 then + return featuremap,featurelist + end + end + return nil,nil +end +local 
function check_otf(forced,specification,suffix) + local name=specification.name + if forced then + name=specification.forcedname + end + local fullname=findbinfile(name,suffix) or "" + if fullname=="" then + fullname=fonts.names.getfilename(name,suffix) or "" + end + if fullname~="" and not fonts.names.ignoredfile(fullname) then + specification.filename=fullname + return read_from_otf(specification) + end +end +local function opentypereader(specification,suffix) + local forced=specification.forced or "" + if formats[forced] then + return check_otf(true,specification,forced) + else + return check_otf(false,specification,suffix) + end +end +readers.opentype=opentypereader +function readers.otf (specification) return opentypereader(specification,"otf") end +function readers.ttf (specification) return opentypereader(specification,"ttf") end +function readers.ttc (specification) return opentypereader(specification,"ttf") end +function readers.dfont(specification) return opentypereader(specification,"ttf") end +function otf.scriptandlanguage(tfmdata,attr) + local properties=tfmdata.properties + return properties.script or "dflt",properties.language or "dflt" +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-otb']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local concat=table.concat +local format,gmatch,gsub,find,match,lower,strip=string.format,string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip +local type,next,tonumber,tostring,rawget=type,next,tonumber,tostring,rawget +local lpegmatch=lpeg.match +local utfchar=utf.char +local trace_baseinit=false trackers.register("otf.baseinit",function(v) trace_baseinit=v end) +local trace_singles=false trackers.register("otf.singles",function(v) trace_singles=v end) +local trace_multiples=false trackers.register("otf.multiples",function(v) trace_multiples=v end) +local trace_alternatives=false trackers.register("otf.alternatives",function(v) trace_alternatives=v end) +local trace_ligatures=false trackers.register("otf.ligatures",function(v) trace_ligatures=v end) +local trace_ligatures_detail=false trackers.register("otf.ligatures.detail",function(v) trace_ligatures_detail=v end) +local trace_kerns=false trackers.register("otf.kerns",function(v) trace_kerns=v end) +local trace_preparing=false trackers.register("otf.preparing",function(v) trace_preparing=v end) +local report_prepare=logs.reporter("fonts","otf prepare") +local fonts=fonts +local otf=fonts.handlers.otf +local otffeatures=otf.features +local registerotffeature=otffeatures.register +otf.defaultbasealternate="none" +local wildcard="*" +local default="dflt" +local formatters=string.formatters +local f_unicode=formatters["%U"] +local f_uniname=formatters["%U (%s)"] +local f_unilist=formatters["% t (% t)"] +local function gref(descriptions,n) + if type(n)=="number" then + local name=descriptions[n].name + if name then + return f_uniname(n,name) + else + return f_unicode(n) + end + elseif n then + local num,nam,j={},{},0 + for i=1,#n do + local ni=n[i] + if tonumber(ni) then + j=j+1 + local di=descriptions[ni] + num[j]=f_unicode(ni) + nam[j]=di and di.name or "-" + end + end + return f_unilist(num,nam) + else + return "" + end +end +local function cref(feature,lookuptags,lookupname) + if lookupname then + return 
formatters["feature %a, lookup %a"](feature,lookuptags[lookupname]) + else + return formatters["feature %a"](feature) + end +end +local function report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,comment) + report_prepare("%s: base alternate %s => %s (%S => %S)", + cref(feature,lookuptags,lookupname), + gref(descriptions,unicode), + replacement and gref(descriptions,replacement), + value, + comment) +end +local function report_substitution(feature,lookuptags,lookupname,descriptions,unicode,substitution) + report_prepare("%s: base substitution %s => %S", + cref(feature,lookuptags,lookupname), + gref(descriptions,unicode), + gref(descriptions,substitution)) +end +local function report_ligature(feature,lookuptags,lookupname,descriptions,unicode,ligature) + report_prepare("%s: base ligature %s => %S", + cref(feature,lookuptags,lookupname), + gref(descriptions,ligature), + gref(descriptions,unicode)) +end +local function report_kern(feature,lookuptags,lookupname,descriptions,unicode,otherunicode,value) + report_prepare("%s: base kern %s + %s => %S", + cref(feature,lookuptags,lookupname), + gref(descriptions,unicode), + gref(descriptions,otherunicode), + value) +end +local basemethods={} +local basemethod="" +local function applybasemethod(what,...) + local m=basemethods[basemethod][what] + if m then + return m(...) + end +end +local basehash,basehashes,applied={},1,{} +local function registerbasehash(tfmdata) + local properties=tfmdata.properties + local hash=concat(applied," ") + local base=basehash[hash] + if not base then + basehashes=basehashes+1 + base=basehashes + basehash[hash]=base + end + properties.basehash=base + properties.fullname=properties.fullname.."-"..base + applied={} +end +local function registerbasefeature(feature,value) + applied[#applied+1]=feature.."="..tostring(value) +end +local trace=false +local function finalize_ligatures(tfmdata,ligatures) + local nofligatures=#ligatures + if nofligatures>0 then + local characters=tfmdata.characters + local descriptions=tfmdata.descriptions + local resources=tfmdata.resources + local unicodes=resources.unicodes + local private=resources.private + local alldone=false + while not alldone do + local done=0 + for i=1,nofligatures do + local ligature=ligatures[i] + if ligature then + local unicode,lookupdata=ligature[1],ligature[2] + if trace_ligatures_detail then + report_prepare("building % a into %a",lookupdata,unicode) + end + local size=#lookupdata + local firstcode=lookupdata[1] + local firstdata=characters[firstcode] + local okay=false + if firstdata then + local firstname="ctx_"..firstcode + for i=1,size-1 do + local firstdata=characters[firstcode] + if not firstdata then + firstcode=private + if trace_ligatures_detail then + report_prepare("defining %a as %a",firstname,firstcode) + end + unicodes[firstname]=firstcode + firstdata={ intermediate=true,ligatures={} } + characters[firstcode]=firstdata + descriptions[firstcode]={ name=firstname } + private=private+1 + end + local target + local secondcode=lookupdata[i+1] + local secondname=firstname.."_"..secondcode + if i==size-1 then + target=unicode + if not rawget(unicodes,secondname) then + unicodes[secondname]=unicode + end + okay=true + else + target=rawget(unicodes,secondname) + if not target then + break + end + end + if trace_ligatures_detail then + report_prepare("codes (%a,%a) + (%a,%a) -> %a",firstname,firstcode,secondname,secondcode,target) + end + local firstligs=firstdata.ligatures + if firstligs then + firstligs[secondcode]={ 
char=target } + else + firstdata.ligatures={ [secondcode]={ char=target } } + end + firstcode=target + firstname=secondname + end + elseif trace_ligatures_detail then + report_prepare("no glyph (%a,%a) for building %a",firstname,firstcode,target) + end + if okay then + ligatures[i]=false + done=done+1 + end + end + end + alldone=done==0 + end + if trace_ligatures_detail then + for k,v in table.sortedhash(characters) do + if v.ligatures then + table.print(v,k) + end + end + end + resources.private=private + return true + end +end +local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist) + local characters=tfmdata.characters + local descriptions=tfmdata.descriptions + local resources=tfmdata.resources + local properties=tfmdata.properties + local changed=tfmdata.changed + local lookuphash=resources.lookuphash + local lookuptypes=resources.lookuptypes + local lookuptags=resources.lookuptags + local ligatures={} + local alternate=tonumber(value) or true and 1 + local defaultalt=otf.defaultbasealternate + local trace_singles=trace_baseinit and trace_singles + local trace_alternatives=trace_baseinit and trace_alternatives + local trace_ligatures=trace_baseinit and trace_ligatures + local actions={ + substitution=function(lookupdata,lookuptags,lookupname,description,unicode) + if trace_singles then + report_substitution(feature,lookuptags,lookupname,descriptions,unicode,lookupdata) + end + changed[unicode]=lookupdata + end, + alternate=function(lookupdata,lookuptags,lookupname,description,unicode) + local replacement=lookupdata[alternate] + if replacement then + changed[unicode]=replacement + if trace_alternatives then + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"normal") + end + elseif defaultalt=="first" then + replacement=lookupdata[1] + changed[unicode]=replacement + if trace_alternatives then + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt) + end + elseif defaultalt=="last" then + replacement=lookupdata[#data] + if trace_alternatives then + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt) + end + else + if trace_alternatives then + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"unknown") + end + end + end, + ligature=function(lookupdata,lookuptags,lookupname,description,unicode) + if trace_ligatures then + report_ligature(feature,lookuptags,lookupname,descriptions,unicode,lookupdata) + end + ligatures[#ligatures+1]={ unicode,lookupdata } + end, + } + for unicode,character in next,characters do + local description=descriptions[unicode] + local lookups=description.slookups + if lookups then + for l=1,#lookuplist do + local lookupname=lookuplist[l] + local lookupdata=lookups[lookupname] + if lookupdata then + local lookuptype=lookuptypes[lookupname] + local action=actions[lookuptype] + if action then + action(lookupdata,lookuptags,lookupname,description,unicode) + end + end + end + end + local lookups=description.mlookups + if lookups then + for l=1,#lookuplist do + local lookupname=lookuplist[l] + local lookuplist=lookups[lookupname] + if lookuplist then + local lookuptype=lookuptypes[lookupname] + local action=actions[lookuptype] + if action then + for i=1,#lookuplist do + action(lookuplist[i],lookuptags,lookupname,description,unicode) + end + end + end + end + end + end + properties.hasligatures=finalize_ligatures(tfmdata,ligatures) +end +local function 
preparepositionings(tfmdata,feature,value,validlookups,lookuplist) + local characters=tfmdata.characters + local descriptions=tfmdata.descriptions + local resources=tfmdata.resources + local properties=tfmdata.properties + local lookuptags=resources.lookuptags + local sharedkerns={} + local traceindeed=trace_baseinit and trace_kerns + local haskerns=false + for unicode,character in next,characters do + local description=descriptions[unicode] + local rawkerns=description.kerns + if rawkerns then + local s=sharedkerns[rawkerns] + if s==false then + elseif s then + character.kerns=s + else + local newkerns=character.kerns + local done=false + for l=1,#lookuplist do + local lookup=lookuplist[l] + local kerns=rawkerns[lookup] + if kerns then + for otherunicode,value in next,kerns do + if value==0 then + elseif not newkerns then + newkerns={ [otherunicode]=value } + done=true + if traceindeed then + report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,value) + end + elseif not newkerns[otherunicode] then + newkerns[otherunicode]=value + done=true + if traceindeed then + report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,value) + end + end + end + end + end + if done then + sharedkerns[rawkerns]=newkerns + character.kerns=newkerns + haskerns=true + else + sharedkerns[rawkerns]=false + end + end + end + end + properties.haskerns=haskerns +end +basemethods.independent={ + preparesubstitutions=preparesubstitutions, + preparepositionings=preparepositionings, +} +local function makefake(tfmdata,name,present) + local resources=tfmdata.resources + local private=resources.private + local character={ intermediate=true,ligatures={} } + resources.unicodes[name]=private + tfmdata.characters[private]=character + tfmdata.descriptions[private]={ name=name } + resources.private=private+1 + present[name]=private + return character +end +local function make_1(present,tree,name) + for k,v in next,tree do + if k=="ligature" then + present[name]=v + else + make_1(present,v,name.."_"..k) + end + end +end +local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,done,lookuptags,lookupname) + for k,v in next,tree do + if k=="ligature" then + local character=characters[preceding] + if not character then + if trace_baseinit then + report_prepare("weird ligature in lookup %a, current %C, preceding %C",lookuptags[lookupname],v,preceding) + end + character=makefake(tfmdata,name,present) + end + local ligatures=character.ligatures + if ligatures then + ligatures[unicode]={ char=v } + else + character.ligatures={ [unicode]={ char=v } } + end + if done then + local d=done[lookupname] + if not d then + done[lookupname]={ "dummy",v } + else + d[#d+1]=v + end + end + else + local code=present[name] or unicode + local name=name.."_"..k + make_2(present,tfmdata,characters,v,name,code,k,done,lookuptags,lookupname) + end + end +end +local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist) + local characters=tfmdata.characters + local descriptions=tfmdata.descriptions + local resources=tfmdata.resources + local changed=tfmdata.changed + local lookuphash=resources.lookuphash + local lookuptypes=resources.lookuptypes + local lookuptags=resources.lookuptags + local ligatures={} + local alternate=tonumber(value) or true and 1 + local defaultalt=otf.defaultbasealternate + local trace_singles=trace_baseinit and trace_singles + local trace_alternatives=trace_baseinit and trace_alternatives + local trace_ligatures=trace_baseinit and trace_ligatures + for 
l=1,#lookuplist do + local lookupname=lookuplist[l] + local lookupdata=lookuphash[lookupname] + local lookuptype=lookuptypes[lookupname] + for unicode,data in next,lookupdata do + if lookuptype=="substitution" then + if trace_singles then + report_substitution(feature,lookuptags,lookupname,descriptions,unicode,data) + end + changed[unicode]=data + elseif lookuptype=="alternate" then + local replacement=data[alternate] + if replacement then + changed[unicode]=replacement + if trace_alternatives then + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"normal") + end + elseif defaultalt=="first" then + replacement=data[1] + changed[unicode]=replacement + if trace_alternatives then + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt) + end + elseif defaultalt=="last" then + replacement=data[#data] + if trace_alternatives then + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt) + end + else + if trace_alternatives then + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"unknown") + end + end + elseif lookuptype=="ligature" then + ligatures[#ligatures+1]={ unicode,data,lookupname } + if trace_ligatures then + report_ligature(feature,lookuptags,lookupname,descriptions,unicode,data) + end + end + end + end + local nofligatures=#ligatures + if nofligatures>0 then + local characters=tfmdata.characters + local present={} + local done=trace_baseinit and trace_ligatures and {} + for i=1,nofligatures do + local ligature=ligatures[i] + local unicode,tree=ligature[1],ligature[2] + make_1(present,tree,"ctx_"..unicode) + end + for i=1,nofligatures do + local ligature=ligatures[i] + local unicode,tree,lookupname=ligature[1],ligature[2],ligature[3] + make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,lookuptags,lookupname) + end + end +end +local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist) + local characters=tfmdata.characters + local descriptions=tfmdata.descriptions + local resources=tfmdata.resources + local properties=tfmdata.properties + local lookuphash=resources.lookuphash + local lookuptags=resources.lookuptags + local traceindeed=trace_baseinit and trace_kerns + for l=1,#lookuplist do + local lookupname=lookuplist[l] + local lookupdata=lookuphash[lookupname] + for unicode,data in next,lookupdata do + local character=characters[unicode] + local kerns=character.kerns + if not kerns then + kerns={} + character.kerns=kerns + end + if traceindeed then + for otherunicode,kern in next,data do + if not kerns[otherunicode] and kern~=0 then + kerns[otherunicode]=kern + report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,kern) + end + end + else + for otherunicode,kern in next,data do + if not kerns[otherunicode] and kern~=0 then + kerns[otherunicode]=kern + end + end + end + end + end +end +local function initializehashes(tfmdata) + nodeinitializers.features(tfmdata) +end +basemethods.shared={ + initializehashes=initializehashes, + preparesubstitutions=preparesubstitutions, + preparepositionings=preparepositionings, +} +basemethod="independent" +local function featuresinitializer(tfmdata,value) + if true then + local starttime=trace_preparing and os.clock() + local features=tfmdata.shared.features + local fullname=tfmdata.properties.fullname or "?" 
+ if features then + applybasemethod("initializehashes",tfmdata) + local collectlookups=otf.collectlookups + local rawdata=tfmdata.shared.rawdata + local properties=tfmdata.properties + local script=properties.script + local language=properties.language + local basesubstitutions=rawdata.resources.features.gsub + local basepositionings=rawdata.resources.features.gpos + if basesubstitutions or basepositionings then + local sequences=tfmdata.resources.sequences + for s=1,#sequences do + local sequence=sequences[s] + local sfeatures=sequence.features + if sfeatures then + local order=sequence.order + if order then + for i=1,#order do + local feature=order[i] + local value=features[feature] + if value then + local validlookups,lookuplist=collectlookups(rawdata,feature,script,language) + if not validlookups then + elseif basesubstitutions and basesubstitutions[feature] then + if trace_preparing then + report_prepare("filtering base %s feature %a for %a with value %a","sub",feature,fullname,value) + end + applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist) + registerbasefeature(feature,value) + elseif basepositionings and basepositionings[feature] then + if trace_preparing then + report_prepare("filtering base %a feature %a for %a with value %a","pos",feature,fullname,value) + end + applybasemethod("preparepositionings",tfmdata,feature,value,validlookups,lookuplist) + registerbasefeature(feature,value) + end + end + end + end + end + end + end + registerbasehash(tfmdata) + end + if trace_preparing then + report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,fullname) + end + end +end +registerotffeature { + name="features", + description="features", + default=true, + initializers={ + base=featuresinitializer, + } +} +directives.register("fonts.otf.loader.basemethod",function(v) + if basemethods[v] then + basemethod=v + end +end) + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['node-inj']={ + version=1.001, + comment="companion to node-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files", +} +local next=next +local utfchar=utf.char +local trace_injections=false trackers.register("nodes.injections",function(v) trace_injections=v end) +local report_injections=logs.reporter("nodes","injections") +local attributes,nodes,node=attributes,nodes,node +fonts=fonts +local fontdata=fonts.hashes.identifiers +nodes.injections=nodes.injections or {} +local injections=nodes.injections +local nodecodes=nodes.nodecodes +local glyph_code=nodecodes.glyph +local kern_code=nodecodes.kern +local nuts=nodes.nuts +local nodepool=nuts.pool +local newkern=nodepool.kern +local tonode=nuts.tonode +local tonut=nuts.tonut +local getfield=nuts.getfield +local getnext=nuts.getnext +local getprev=nuts.getprev +local getid=nuts.getid +local getattr=nuts.getattr +local getfont=nuts.getfont +local getsubtype=nuts.getsubtype +local getchar=nuts.getchar +local setfield=nuts.setfield +local setattr=nuts.setattr +local traverse_id=nuts.traverse_id +local insert_node_before=nuts.insert_before +local insert_node_after=nuts.insert_after +local a_kernpair=attributes.private('kernpair') +local a_ligacomp=attributes.private('ligacomp') +local a_markbase=attributes.private('markbase') +local a_markmark=attributes.private('markmark') +local a_markdone=attributes.private('markdone') +local 
a_cursbase=attributes.private('cursbase') +local a_curscurs=attributes.private('curscurs') +local a_cursdone=attributes.private('cursdone') +local unsetvalue=attributes.unsetvalue +function injections.installnewkern(nk) + newkern=nk or newkern +end +local cursives={} +local marks={} +local kerns={} +function injections.reset(n) +end +function injections.setligaindex(n,index) + setattr(n,a_ligacomp,index) +end +function injections.getligaindex(n,default) + return getattr(n,a_ligacomp) or default +end +function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext) + local dx,dy=factor*(exit[1]-entry[1]),factor*(exit[2]-entry[2]) + local ws,wn=tfmstart.width,tfmnext.width + local bound=#cursives+1 + setattr(start,a_cursbase,bound) + setattr(nxt,a_curscurs,bound) + cursives[bound]={ rlmode,dx,dy,ws,wn } + return dx,dy,bound +end +function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr) + local x,y,w,h=factor*spec[1],factor*spec[2],factor*spec[3],factor*spec[4] + if x~=0 or w~=0 or y~=0 or h~=0 then + local bound=getattr(current,a_kernpair) + if bound then + local kb=kerns[bound] + kb[2],kb[3],kb[4],kb[5]=(kb[2] or 0)+x,(kb[3] or 0)+y,(kb[4] or 0)+w,(kb[5] or 0)+h + else + bound=#kerns+1 + setattr(current,a_kernpair,bound) + kerns[bound]={ rlmode,x,y,w,h,r2lflag,tfmchr.width } + end + return x,y,w,h,bound + end + return x,y,w,h +end +function injections.setkern(current,factor,rlmode,x,tfmchr) + local dx=factor*x + if dx~=0 then + local bound=#kerns+1 + setattr(current,a_kernpair,bound) + kerns[bound]={ rlmode,dx } + return dx,bound + else + return 0,0 + end +end +function injections.setmark(start,base,factor,rlmode,ba,ma) + local dx,dy=factor*(ba[1]-ma[1]),factor*(ba[2]-ma[2]) + local bound=getattr(base,a_markbase) + local index=1 + if bound then + local mb=marks[bound] + if mb then + index=#mb+1 + mb[index]={ dx,dy,rlmode } + setattr(start,a_markmark,bound) + setattr(start,a_markdone,index) + return dx,dy,bound + else + report_injections("possible problem, %U is base mark without data (id %a)",getchar(base),bound) + end + end + index=index or 1 + bound=#marks+1 + setattr(base,a_markbase,bound) + setattr(start,a_markmark,bound) + setattr(start,a_markdone,index) + marks[bound]={ [index]={ dx,dy,rlmode } } + return dx,dy,bound +end +local function dir(n) + return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset" +end +local function trace(head) + report_injections("begin run") + for n in traverse_id(glyph_code,head) do + if getsubtype(n)<256 then + local kp=getattr(n,a_kernpair) + local mb=getattr(n,a_markbase) + local mm=getattr(n,a_markmark) + local md=getattr(n,a_markdone) + local cb=getattr(n,a_cursbase) + local cc=getattr(n,a_curscurs) + local char=getchar(n) + report_injections("font %s, char %U, glyph %c",getfont(n),char,char) + if kp then + local k=kerns[kp] + if k[3] then + report_injections(" pairkern: dir %a, x %p, y %p, w %p, h %p",dir(k[1]),k[2],k[3],k[4],k[5]) + else + report_injections(" kern: dir %a, dx %p",dir(k[1]),k[2]) + end + end + if mb then + report_injections(" markbase: bound %a",mb) + end + if mm then + local m=marks[mm] + if mb then + local m=m[mb] + if m then + report_injections(" markmark: bound %a, index %a, dx %p, dy %p",mm,md,m[1],m[2]) + else + report_injections(" markmark: bound %a, missing index",mm) + end + else + m=m[1] + report_injections(" markmark: bound %a, dx %p, dy %p",mm,m and m[1],m and m[2]) + end + end + if cb then + report_injections(" cursbase: bound %a",cb) + end + if cc then + local c=cursives[cc] 
+ report_injections(" curscurs: bound %a, dir %a, dx %p, dy %p",cc,dir(c[1]),c[2],c[3]) + end + end + end + report_injections("end run") +end +local function show_result(head) + local current=head + local skipping=false + while current do + local id=getid(current) + if id==glyph_code then + report_injections("char: %C, width %p, xoffset %p, yoffset %p", + getchar(current),getfield(current,"width"),getfield(current,"xoffset"),getfield(current,"yoffset")) + skipping=false + elseif id==kern_code then + report_injections("kern: %p",getfield(current,"kern")) + skipping=false + elseif not skipping then + report_injections() + skipping=true + end + current=getnext(current) + end +end +function injections.handler(head,where,keep) + head=tonut(head) + local has_marks,has_cursives,has_kerns=next(marks),next(cursives),next(kerns) + if has_marks or has_cursives then + if trace_injections then + trace(head) + end + local done,ky,rl,valid,cx,wx,mk,nofvalid=false,{},{},{},{},{},{},0 + if has_kerns then + local nf,tm=nil,nil + for n in traverse_id(glyph_code,head) do + if getsubtype(n)<256 then + nofvalid=nofvalid+1 + valid[nofvalid]=n + local f=getfont(n) + if f~=nf then + nf=f + tm=fontdata[nf].resources.marks + end + if tm then + mk[n]=tm[getchar(n)] + end + local k=getattr(n,a_kernpair) + if k then + local kk=kerns[k] + if kk then + local x,y,w,h=kk[2] or 0,kk[3] or 0,kk[4] or 0,kk[5] or 0 + local dy=y-h + if dy~=0 then + ky[n]=dy + end + if w~=0 or x~=0 then + wx[n]=kk + end + rl[n]=kk[1] + end + end + end + end + else + local nf,tm=nil,nil + for n in traverse_id(glyph_code,head) do + if getsubtype(n)<256 then + nofvalid=nofvalid+1 + valid[nofvalid]=n + local f=getfont(n) + if f~=nf then + nf=f + tm=fontdata[nf].resources.marks + end + if tm then + mk[n]=tm[getchar(n)] + end + end + end + end + if nofvalid>0 then + local cx={} + if has_kerns and next(ky) then + for n,k in next,ky do + setfield(n,"yoffset",k) + end + end + if has_cursives then + local p_cursbase,p=nil,nil + local t,d,maxt={},{},0 + for i=1,nofvalid do + local n=valid[i] + if not mk[n] then + local n_cursbase=getattr(n,a_cursbase) + if p_cursbase then + local n_curscurs=getattr(n,a_curscurs) + if p_cursbase==n_curscurs then + local c=cursives[n_curscurs] + if c then + local rlmode,dx,dy,ws,wn=c[1],c[2],c[3],c[4],c[5] + if rlmode>=0 then + dx=dx-ws + else + dx=dx+wn + end + if dx~=0 then + cx[n]=dx + rl[n]=rlmode + end + dy=-dy + maxt=maxt+1 + t[maxt]=p + d[maxt]=dy + else + maxt=0 + end + end + elseif maxt>0 then + local ny=getfield(n,"yoffset") + for i=maxt,1,-1 do + ny=ny+d[i] + local ti=t[i] + setfield(ti,"yoffset",getfield(ti,"yoffset")+ny) + end + maxt=0 + end + if not n_cursbase and maxt>0 then + local ny=getfield(n,"yoffset") + for i=maxt,1,-1 do + ny=ny+d[i] + local ti=t[i] + setfield(ti,"yoffset",ny) + end + maxt=0 + end + p_cursbase,p=n_cursbase,n + end + end + if maxt>0 then + local ny=getfield(n,"yoffset") + for i=maxt,1,-1 do + ny=ny+d[i] + local ti=t[i] + setfield(ti,"yoffset",ny) + end + maxt=0 + end + if not keep then + cursives={} + end + end + if has_marks then + for i=1,nofvalid do + local p=valid[i] + local p_markbase=getattr(p,a_markbase) + if p_markbase then + local mrks=marks[p_markbase] + local nofmarks=#mrks + for n in traverse_id(glyph_code,getnext(p)) do + local n_markmark=getattr(n,a_markmark) + if p_markbase==n_markmark then + local index=getattr(n,a_markdone) or 1 + local d=mrks[index] + if d then + local rlmode=d[3] + local k=wx[p] + local px=getfield(p,"xoffset") + local ox=0 + if k then + local x=k[2] + 
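+              -- k is the pair record stored by setpair above: { rlmode, x, y, w, h, r2lflag, width };
+              -- x and w are its second and fourth fields, and (w-x) below is the advance
+              -- correction that remains to be applied after the mark offset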
local w=k[4] + if w then + if rlmode and rlmode>=0 then + ox=px-getfield(p,"width")+d[1]-(w-x) + else + ox=px-d[1]-x + end + else + if rlmode and rlmode>=0 then + ox=px-getfield(p,"width")+d[1] + else + ox=px-d[1]-x + end + end + else + local wp=getfield(p,"width") + local wn=getfield(n,"width") + if rlmode and rlmode>=0 then + ox=px-wp+d[1] + else + ox=px-d[1] + end + if wn~=0 then + insert_node_before(head,n,newkern(-wn/2)) + insert_node_after(head,n,newkern(-wn/2)) + end + end + setfield(n,"xoffset",ox) + local py=getfield(p,"yoffset") + local oy=0 + if mk[p] then + oy=py+d[2] + else + oy=getfield(n,"yoffset")+py+d[2] + end + setfield(n,"yoffset",oy) + if nofmarks==1 then + break + else + nofmarks=nofmarks-1 + end + end + elseif not n_markmark then + break + else + end + end + end + end + if not keep then + marks={} + end + end + if next(wx) then + for n,k in next,wx do + local x=k[2] + local w=k[4] + if w then + local rl=k[1] + local wx=w-x + if rl<0 then + if wx~=0 then + insert_node_before(head,n,newkern(wx)) + end + if x~=0 then + insert_node_after (head,n,newkern(x)) + end + else + if x~=0 then + insert_node_before(head,n,newkern(x)) + end + if wx~=0 then + insert_node_after (head,n,newkern(wx)) + end + end + elseif x~=0 then + insert_node_before(head,n,newkern(x)) + end + end + end + if next(cx) then + for n,k in next,cx do + if k~=0 then + local rln=rl[n] + if rln and rln<0 then + insert_node_before(head,n,newkern(-k)) + else + insert_node_before(head,n,newkern(k)) + end + end + end + end + if not keep then + kerns={} + end + return tonode(head),true + elseif not keep then + kerns,cursives,marks={},{},{} + end + elseif has_kerns then + if trace_injections then + trace(head) + end + for n in traverse_id(glyph_code,head) do + if getsubtype(n)<256 then + local k=getattr(n,a_kernpair) + if k then + local kk=kerns[k] + if kk then + local rl,x,y,w=kk[1],kk[2] or 0,kk[3],kk[4] + if y and y~=0 then + setfield(n,"yoffset",y) + end + if w then + local wx=w-x + if rl<0 then + if wx~=0 then + insert_node_before(head,n,newkern(wx)) + end + if x~=0 then + insert_node_after (head,n,newkern(x)) + end + else + if x~=0 then + insert_node_before(head,n,newkern(x)) + end + if wx~=0 then + insert_node_after(head,n,newkern(wx)) + end + end + else + if x~=0 then + insert_node_before(head,n,newkern(x)) + end + end + end + end + end + end + if not keep then + kerns={} + end + return tonode(head),true + else + end + return tonode(head),false +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-otx']={ + version=1.001, + comment="companion to font-otf.lua (analysing)", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local type=type +if not trackers then trackers={ register=function() end } end +local fonts,nodes,node=fonts,nodes,node +local allocate=utilities.storage.allocate +local otf=fonts.handlers.otf +local analyzers=fonts.analyzers +local initializers=allocate() +local methods=allocate() +analyzers.initializers=initializers +analyzers.methods=methods +analyzers.useunicodemarks=false +local a_state=attributes.private('state') +local nuts=nodes.nuts +local tonut=nuts.tonut +local getfield=nuts.getfield +local getnext=nuts.getnext +local getprev=nuts.getprev +local getid=nuts.getid +local getprop=nuts.getprop +local setprop=nuts.setprop +local getfont=nuts.getfont +local getsubtype=nuts.getsubtype +local 
getchar=nuts.getchar +local traverse_id=nuts.traverse_id +local traverse_node_list=nuts.traverse +local end_of_math=nuts.end_of_math +local nodecodes=nodes.nodecodes +local glyph_code=nodecodes.glyph +local disc_code=nodecodes.disc +local math_code=nodecodes.math +local fontdata=fonts.hashes.identifiers +local categories=characters and characters.categories or {} +local otffeatures=fonts.constructors.newfeatures("otf") +local registerotffeature=otffeatures.register +local s_init=1 local s_rphf=7 +local s_medi=2 local s_half=8 +local s_fina=3 local s_pref=9 +local s_isol=4 local s_blwf=10 +local s_mark=5 local s_pstf=11 +local s_rest=6 +local states={ + init=s_init, + medi=s_medi, + fina=s_fina, + isol=s_isol, + mark=s_mark, + rest=s_rest, + rphf=s_rphf, + half=s_half, + pref=s_pref, + blwf=s_blwf, + pstf=s_pstf, +} +local features={ + init=s_init, + medi=s_medi, + fina=s_fina, + isol=s_isol, + rphf=s_rphf, + half=s_half, + pref=s_pref, + blwf=s_blwf, + pstf=s_pstf, +} +analyzers.states=states +analyzers.features=features +function analyzers.setstate(head,font) + local useunicodemarks=analyzers.useunicodemarks + local tfmdata=fontdata[font] + local descriptions=tfmdata.descriptions + local first,last,current,n,done=nil,nil,head,0,false + current=tonut(current) + while current do + local id=getid(current) + if id==glyph_code and getfont(current)==font then + done=true + local char=getchar(current) + local d=descriptions[char] + if d then + if d.class=="mark" or (useunicodemarks and categories[char]=="mn") then + done=true + setprop(current,a_state,s_mark) + elseif n==0 then + first,last,n=current,current,1 + setprop(current,a_state,s_init) + else + last,n=current,n+1 + setprop(current,a_state,s_medi) + end + else + if first and first==last then + setprop(last,a_state,s_isol) + elseif last then + setprop(last,a_state,s_fina) + end + first,last,n=nil,nil,0 + end + elseif id==disc_code then + setprop(current,a_state,s_medi) + last=current + else + if first and first==last then + setprop(last,a_state,s_isol) + elseif last then + setprop(last,a_state,s_fina) + end + first,last,n=nil,nil,0 + if id==math_code then + current=end_of_math(current) + end + end + current=getnext(current) + end + if first and first==last then + setprop(last,a_state,s_isol) + elseif last then + setprop(last,a_state,s_fina) + end + return head,done +end +local function analyzeinitializer(tfmdata,value) + local script,language=otf.scriptandlanguage(tfmdata) + local action=initializers[script] + if not action then + elseif type(action)=="function" then + return action(tfmdata,value) + else + local action=action[language] + if action then + return action(tfmdata,value) + end + end +end +local function analyzeprocessor(head,font,attr) + local tfmdata=fontdata[font] + local script,language=otf.scriptandlanguage(tfmdata,attr) + local action=methods[script] + if not action then + elseif type(action)=="function" then + return action(head,font,attr) + else + action=action[language] + if action then + return action(head,font,attr) + end + end + return head,false +end +registerotffeature { + name="analyze", + description="analysis of character classes", + default=true, + initializers={ + node=analyzeinitializer, + }, + processors={ + position=1, + node=analyzeprocessor, + } +} +methods.latn=analyzers.setstate +local tatweel=0x0640 +local zwnj=0x200C +local zwj=0x200D +local isolated={ + [0x0600]=true,[0x0601]=true,[0x0602]=true,[0x0603]=true, + [0x0604]=true, + [0x0608]=true,[0x060B]=true,[0x0621]=true,[0x0674]=true, + [0x06DD]=true, 
+ [0x0856]=true,[0x0858]=true,[0x0857]=true, + [0x07FA]=true, + [zwnj]=true, + [0x08AD]=true, +} +local final={ + [0x0622]=true,[0x0623]=true,[0x0624]=true,[0x0625]=true, + [0x0627]=true,[0x0629]=true,[0x062F]=true,[0x0630]=true, + [0x0631]=true,[0x0632]=true,[0x0648]=true,[0x0671]=true, + [0x0672]=true,[0x0673]=true,[0x0675]=true,[0x0676]=true, + [0x0677]=true,[0x0688]=true,[0x0689]=true,[0x068A]=true, + [0x068B]=true,[0x068C]=true,[0x068D]=true,[0x068E]=true, + [0x068F]=true,[0x0690]=true,[0x0691]=true,[0x0692]=true, + [0x0693]=true,[0x0694]=true,[0x0695]=true,[0x0696]=true, + [0x0697]=true,[0x0698]=true,[0x0699]=true,[0x06C0]=true, + [0x06C3]=true,[0x06C4]=true,[0x06C5]=true,[0x06C6]=true, + [0x06C7]=true,[0x06C8]=true,[0x06C9]=true,[0x06CA]=true, + [0x06CB]=true,[0x06CD]=true,[0x06CF]=true,[0x06D2]=true, + [0x06D3]=true,[0x06D5]=true,[0x06EE]=true,[0x06EF]=true, + [0x0759]=true,[0x075A]=true,[0x075B]=true,[0x076B]=true, + [0x076C]=true,[0x0771]=true,[0x0773]=true,[0x0774]=true, + [0x0778]=true,[0x0779]=true, + [0x08AA]=true,[0x08AB]=true,[0x08AC]=true, + [0xFEF5]=true,[0xFEF7]=true,[0xFEF9]=true,[0xFEFB]=true, + [0x0710]=true,[0x0715]=true,[0x0716]=true,[0x0717]=true, + [0x0718]=true,[0x0719]=true,[0x0728]=true,[0x072A]=true, + [0x072C]=true,[0x071E]=true, + [0x072F]=true,[0x074D]=true, + [0x0840]=true,[0x0849]=true,[0x0854]=true,[0x0846]=true, + [0x084F]=true, + [0x08AE]=true,[0x08B1]=true,[0x08B2]=true, +} +local medial={ + [0x0626]=true,[0x0628]=true,[0x062A]=true,[0x062B]=true, + [0x062C]=true,[0x062D]=true,[0x062E]=true,[0x0633]=true, + [0x0634]=true,[0x0635]=true,[0x0636]=true,[0x0637]=true, + [0x0638]=true,[0x0639]=true,[0x063A]=true,[0x063B]=true, + [0x063C]=true,[0x063D]=true,[0x063E]=true,[0x063F]=true, + [0x0641]=true,[0x0642]=true,[0x0643]=true, + [0x0644]=true,[0x0645]=true,[0x0646]=true,[0x0647]=true, + [0x0649]=true,[0x064A]=true,[0x066E]=true,[0x066F]=true, + [0x0678]=true,[0x0679]=true,[0x067A]=true,[0x067B]=true, + [0x067C]=true,[0x067D]=true,[0x067E]=true,[0x067F]=true, + [0x0680]=true,[0x0681]=true,[0x0682]=true,[0x0683]=true, + [0x0684]=true,[0x0685]=true,[0x0686]=true,[0x0687]=true, + [0x069A]=true,[0x069B]=true,[0x069C]=true,[0x069D]=true, + [0x069E]=true,[0x069F]=true,[0x06A0]=true,[0x06A1]=true, + [0x06A2]=true,[0x06A3]=true,[0x06A4]=true,[0x06A5]=true, + [0x06A6]=true,[0x06A7]=true,[0x06A8]=true,[0x06A9]=true, + [0x06AA]=true,[0x06AB]=true,[0x06AC]=true,[0x06AD]=true, + [0x06AE]=true,[0x06AF]=true,[0x06B0]=true,[0x06B1]=true, + [0x06B2]=true,[0x06B3]=true,[0x06B4]=true,[0x06B5]=true, + [0x06B6]=true,[0x06B7]=true,[0x06B8]=true,[0x06B9]=true, + [0x06BA]=true,[0x06BB]=true,[0x06BC]=true,[0x06BD]=true, + [0x06BE]=true,[0x06BF]=true,[0x06C1]=true,[0x06C2]=true, + [0x06CC]=true,[0x06CE]=true,[0x06D0]=true,[0x06D1]=true, + [0x06FA]=true,[0x06FB]=true,[0x06FC]=true,[0x06FF]=true, + [0x0750]=true,[0x0751]=true,[0x0752]=true,[0x0753]=true, + [0x0754]=true,[0x0755]=true,[0x0756]=true,[0x0757]=true, + [0x0758]=true,[0x075C]=true,[0x075D]=true,[0x075E]=true, + [0x075F]=true,[0x0760]=true,[0x0761]=true,[0x0762]=true, + [0x0763]=true,[0x0764]=true,[0x0765]=true,[0x0766]=true, + [0x0767]=true,[0x0768]=true,[0x0769]=true,[0x076A]=true, + [0x076D]=true,[0x076E]=true,[0x076F]=true,[0x0770]=true, + [0x0772]=true,[0x0775]=true,[0x0776]=true,[0x0777]=true, + [0x077A]=true,[0x077B]=true,[0x077C]=true,[0x077D]=true, + [0x077E]=true,[0x077F]=true, + [0x08A0]=true,[0x08A2]=true,[0x08A4]=true,[0x08A5]=true, + [0x08A6]=true,[0x0620]=true,[0x08A8]=true,[0x08A9]=true, + 
[0x08A7]=true,[0x08A3]=true, + [0x0712]=true,[0x0713]=true,[0x0714]=true,[0x071A]=true, + [0x071B]=true,[0x071C]=true,[0x071D]=true,[0x071F]=true, + [0x0720]=true,[0x0721]=true,[0x0722]=true,[0x0723]=true, + [0x0724]=true,[0x0725]=true,[0x0726]=true,[0x0727]=true, + [0x0729]=true,[0x072B]=true,[0x072D]=true,[0x072E]=true, + [0x074E]=true,[0x074F]=true, + [0x0841]=true,[0x0842]=true,[0x0843]=true,[0x0844]=true, + [0x0845]=true,[0x0847]=true,[0x0848]=true,[0x0855]=true, + [0x0851]=true,[0x084E]=true,[0x084D]=true,[0x084A]=true, + [0x084B]=true,[0x084C]=true,[0x0850]=true,[0x0852]=true, + [0x0853]=true, + [0x07D7]=true,[0x07E8]=true,[0x07D9]=true,[0x07EA]=true, + [0x07CA]=true,[0x07DB]=true,[0x07CC]=true,[0x07DD]=true, + [0x07CE]=true,[0x07DF]=true,[0x07D4]=true,[0x07E5]=true, + [0x07E9]=true,[0x07E7]=true,[0x07E3]=true,[0x07E2]=true, + [0x07E0]=true,[0x07E1]=true,[0x07DE]=true,[0x07DC]=true, + [0x07D1]=true,[0x07DA]=true,[0x07D8]=true,[0x07D6]=true, + [0x07D2]=true,[0x07D0]=true,[0x07CF]=true,[0x07CD]=true, + [0x07CB]=true,[0x07D3]=true,[0x07E4]=true,[0x07D5]=true, + [0x07E6]=true, + [tatweel]=true,[zwj]=true, + [0x08A1]=true,[0x08AF]=true,[0x08B0]=true, +} +local arab_warned={} +local function warning(current,what) + local char=getchar(current) + if not arab_warned[char] then + log.report("analyze","arab: character %C has no %a class",char,what) + arab_warned[char]=true + end +end +local function finish(first,last) + if last then + if first==last then + local fc=getchar(first) + if medial[fc] or final[fc] then + setprop(first,a_state,s_isol) + else + warning(first,"isol") + setprop(first,a_state,s_error) + end + else + local lc=getchar(last) + if medial[lc] or final[lc] then + setprop(last,a_state,s_fina) + else + warning(last,"fina") + setprop(last,a_state,s_error) + end + end + first,last=nil,nil + elseif first then + local fc=getchar(first) + if medial[fc] or final[fc] then + setprop(first,a_state,s_isol) + else + warning(first,"isol") + setprop(first,a_state,s_error) + end + first=nil + end + return first,last +end +function methods.arab(head,font,attr) + local useunicodemarks=analyzers.useunicodemarks + local tfmdata=fontdata[font] + local marks=tfmdata.resources.marks + local first,last,current,done=nil,nil,head,false + current=tonut(current) + while current do + local id=getid(current) + if id==glyph_code and getfont(current)==font and getsubtype(current)<256 and not getprop(current,a_state) then + done=true + local char=getchar(current) + if marks[char] or (useunicodemarks and categories[char]=="mn") then + setprop(current,a_state,s_mark) + elseif isolated[char] then + first,last=finish(first,last) + setprop(current,a_state,s_isol) + first,last=nil,nil + elseif not first then + if medial[char] then + setprop(current,a_state,s_init) + first,last=first or current,current + elseif final[char] then + setprop(current,a_state,s_isol) + first,last=nil,nil + else + first,last=finish(first,last) + end + elseif medial[char] then + first,last=first or current,current + setprop(current,a_state,s_medi) + elseif final[char] then + if getprop(last,a_state)~=s_init then + setprop(last,a_state,s_medi) + end + setprop(current,a_state,s_fina) + first,last=nil,nil + elseif char>=0x0600 and char<=0x06FF then + setprop(current,a_state,s_rest) + first,last=finish(first,last) + else + first,last=finish(first,last) + end + else + if first or last then + first,last=finish(first,last) + end + if id==math_code then + current=end_of_math(current) + end + end + current=getnext(current) + end + if first or last 
then + finish(first,last) + end + return head,done +end +methods.syrc=methods.arab +methods.mand=methods.arab +methods.nko=methods.arab +directives.register("otf.analyze.useunicodemarks",function(v) + analyzers.useunicodemarks=v +end) + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-otn']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files", +} +local concat,insert,remove=table.concat,table.insert,table.remove +local gmatch,gsub,find,match,lower,strip=string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip +local type,next,tonumber,tostring=type,next,tonumber,tostring +local lpegmatch=lpeg.match +local random=math.random +local formatters=string.formatters +local logs,trackers,nodes,attributes=logs,trackers,nodes,attributes +local registertracker=trackers.register +local fonts=fonts +local otf=fonts.handlers.otf +local trace_lookups=false registertracker("otf.lookups",function(v) trace_lookups=v end) +local trace_singles=false registertracker("otf.singles",function(v) trace_singles=v end) +local trace_multiples=false registertracker("otf.multiples",function(v) trace_multiples=v end) +local trace_alternatives=false registertracker("otf.alternatives",function(v) trace_alternatives=v end) +local trace_ligatures=false registertracker("otf.ligatures",function(v) trace_ligatures=v end) +local trace_contexts=false registertracker("otf.contexts",function(v) trace_contexts=v end) +local trace_marks=false registertracker("otf.marks",function(v) trace_marks=v end) +local trace_kerns=false registertracker("otf.kerns",function(v) trace_kerns=v end) +local trace_cursive=false registertracker("otf.cursive",function(v) trace_cursive=v end) +local trace_preparing=false registertracker("otf.preparing",function(v) trace_preparing=v end) +local trace_bugs=false registertracker("otf.bugs",function(v) trace_bugs=v end) +local trace_details=false registertracker("otf.details",function(v) trace_details=v end) +local trace_applied=false registertracker("otf.applied",function(v) trace_applied=v end) +local trace_steps=false registertracker("otf.steps",function(v) trace_steps=v end) +local trace_skips=false registertracker("otf.skips",function(v) trace_skips=v end) +local trace_directions=false registertracker("otf.directions",function(v) trace_directions=v end) +local report_direct=logs.reporter("fonts","otf direct") +local report_subchain=logs.reporter("fonts","otf subchain") +local report_chain=logs.reporter("fonts","otf chain") +local report_process=logs.reporter("fonts","otf process") +local report_prepare=logs.reporter("fonts","otf prepare") +local report_warning=logs.reporter("fonts","otf warning") +registertracker("otf.verbose_chain",function(v) otf.setcontextchain(v and "verbose") end) +registertracker("otf.normal_chain",function(v) otf.setcontextchain(v and "normal") end) +registertracker("otf.replacements","otf.singles,otf.multiples,otf.alternatives,otf.ligatures") +registertracker("otf.positions","otf.marks,otf.kerns,otf.cursive") +registertracker("otf.actions","otf.replacements,otf.positions") +registertracker("otf.injections","nodes.injections") +registertracker("*otf.sample","otf.steps,otf.actions,otf.analyzing") +local nuts=nodes.nuts +local tonode=nuts.tonode +local tonut=nuts.tonut +local getfield=nuts.getfield +local setfield=nuts.setfield 
+local getnext=nuts.getnext +local getprev=nuts.getprev +local getid=nuts.getid +local getattr=nuts.getattr +local setattr=nuts.setattr +local getprop=nuts.getprop +local setprop=nuts.setprop +local getfont=nuts.getfont +local getsubtype=nuts.getsubtype +local getchar=nuts.getchar +local insert_node_after=nuts.insert_after +local delete_node=nuts.delete +local copy_node=nuts.copy +local find_node_tail=nuts.tail +local flush_node_list=nuts.flush_list +local end_of_math=nuts.end_of_math +local setmetatableindex=table.setmetatableindex +local zwnj=0x200C +local zwj=0x200D +local wildcard="*" +local default="dflt" +local nodecodes=nodes.nodecodes +local whatcodes=nodes.whatcodes +local glyphcodes=nodes.glyphcodes +local disccodes=nodes.disccodes +local glyph_code=nodecodes.glyph +local glue_code=nodecodes.glue +local disc_code=nodecodes.disc +local whatsit_code=nodecodes.whatsit +local math_code=nodecodes.math +local dir_code=whatcodes.dir +local localpar_code=whatcodes.localpar +local discretionary_code=disccodes.discretionary +local ligature_code=glyphcodes.ligature +local privateattribute=attributes.private +local a_state=privateattribute('state') +local a_cursbase=privateattribute('cursbase') +local injections=nodes.injections +local setmark=injections.setmark +local setcursive=injections.setcursive +local setkern=injections.setkern +local setpair=injections.setpair +local resetinjection=injections.reset +local setligaindex=injections.setligaindex +local getligaindex=injections.getligaindex +local cursonce=true +local fonthashes=fonts.hashes +local fontdata=fonthashes.identifiers +local otffeatures=fonts.constructors.newfeatures("otf") +local registerotffeature=otffeatures.register +local onetimemessage=fonts.loggers.onetimemessage or function() end +otf.defaultnodealternate="none" +local tfmdata=false +local characters=false +local descriptions=false +local resources=false +local marks=false +local currentfont=false +local lookuptable=false +local anchorlookups=false +local lookuptypes=false +local lookuptags=false +local handlers={} +local rlmode=0 +local featurevalue=false +local checkstep=(nodes and nodes.tracers and nodes.tracers.steppers.check) or function() end +local registerstep=(nodes and nodes.tracers and nodes.tracers.steppers.register) or function() end +local registermessage=(nodes and nodes.tracers and nodes.tracers.steppers.message) or function() end +local function logprocess(...) + if trace_steps then + registermessage(...) + end + report_direct(...) +end +local function logwarning(...) + report_direct(...) 
+end +local f_unicode=formatters["%U"] +local f_uniname=formatters["%U (%s)"] +local f_unilist=formatters["% t (% t)"] +local function gref(n) + if type(n)=="number" then + local description=descriptions[n] + local name=description and description.name + if name then + return f_uniname(n,name) + else + return f_unicode(n) + end + elseif n then + local num,nam={},{} + for i=1,#n do + local ni=n[i] + if tonumber(ni) then + local di=descriptions[ni] + num[i]=f_unicode(ni) + nam[i]=di and di.name or "-" + end + end + return f_unilist(num,nam) + else + return "" + end +end +local function cref(kind,chainname,chainlookupname,lookupname,index) + if index then + return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookuptags[lookupname],index) + elseif lookupname then + return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookuptags[lookupname]) + elseif chainlookupname then + return formatters["feature %a, chain %a, sub %a"](kind,lookuptags[chainname],lookuptags[chainlookupname]) + elseif chainname then + return formatters["feature %a, chain %a"](kind,lookuptags[chainname]) + else + return formatters["feature %a"](kind) + end +end +local function pref(kind,lookupname) + return formatters["feature %a, lookup %a"](kind,lookuptags[lookupname]) +end +local function copy_glyph(g) + local components=getfield(g,"components") + if components then + setfield(g,"components",nil) + local n=copy_node(g) + setfield(g,"components",components) + return n + else + return copy_node(g) + end +end +local function markstoligature(kind,lookupname,head,start,stop,char) + if start==stop and getchar(start)==char then + return head,start + else + local prev=getprev(start) + local next=getnext(stop) + setfield(start,"prev",nil) + setfield(stop,"next",nil) + local base=copy_glyph(start) + if head==start then + head=base + end + resetinjection(base) + setfield(base,"char",char) + setfield(base,"subtype",ligature_code) + setfield(base,"components",start) + if prev then + setfield(prev,"next",base) + end + if next then + setfield(next,"prev",base) + end + setfield(base,"next",next) + setfield(base,"prev",prev) + return head,base + end +end +local function getcomponentindex(start) + if getid(start)~=glyph_code then + return 0 + elseif getsubtype(start)==ligature_code then + local i=0 + local components=getfield(start,"components") + while components do + i=i+getcomponentindex(components) + components=getnext(components) + end + return i + elseif not marks[getchar(start)] then + return 1 + else + return 0 + end +end +local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound) + if start==stop and getchar(start)==char then + resetinjection(start) + setfield(start,"char",char) + return head,start + end + local prev=getprev(start) + local next=getnext(stop) + setfield(start,"prev",nil) + setfield(stop,"next",nil) + local base=copy_glyph(start) + if start==head then + head=base + end + resetinjection(base) + setfield(base,"char",char) + setfield(base,"subtype",ligature_code) + setfield(base,"components",start) + if prev then + setfield(prev,"next",base) + end + if next then + setfield(next,"prev",base) + end + setfield(base,"next",next) + setfield(base,"prev",prev) + if not discfound then + local deletemarks=markflag~="mark" + local components=start + local baseindex=0 + local componentindex=0 + local head=base + local current=base + while start do + local char=getchar(start) + if not marks[char] then + 
baseindex=baseindex+componentindex + componentindex=getcomponentindex(start) + elseif not deletemarks then + setligaindex(start,baseindex+getligaindex(start,componentindex)) + if trace_marks then + logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),getligaindex(start)) + end + head,current=insert_node_after(head,current,copy_node(start)) + elseif trace_marks then + logwarning("%s: delete mark %s",pref(kind,lookupname),gref(char)) + end + start=getnext(start) + end + local start=getnext(current) + while start and getid(start)==glyph_code do + local char=getchar(start) + if marks[char] then + setligaindex(start,baseindex+getligaindex(start,componentindex)) + if trace_marks then + logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),getligaindex(start)) + end + else + break + end + start=getnext(start) + end + end + return head,base +end +function handlers.gsub_single(head,start,kind,lookupname,replacement) + if trace_singles then + logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(getchar(start)),gref(replacement)) + end + resetinjection(start) + setfield(start,"char",replacement) + return head,start,true +end +local function get_alternative_glyph(start,alternatives,value,trace_alternatives) + local n=#alternatives + if value=="random" then + local r=random(1,n) + return alternatives[r],trace_alternatives and formatters["value %a, taking %a"](value,r) + elseif value=="first" then + return alternatives[1],trace_alternatives and formatters["value %a, taking %a"](value,1) + elseif value=="last" then + return alternatives[n],trace_alternatives and formatters["value %a, taking %a"](value,n) + else + value=tonumber(value) + if type(value)~="number" then + return alternatives[1],trace_alternatives and formatters["invalid value %s, taking %a"](value,1) + elseif value>n then + local defaultalt=otf.defaultnodealternate + if defaultalt=="first" then + return alternatives[n],trace_alternatives and formatters["invalid value %s, taking %a"](value,1) + elseif defaultalt=="last" then + return alternatives[1],trace_alternatives and formatters["invalid value %s, taking %a"](value,n) + else + return false,trace_alternatives and formatters["invalid value %a, %s"](value,"out of range") + end + elseif value==0 then + return getchar(start),trace_alternatives and formatters["invalid value %a, %s"](value,"no change") + elseif value<1 then + return alternatives[1],trace_alternatives and formatters["invalid value %a, taking %a"](value,1) + else + return alternatives[value],trace_alternatives and formatters["value %a, taking %a"](value,value) + end + end +end +local function multiple_glyphs(head,start,multiple,ignoremarks) + local nofmultiples=#multiple + if nofmultiples>0 then + resetinjection(start) + setfield(start,"char",multiple[1]) + if nofmultiples>1 then + local sn=getnext(start) + for k=2,nofmultiples do + local n=copy_node(start) + resetinjection(n) + setfield(n,"char",multiple[k]) + setfield(n,"next",sn) + setfield(n,"prev",start) + if sn then + setfield(sn,"prev",n) + end + setfield(start,"next",n) + start=n + end + end + return head,start,true + else + if trace_multiples then + logprocess("no multiple for %s",gref(getchar(start))) + end + return head,start,false + end +end +function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence) + local value=featurevalue==true and tfmdata.shared.features[kind] or featurevalue + local choice,comment=get_alternative_glyph(start,alternative,value,trace_alternatives) + if choice 
then + if trace_alternatives then + logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(getchar(start)),choice,gref(choice),comment) + end + resetinjection(start) + setfield(start,"char",choice) + else + if trace_alternatives then + logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(getchar(start)),comment) + end + end + return head,start,true +end +function handlers.gsub_multiple(head,start,kind,lookupname,multiple,sequence) + if trace_multiples then + logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(getchar(start)),gref(multiple)) + end + return multiple_glyphs(head,start,multiple,sequence.flags[1]) +end +function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence) + local s,stop,discfound=getnext(start),nil,false + local startchar=getchar(start) + if marks[startchar] then + while s do + local id=getid(s) + if id==glyph_code and getfont(s)==currentfont and getsubtype(s)<256 then + local lg=ligature[getchar(s)] + if lg then + stop=s + ligature=lg + s=getnext(s) + else + break + end + else + break + end + end + if stop then + local lig=ligature.ligature + if lig then + if trace_ligatures then + local stopchar=getchar(stop) + head,start=markstoligature(kind,lookupname,head,start,stop,lig) + logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start))) + else + head,start=markstoligature(kind,lookupname,head,start,stop,lig) + end + return head,start,true + else + end + end + else + local skipmark=sequence.flags[1] + while s do + local id=getid(s) + if id==glyph_code and getsubtype(s)<256 then + if getfont(s)==currentfont then + local char=getchar(s) + if skipmark and marks[char] then + s=getnext(s) + else + local lg=ligature[char] + if lg then + stop=s + ligature=lg + s=getnext(s) + else + break + end + end + else + break + end + elseif id==disc_code then + discfound=true + s=getnext(s) + else + break + end + end + local lig=ligature.ligature + if lig then + if stop then + if trace_ligatures then + local stopchar=getchar(stop) + head,start=toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound) + logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start))) + else + head,start=toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound) + end + else + resetinjection(start) + setfield(start,"char",lig) + if trace_ligatures then + logprocess("%s: replacing %s by (no real) ligature %s case 3",pref(kind,lookupname),gref(startchar),gref(lig)) + end + end + return head,start,true + else + end + end + return head,start,false +end +function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence) + local markchar=getchar(start) + if marks[markchar] then + local base=getprev(start) + if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then + local basechar=getchar(base) + if marks[basechar] then + while true do + base=getprev(base) + if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then + basechar=getchar(base) + if not marks[basechar] then + break + end + else + if trace_bugs then + logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar)) + end + return head,start,false + end + end + end + local baseanchors=descriptions[basechar] + if baseanchors then + baseanchors=baseanchors.anchors + end + if baseanchors then + local 
baseanchors=baseanchors['basechar'] + if baseanchors then + local al=anchorlookups[lookupname] + for anchor,ba in next,baseanchors do + if al[anchor] then + local ma=markanchors[anchor] + if ma then + local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) + if trace_marks then + logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)", + pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) + end + return head,start,true + end + end + end + if trace_bugs then + logwarning("%s, no matching anchors for mark %s and base %s",pref(kind,lookupname),gref(markchar),gref(basechar)) + end + end + elseif trace_bugs then + onetimemessage(currentfont,basechar,"no base anchors",report_fonts) + end + elseif trace_bugs then + logwarning("%s: prev node is no char",pref(kind,lookupname)) + end + elseif trace_bugs then + logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar)) + end + return head,start,false +end +function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence) + local markchar=getchar(start) + if marks[markchar] then + local base=getprev(start) + if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then + local basechar=getchar(base) + if marks[basechar] then + while true do + base=getprev(base) + if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then + basechar=getchar(base) + if not marks[basechar] then + break + end + else + if trace_bugs then + logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar)) + end + return head,start,false + end + end + end + local index=getligaindex(start) + local baseanchors=descriptions[basechar] + if baseanchors then + baseanchors=baseanchors.anchors + if baseanchors then + local baseanchors=baseanchors['baselig'] + if baseanchors then + local al=anchorlookups[lookupname] + for anchor,ba in next,baseanchors do + if al[anchor] then + local ma=markanchors[anchor] + if ma then + ba=ba[index] + if ba then + local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) + if trace_marks then + logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)", + pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy) + end + return head,start,true + else + if trace_bugs then + logwarning("%s: no matching anchors for mark %s and baselig %s with index %a",pref(kind,lookupname),gref(markchar),gref(basechar),index) + end + end + end + end + end + if trace_bugs then + logwarning("%s: no matching anchors for mark %s and baselig %s",pref(kind,lookupname),gref(markchar),gref(basechar)) + end + end + end + elseif trace_bugs then + onetimemessage(currentfont,basechar,"no base anchors",report_fonts) + end + elseif trace_bugs then + logwarning("%s: prev node is no char",pref(kind,lookupname)) + end + elseif trace_bugs then + logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar)) + end + return head,start,false +end +function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence) + local markchar=getchar(start) + if marks[markchar] then + local base=getprev(start) + local slc=getligaindex(start) + if slc then + while base do + local blc=getligaindex(base) + if blc and blc~=slc then + base=getprev(base) + else + break + end + end + end + if base and getid(base)==glyph_code and getfont(base)==currentfont and 
getsubtype(base)<256 then + local basechar=getchar(base) + local baseanchors=descriptions[basechar] + if baseanchors then + baseanchors=baseanchors.anchors + if baseanchors then + baseanchors=baseanchors['basemark'] + if baseanchors then + local al=anchorlookups[lookupname] + for anchor,ba in next,baseanchors do + if al[anchor] then + local ma=markanchors[anchor] + if ma then + local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) + if trace_marks then + logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)", + pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) + end + return head,start,true + end + end + end + if trace_bugs then + logwarning("%s: no matching anchors for mark %s and basemark %s",pref(kind,lookupname),gref(markchar),gref(basechar)) + end + end + end + elseif trace_bugs then + onetimemessage(currentfont,basechar,"no base anchors",report_fonts) + end + elseif trace_bugs then + logwarning("%s: prev node is no mark",pref(kind,lookupname)) + end + elseif trace_bugs then + logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar)) + end + return head,start,false +end +function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence) + local alreadydone=cursonce and getprop(start,a_cursbase) + if not alreadydone then + local done=false + local startchar=getchar(start) + if marks[startchar] then + if trace_cursive then + logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar)) + end + else + local nxt=getnext(start) + while not done and nxt and getid(nxt)==glyph_code and getfont(nxt)==currentfont and getsubtype(nxt)<256 do + local nextchar=getchar(nxt) + if marks[nextchar] then + nxt=getnext(nxt) + else + local entryanchors=descriptions[nextchar] + if entryanchors then + entryanchors=entryanchors.anchors + if entryanchors then + entryanchors=entryanchors['centry'] + if entryanchors then + local al=anchorlookups[lookupname] + for anchor,entry in next,entryanchors do + if al[anchor] then + local exit=exitanchors[anchor] + if exit then + local dx,dy,bound=setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar]) + if trace_cursive then + logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode) + end + done=true + break + end + end + end + end + end + elseif trace_bugs then + onetimemessage(currentfont,startchar,"no entry anchors",report_fonts) + end + break + end + end + end + return head,start,done + else + if trace_cursive and trace_details then + logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone) + end + return head,start,false + end +end +function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence) + local startchar=getchar(start) + local dx,dy,w,h=setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar]) + if trace_kerns then + logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h) + end + return head,start,false +end +function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence) + local snext=getnext(start) + if not snext then + return head,start,false + else + local prev,done=start,false + local factor=tfmdata.parameters.factor + local lookuptype=lookuptypes[lookupname] + while snext and 
getid(snext)==glyph_code and getfont(snext)==currentfont and getsubtype(snext)<256 do + local nextchar=getchar(snext) + local krn=kerns[nextchar] + if not krn and marks[nextchar] then + prev=snext + snext=getnext(snext) + else + if not krn then + elseif type(krn)=="table" then + if lookuptype=="pair" then + local a,b=krn[2],krn[3] + if a and #a>0 then + local startchar=getchar(start) + local x,y,w,h=setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar]) + if trace_kerns then + logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h) + end + end + if b and #b>0 then + local startchar=getchar(start) + local x,y,w,h=setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar]) + if trace_kerns then + logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h) + end + end + else + report_process("%s: check this out (old kern stuff)",pref(kind,lookupname)) + end + done=true + elseif krn~=0 then + local k=setkern(snext,factor,rlmode,krn) + if trace_kerns then + logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(getchar(prev)),gref(nextchar)) + end + done=true + end + break + end + end + return head,start,done + end +end +local chainmores={} +local chainprocs={} +local function logprocess(...) + if trace_steps then + registermessage(...) + end + report_subchain(...) +end +local logwarning=report_subchain +local function logprocess(...) + if trace_steps then + registermessage(...) + end + report_chain(...) +end +local logwarning=report_chain +function chainprocs.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname) + logwarning("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname)) + return head,start,false +end +function chainmores.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname,n) + logprocess("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname)) + return head,start,false +end +function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements) + local char=getchar(start) + local replacement=replacements[char] + if replacement then + if trace_singles then + logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement)) + end + resetinjection(start) + setfield(start,"char",replacement) + return head,start,true + else + return head,start,false + end +end +function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex) + local current=start + local subtables=currentlookup.subtables + if #subtables>1 then + logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," ")) + end + while current do + if getid(current)==glyph_code then + local currentchar=getchar(current) + local lookupname=subtables[1] + local replacement=lookuphash[lookupname] + if not replacement then + if trace_bugs then + logwarning("%s: no single hits",cref(kind,chainname,chainlookupname,lookupname,chainindex)) + end + else + replacement=replacement[currentchar] + if not replacement or replacement=="" then + if trace_bugs then + logwarning("%s: no single for %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar)) + end + else + if trace_singles then + logprocess("%s: 
replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement)) + end + resetinjection(current) + setfield(current,"char",replacement) + end + end + return head,start,true + elseif current==stop then + break + else + current=getnext(current) + end + end + return head,start,false +end +chainmores.gsub_single=chainprocs.gsub_single +function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) + local startchar=getchar(start) + local subtables=currentlookup.subtables + local lookupname=subtables[1] + local replacements=lookuphash[lookupname] + if not replacements then + if trace_bugs then + logwarning("%s: no multiple hits",cref(kind,chainname,chainlookupname,lookupname)) + end + else + replacements=replacements[startchar] + if not replacements or replacement=="" then + if trace_bugs then + logwarning("%s: no multiple for %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar)) + end + else + if trace_multiples then + logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements)) + end + return multiple_glyphs(head,start,replacements,currentlookup.flags[1]) + end + end + return head,start,false +end +chainmores.gsub_multiple=chainprocs.gsub_multiple +function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) + local current=start + local subtables=currentlookup.subtables + local value=featurevalue==true and tfmdata.shared.features[kind] or featurevalue + while current do + if getid(current)==glyph_code then + local currentchar=getchar(current) + local lookupname=subtables[1] + local alternatives=lookuphash[lookupname] + if not alternatives then + if trace_bugs then + logwarning("%s: no alternative hit",cref(kind,chainname,chainlookupname,lookupname)) + end + else + alternatives=alternatives[currentchar] + if alternatives then + local choice,comment=get_alternative_glyph(current,alternatives,value,trace_alternatives) + if choice then + if trace_alternatives then + logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(char),choice,gref(choice),comment) + end + resetinjection(start) + setfield(start,"char",choice) + else + if trace_alternatives then + logwarning("%s: no variant %a for %s, %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(char),comment) + end + end + elseif trace_bugs then + logwarning("%s: no alternative for %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar),comment) + end + end + return head,start,true + elseif current==stop then + break + else + current=getnext(current) + end + end + return head,start,false +end +chainmores.gsub_alternate=chainprocs.gsub_alternate +function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex) + local startchar=getchar(start) + local subtables=currentlookup.subtables + local lookupname=subtables[1] + local ligatures=lookuphash[lookupname] + if not ligatures then + if trace_bugs then + logwarning("%s: no ligature hits",cref(kind,chainname,chainlookupname,lookupname,chainindex)) + end + else + ligatures=ligatures[startchar] + if not ligatures then + if trace_bugs then + logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar)) + end + else + local 
s=getnext(start) + local discfound=false + local last=stop + local nofreplacements=0 + local skipmark=currentlookup.flags[1] + while s do + local id=getid(s) + if id==disc_code then + s=getnext(s) + discfound=true + else + local schar=getchar(s) + if skipmark and marks[schar] then + s=getnext(s) + else + local lg=ligatures[schar] + if lg then + ligatures,last,nofreplacements=lg,s,nofreplacements+1 + if s==stop then + break + else + s=getnext(s) + end + else + break + end + end + end + end + local l2=ligatures.ligature + if l2 then + if chainindex then + stop=last + end + if trace_ligatures then + if start==stop then + logprocess("%s: replacing character %s by ligature %s case 3",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2)) + else + logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop)),gref(l2)) + end + end + head,start=toligature(kind,lookupname,head,start,stop,l2,currentlookup.flags[1],discfound) + return head,start,true,nofreplacements + elseif trace_bugs then + if start==stop then + logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar)) + else + logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop))) + end + end + end + end + return head,start,false,0 +end +chainmores.gsub_ligature=chainprocs.gsub_ligature +function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) + local markchar=getchar(start) + if marks[markchar] then + local subtables=currentlookup.subtables + local lookupname=subtables[1] + local markanchors=lookuphash[lookupname] + if markanchors then + markanchors=markanchors[markchar] + end + if markanchors then + local base=getprev(start) + if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then + local basechar=getchar(base) + if marks[basechar] then + while true do + base=getprev(base) + if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then + basechar=getchar(base) + if not marks[basechar] then + break + end + else + if trace_bugs then + logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar)) + end + return head,start,false + end + end + end + local baseanchors=descriptions[basechar].anchors + if baseanchors then + local baseanchors=baseanchors['basechar'] + if baseanchors then + local al=anchorlookups[lookupname] + for anchor,ba in next,baseanchors do + if al[anchor] then + local ma=markanchors[anchor] + if ma then + local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) + if trace_marks then + logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)", + cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) + end + return head,start,true + end + end + end + if trace_bugs then + logwarning("%s, no matching anchors for mark %s and base %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar)) + end + end + end + elseif trace_bugs then + logwarning("%s: prev node is no char",cref(kind,chainname,chainlookupname,lookupname)) + end + elseif trace_bugs then + logwarning("%s: mark %s has no 
anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar)) + end + elseif trace_bugs then + logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar)) + end + return head,start,false +end +function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) + local markchar=getchar(start) + if marks[markchar] then + local subtables=currentlookup.subtables + local lookupname=subtables[1] + local markanchors=lookuphash[lookupname] + if markanchors then + markanchors=markanchors[markchar] + end + if markanchors then + local base=getprev(start) + if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then + local basechar=getchar(base) + if marks[basechar] then + while true do + base=getprev(base) + if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then + basechar=getchar(base) + if not marks[basechar] then + break + end + else + if trace_bugs then + logwarning("%s: no base for mark %s",cref(kind,chainname,chainlookupname,lookupname),markchar) + end + return head,start,false + end + end + end + local index=getligaindex(start) + local baseanchors=descriptions[basechar].anchors + if baseanchors then + local baseanchors=baseanchors['baselig'] + if baseanchors then + local al=anchorlookups[lookupname] + for anchor,ba in next,baseanchors do + if al[anchor] then + local ma=markanchors[anchor] + if ma then + ba=ba[index] + if ba then + local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) + if trace_marks then + logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)", + cref(kind,chainname,chainlookupname,lookupname),anchor,a or bound,gref(markchar),gref(basechar),index,dx,dy) + end + return head,start,true + end + end + end + end + if trace_bugs then + logwarning("%s: no matching anchors for mark %s and baselig %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar)) + end + end + end + elseif trace_bugs then + logwarning("feature %s, lookup %s: prev node is no char",kind,lookupname) + end + elseif trace_bugs then + logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar)) + end + elseif trace_bugs then + logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar)) + end + return head,start,false +end +function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) + local markchar=getchar(start) + if marks[markchar] then + local subtables=currentlookup.subtables + local lookupname=subtables[1] + local markanchors=lookuphash[lookupname] + if markanchors then + markanchors=markanchors[markchar] + end + if markanchors then + local base=getprev(start) + local slc=getligaindex(start) + if slc then + while base do + local blc=getligaindex(base) + if blc and blc~=slc then + base=getprev(base) + else + break + end + end + end + if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then + local basechar=getchar(base) + local baseanchors=descriptions[basechar].anchors + if baseanchors then + baseanchors=baseanchors['basemark'] + if baseanchors then + local al=anchorlookups[lookupname] + for anchor,ba in next,baseanchors do + if al[anchor] then + local ma=markanchors[anchor] + if ma then + local 
dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) + if trace_marks then + logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)", + cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) + end + return head,start,true + end + end + end + if trace_bugs then + logwarning("%s: no matching anchors for mark %s and basemark %s",gref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar)) + end + end + end + elseif trace_bugs then + logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname)) + end + elseif trace_bugs then + logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar)) + end + elseif trace_bugs then + logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar)) + end + return head,start,false +end +function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) + local alreadydone=cursonce and getprop(start,a_cursbase) + if not alreadydone then + local startchar=getchar(start) + local subtables=currentlookup.subtables + local lookupname=subtables[1] + local exitanchors=lookuphash[lookupname] + if exitanchors then + exitanchors=exitanchors[startchar] + end + if exitanchors then + local done=false + if marks[startchar] then + if trace_cursive then + logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar)) + end + else + local nxt=getnext(start) + while not done and nxt and getid(nxt)==glyph_code and getfont(nxt)==currentfont and getsubtype(nxt)<256 do + local nextchar=getchar(nxt) + if marks[nextchar] then + nxt=getnext(nxt) + else + local entryanchors=descriptions[nextchar] + if entryanchors then + entryanchors=entryanchors.anchors + if entryanchors then + entryanchors=entryanchors['centry'] + if entryanchors then + local al=anchorlookups[lookupname] + for anchor,entry in next,entryanchors do + if al[anchor] then + local exit=exitanchors[anchor] + if exit then + local dx,dy,bound=setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar]) + if trace_cursive then + logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode) + end + done=true + break + end + end + end + end + end + elseif trace_bugs then + onetimemessage(currentfont,startchar,"no entry anchors",report_fonts) + end + break + end + end + end + return head,start,done + else + if trace_cursive and trace_details then + logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone) + end + return head,start,false + end + end + return head,start,false +end +function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence) + local startchar=getchar(start) + local subtables=currentlookup.subtables + local lookupname=subtables[1] + local kerns=lookuphash[lookupname] + if kerns then + kerns=kerns[startchar] + if kerns then + local dx,dy,w,h=setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar]) + if trace_kerns then + logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h) + end + end + end + return head,start,false +end 
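-- [Editor's sketch, not part of the patch.] The gsub/gpos chain handlers above all read
-- their data as lookuphash[lookupname][startchar]. A minimal standalone illustration of
-- two such shapes (names and values are hypothetical; the real entries are built by
-- prepare_lookups further below): a flat map as consumed by gsub_single, and a nested
-- trie whose leaves carry a "ligature" field, walked the way gsub_ligature walks it.
local single = { [0x0066] = 0xFB00 }                -- 'f' -> some replacement glyph
local ligatures = {
  [0x0066] = {                                      -- start char 'f'
    [0x0066] = { ligature = 0xFB00 },               -- f f -> ff ligature
    [0x0069] = { ligature = 0xFB01 },               -- f i -> fi ligature
  },
}
local function findligature(chars)                  -- follow the trie as far as it matches
  local t = ligatures[chars[1]]
  local i = 2
  while t and chars[i] do
    local n = t[chars[i]]
    if not n then break end
    t, i = n, i + 1
  end
  return t and t.ligature                           -- leaf field, cf. target.ligature below
end
print(single[0x0066])                               -- 64256 (U+FB00)
print(findligature { 0x0066, 0x0069 })              -- 64257 (U+FB01)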
+chainmores.gpos_single=chainprocs.gpos_single +function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence) + local snext=getnext(start) + if snext then + local startchar=getchar(start) + local subtables=currentlookup.subtables + local lookupname=subtables[1] + local kerns=lookuphash[lookupname] + if kerns then + kerns=kerns[startchar] + if kerns then + local lookuptype=lookuptypes[lookupname] + local prev,done=start,false + local factor=tfmdata.parameters.factor + while snext and getid(snext)==glyph_code and getfont(snext)==currentfont and getsubtype(snext)<256 do + local nextchar=getchar(snext) + local krn=kerns[nextchar] + if not krn and marks[nextchar] then + prev=snext + snext=getnext(snext) + else + if not krn then + elseif type(krn)=="table" then + if lookuptype=="pair" then + local a,b=krn[2],krn[3] + if a and #a>0 then + local startchar=getchar(start) + local x,y,w,h=setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar]) + if trace_kerns then + logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h) + end + end + if b and #b>0 then + local startchar=getchar(start) + local x,y,w,h=setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar]) + if trace_kerns then + logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h) + end + end + else + report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname)) + local a,b=krn[2],krn[6] + if a and a~=0 then + local k=setkern(snext,factor,rlmode,a) + if trace_kerns then + logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar)) + end + end + if b and b~=0 then + logwarning("%s: ignoring second kern xoff %s",cref(kind,chainname,chainlookupname),b*factor) + end + end + done=true + elseif krn~=0 then + local k=setkern(snext,factor,rlmode,krn) + if trace_kerns then + logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar)) + end + done=true + end + break + end + end + return head,start,done + end + end + end + return head,start,false +end +chainmores.gpos_pair=chainprocs.gpos_pair +local function show_skip(kind,chainname,char,ck,class) + if ck[9] then + logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a, %a => %a",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10]) + else + logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a",cref(kind,chainname),gref(char),class,ck[1],ck[2]) + end +end +local quit_on_no_replacement=true +directives.register("otf.chain.quitonnoreplacement",function(value) + quit_on_no_replacement=value +end) +local function normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash) + local flags=sequence.flags + local done=false + local skipmark=flags[1] + local skipligature=flags[2] + local skipbase=flags[3] + local someskip=skipmark or skipligature or skipbase + local markclass=sequence.markclass + local skipped=false + for k=1,#contexts do + local match=true + local current=start + local last=start + local ck=contexts[k] + local seq=ck[3] + local s=#seq + if s==1 then + match=getid(current)==glyph_code and getfont(current)==currentfont and getsubtype(current)<256 and 
seq[1][getchar(current)] + else + local f,l=ck[4],ck[5] + if f==1 and f==l then + else + if f==l then + else + local n=f+1 + last=getnext(last) + while n<=l do + if last then + local id=getid(last) + if id==glyph_code then + if getfont(last)==currentfont and getsubtype(last)<256 then + local char=getchar(last) + local ccd=descriptions[char] + if ccd then + local class=ccd.class + if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then + skipped=true + if trace_skips then + show_skip(kind,chainname,char,ck,class) + end + last=getnext(last) + elseif seq[n][char] then + if n<l then + last=getnext(last) + end + n=n+1 + else + match=false + break + end + else + match=false + break + end + else + match=false + break + end + elseif id==disc_code then + last=getnext(last) + elseif seq[n][32] then + n=n+1 + last=getnext(last) + else + match=false + break + end + else + match=false + break + end + end + end + end + if match and f>1 then + local prev=getprev(start) + if prev then + local n=f-1 + while n>=1 do + if prev then + local id=getid(prev) + if id==glyph_code then + if getfont(prev)==currentfont and getsubtype(prev)<256 then + local char=getchar(prev) + local ccd=descriptions[char] + if ccd then + local class=ccd.class + if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then + skipped=true + if trace_skips then + show_skip(kind,chainname,char,ck,class) + end + elseif seq[n][char] then + n=n -1 + else + match=false + break + end + else + match=false + break + end + else + match=false + break + end + elseif id==disc_code then + elseif seq[n][32] then + n=n -1 + else + match=false + break + end + prev=getprev(prev) + elseif seq[n][32] then + n=n -1 + else + match=false + break + end + end + elseif f==2 then + match=seq[1][32] + else + for n=f-1,1 do + if not seq[n][32] then + match=false + break + end + end + end + end + if match and s>l then + local current=last and getnext(last) + if current then + local n=l+1 + while n<=s do + if current then + local id=getid(current) + if id==glyph_code then + if getfont(current)==currentfont and getsubtype(current)<256 then + local char=getchar(current) + local ccd=descriptions[char] + if ccd then + local class=ccd.class + if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then + skipped=true + if trace_skips then + show_skip(kind,chainname,char,ck,class) + end + elseif seq[n][char] then + n=n+1 + else + match=false + break + end + else + match=false + break + end + else + match=false + break + end + elseif id==disc_code then + elseif seq[n][32] then + n=n+1 + else + match=false + break + end + current=getnext(current) + elseif seq[n][32] then + n=n+1 + else + match=false + break + end + end + elseif s-l==1 then + match=seq[s][32] + else + for n=l+1,s do + if not seq[n][32] then + match=false + break + end + end + end + end + end + if match then + if trace_contexts then + local rule,lookuptype,f,l=ck[1],ck[2],ck[4],ck[5] + local char=getchar(start) + if ck[9] then + logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a, %a => %a", + cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10]) + else + logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a", + cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype) + end + end + local chainlookups=ck[6] + if chainlookups then + local nofchainlookups=#chainlookups + if nofchainlookups==1 then + local chainlookupname=chainlookups[1] + local chainlookup=lookuptable[chainlookupname] + if chainlookup then + local cp=chainprocs[chainlookup.type] + if cp then + local ok + head,start,ok=cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence) + if ok then + 
done=true + end + else + logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type) + end + else + logprocess("%s is not yet supported",cref(kind,chainname,chainlookupname)) + end + else + local i=1 + while true do + if skipped then + while true do + local char=getchar(start) + local ccd=descriptions[char] + if ccd then + local class=ccd.class + if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then + start=getnext(start) + else + break + end + else + break + end + end + end + local chainlookupname=chainlookups[i] + local chainlookup=lookuptable[chainlookupname] + if not chainlookup then + i=i+1 + else + local cp=chainmores[chainlookup.type] + if not cp then + logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type) + i=i+1 + else + local ok,n + head,start,ok,n=cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence) + if ok then + done=true + i=i+(n or 1) + else + i=i+1 + end + end + end + if i>nofchainlookups then + break + elseif start then + start=getnext(start) + else + end + end + end + else + local replacements=ck[7] + if replacements then + head,start,done=chainprocs.reversesub(head,start,last,kind,chainname,ck,lookuphash,replacements) + else + done=quit_on_no_replacement + if trace_contexts then + logprocess("%s: skipping match",cref(kind,chainname)) + end + end + end + end + end + return head,start,done +end +local verbose_handle_contextchain=function(font,...) + logwarning("no verbose handler installed, reverting to 'normal'") + otf.setcontextchain() + return normal_handle_contextchain(...) +end +otf.chainhandlers={ + normal=normal_handle_contextchain, + verbose=verbose_handle_contextchain, +} +function otf.setcontextchain(method) + if not method or method=="normal" or not otf.chainhandlers[method] then + if handlers.contextchain then + logwarning("installing normal contextchain handler") + end + handlers.contextchain=normal_handle_contextchain + else + logwarning("installing contextchain handler %a",method) + local handler=otf.chainhandlers[method] + handlers.contextchain=function(...) + return handler(currentfont,...) + end + end + handlers.gsub_context=handlers.contextchain + handlers.gsub_contextchain=handlers.contextchain + handlers.gsub_reversecontextchain=handlers.contextchain + handlers.gpos_contextchain=handlers.contextchain + handlers.gpos_context=handlers.contextchain +end +otf.setcontextchain() +local missing={} +local function logprocess(...) + if trace_steps then + registermessage(...) + end + report_process(...) 
+end +local logwarning=report_process +local function report_missing_cache(typ,lookup) + local f=missing[currentfont] if not f then f={} missing[currentfont]=f end + local t=f[typ] if not t then t={} f[typ]=t end + if not t[lookup] then + t[lookup]=true + logwarning("missing cache for lookup %a, type %a, font %a, name %a",lookup,typ,currentfont,tfmdata.properties.fullname) + end +end +local resolved={} +local lookuphashes={} +setmetatableindex(lookuphashes,function(t,font) + local lookuphash=fontdata[font].resources.lookuphash + if not lookuphash or not next(lookuphash) then + lookuphash=false + end + t[font]=lookuphash + return lookuphash +end) +local autofeatures=fonts.analyzers.features +local function initialize(sequence,script,language,enabled) + local features=sequence.features + if features then + local order=sequence.order + if order then + for i=1,#order do + local kind=order[i] + local valid=enabled[kind] + if valid then + local scripts=features[kind] + local languages=scripts[script] or scripts[wildcard] + if languages and (languages[language] or languages[wildcard]) then + return { valid,autofeatures[kind] or false,sequence.chain or 0,kind,sequence } + end + end + end + else + end + end + return false +end +function otf.dataset(tfmdata,font) + local shared=tfmdata.shared + local properties=tfmdata.properties + local language=properties.language or "dflt" + local script=properties.script or "dflt" + local enabled=shared.features + local res=resolved[font] + if not res then + res={} + resolved[font]=res + end + local rs=res[script] + if not rs then + rs={} + res[script]=rs + end + local rl=rs[language] + if not rl then + rl={ + } + rs[language]=rl + local sequences=tfmdata.resources.sequences + for s=1,#sequences do + local v=enabled and initialize(sequences[s],script,language,enabled) + if v then + rl[#rl+1]=v + end + end + end + return rl +end +local function featuresprocessor(head,font,attr) + local lookuphash=lookuphashes[font] + if not lookuphash then + return head,false + end + head=tonut(head) + if trace_steps then + checkstep(head) + end + tfmdata=fontdata[font] + descriptions=tfmdata.descriptions + characters=tfmdata.characters + resources=tfmdata.resources + marks=resources.marks + anchorlookups=resources.lookup_to_anchor + lookuptable=resources.lookups + lookuptypes=resources.lookuptypes + lookuptags=resources.lookuptags + currentfont=font + rlmode=0 + local sequences=resources.sequences + local done=false + local datasets=otf.dataset(tfmdata,font,attr) + local dirstack={} + for s=1,#datasets do + local dataset=datasets[s] + featurevalue=dataset[1] + local sequence=dataset[5] + local rlparmode=0 + local topstack=0 + local success=false + local attribute=dataset[2] + local chain=dataset[3] + local typ=sequence.type + local subtables=sequence.subtables + if chain<0 then + local handler=handlers[typ] + local start=find_node_tail(head) + while start do + local id=getid(start) + if id==glyph_code then + if getfont(start)==font and getsubtype(start)<256 then + local a=getattr(start,0) + if a then + a=a==attr + else + a=true + end + if a then + for i=1,#subtables do + local lookupname=subtables[i] + local lookupcache=lookuphash[lookupname] + if lookupcache then + local lookupmatch=lookupcache[getchar(start)] + if lookupmatch then + head,start,success=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) + if success then + break + end + end + else + report_missing_cache(typ,lookupname) + end + end + if start then start=getprev(start) end + else + 
start=getprev(start) + end + else + start=getprev(start) + end + else + start=getprev(start) + end + end + else + local handler=handlers[typ] + local ns=#subtables + local start=head + rlmode=0 + if ns==1 then + local lookupname=subtables[1] + local lookupcache=lookuphash[lookupname] + if not lookupcache then + report_missing_cache(typ,lookupname) + else + local function subrun(start) + local head=start + local done=false + while start do + local id=getid(start) + if id==glyph_code and getfont(start)==font and getsubtype(start)<256 then + local a=getattr(start,0) + if a then + a=(a==attr) and (not attribute or getprop(start,a_state)==attribute) + else + a=not attribute or getprop(start,a_state)==attribute + end + if a then + local lookupmatch=lookupcache[getchar(start)] + if lookupmatch then + local ok + head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) + if ok then + done=true + end + end + if start then start=getnext(start) end + else + start=getnext(start) + end + else + start=getnext(start) + end + end + if done then + success=true + return head + end + end + local function kerndisc(disc) + local prev=getprev(disc) + local next=getnext(disc) + if prev and next then + setfield(prev,"next",next) + local a=getattr(prev,0) + if a then + a=(a==attr) and (not attribute or getprop(prev,a_state)==attribute) + else + a=not attribute or getprop(prev,a_state)==attribute + end + if a then + local lookupmatch=lookupcache[getchar(prev)] + if lookupmatch then + local h,d,ok=handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) + if ok then + done=true + success=true + end + end + end + setfield(prev,"next",disc) + end + return next + end + while start do + local id=getid(start) + if id==glyph_code then + if getfont(start)==font and getsubtype(start)<256 then + local a=getattr(start,0) + if a then + a=(a==attr) and (not attribute or getprop(start,a_state)==attribute) + else + a=not attribute or getprop(start,a_state)==attribute + end + if a then + local lookupmatch=lookupcache[getchar(start)] + if lookupmatch then + local ok + head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) + if ok then + success=true + end + end + if start then start=getnext(start) end + else + start=getnext(start) + end + else + start=getnext(start) + end + elseif id==disc_code then + if getsubtype(start)==discretionary_code then + local pre=getfield(start,"pre") + if pre then + local new=subrun(pre) + if new then setfield(start,"pre",new) end + end + local post=getfield(start,"post") + if post then + local new=subrun(post) + if new then setfield(start,"post",new) end + end + local replace=getfield(start,"replace") + if replace then + local new=subrun(replace) + if new then setfield(start,"replace",new) end + end +elseif typ=="gpos_single" or typ=="gpos_pair" then + kerndisc(start) + end + start=getnext(start) + elseif id==whatsit_code then + local subtype=getsubtype(start) + if subtype==dir_code then + local dir=getfield(start,"dir") + if dir=="+TRT" or dir=="+TLT" then + topstack=topstack+1 + dirstack[topstack]=dir + elseif dir=="-TRT" or dir=="-TLT" then + topstack=topstack-1 + end + local newdir=dirstack[topstack] + if newdir=="+TRT" then + rlmode=-1 + elseif newdir=="+TLT" then + rlmode=1 + else + rlmode=rlparmode + end + if trace_directions then + report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir) + end + elseif subtype==localpar_code then + local 
dir=getfield(start,"dir") + if dir=="TRT" then + rlparmode=-1 + elseif dir=="TLT" then + rlparmode=1 + else + rlparmode=0 + end + rlmode=rlparmode + if trace_directions then + report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode) + end + end + start=getnext(start) + elseif id==math_code then + start=getnext(end_of_math(start)) + else + start=getnext(start) + end + end + end + else + local function subrun(start) + local head=start + local done=false + while start do + local id=getid(start) + if id==glyph_code and getfont(start)==font and getsubtype(start)<256 then + local a=getattr(start,0) + if a then + a=(a==attr) and (not attribute or getprop(start,a_state)==attribute) + else + a=not attribute or getprop(start,a_state)==attribute + end + if a then + for i=1,ns do + local lookupname=subtables[i] + local lookupcache=lookuphash[lookupname] + if lookupcache then + local lookupmatch=lookupcache[getchar(start)] + if lookupmatch then + local ok + head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) + if ok then + done=true + break + elseif not start then + break + end + end + else + report_missing_cache(typ,lookupname) + end + end + if start then start=getnext(start) end + else + start=getnext(start) + end + else + start=getnext(start) + end + end + if done then + success=true + return head + end + end + local function kerndisc(disc) + local prev=getprev(disc) + local next=getnext(disc) + if prev and next then + setfield(prev,"next",next) + local a=getattr(prev,0) + if a then + a=(a==attr) and (not attribute or getprop(prev,a_state)==attribute) + else + a=not attribute or getprop(prev,a_state)==attribute + end + if a then + for i=1,ns do + local lookupname=subtables[i] + local lookupcache=lookuphash[lookupname] + if lookupcache then + local lookupmatch=lookupcache[getchar(prev)] + if lookupmatch then + local h,d,ok=handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) + if ok then + done=true + break + end + end + else + report_missing_cache(typ,lookupname) + end + end + end + setfield(prev,"next",disc) + end + return next + end + while start do + local id=getid(start) + if id==glyph_code then + if getfont(start)==font and getsubtype(start)<256 then + local a=getattr(start,0) + if a then + a=(a==attr) and (not attribute or getprop(start,a_state)==attribute) + else + a=not attribute or getprop(start,a_state)==attribute + end + if a then + for i=1,ns do + local lookupname=subtables[i] + local lookupcache=lookuphash[lookupname] + if lookupcache then + local lookupmatch=lookupcache[getchar(start)] + if lookupmatch then + local ok + head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) + if ok then + success=true + break + elseif not start then + break + end + end + else + report_missing_cache(typ,lookupname) + end + end + if start then start=getnext(start) end + else + start=getnext(start) + end + else + start=getnext(start) + end + elseif id==disc_code then + if getsubtype(start)==discretionary_code then + local pre=getfield(start,"pre") + if pre then + local new=subrun(pre) + if new then setfield(start,"pre",new) end + end + local post=getfield(start,"post") + if post then + local new=subrun(post) + if new then setfield(start,"post",new) end + end + local replace=getfield(start,"replace") + if replace then + local new=subrun(replace) + if new then setfield(start,"replace",new) end + end +elseif typ=="gpos_single" or typ=="gpos_pair" then + kerndisc(start) + end + 
start=getnext(start) + elseif id==whatsit_code then + local subtype=getsubtype(start) + if subtype==dir_code then + local dir=getfield(start,"dir") + if dir=="+TRT" or dir=="+TLT" then + topstack=topstack+1 + dirstack[topstack]=dir + elseif dir=="-TRT" or dir=="-TLT" then + topstack=topstack-1 + end + local newdir=dirstack[topstack] + if newdir=="+TRT" then + rlmode=-1 + elseif newdir=="+TLT" then + rlmode=1 + else + rlmode=rlparmode + end + if trace_directions then + report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir) + end + elseif subtype==localpar_code then + local dir=getfield(start,"dir") + if dir=="TRT" then + rlparmode=-1 + elseif dir=="TLT" then + rlparmode=1 + else + rlparmode=0 + end + rlmode=rlparmode + if trace_directions then + report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode) + end + end + start=getnext(start) + elseif id==math_code then + start=getnext(end_of_math(start)) + else + start=getnext(start) + end + end + end + end + if success then + done=true + end + if trace_steps then + registerstep(head) + end + end + head=tonode(head) + return head,done +end +local function generic(lookupdata,lookupname,unicode,lookuphash) + local target=lookuphash[lookupname] + if target then + target[unicode]=lookupdata + else + lookuphash[lookupname]={ [unicode]=lookupdata } + end +end +local action={ + substitution=generic, + multiple=generic, + alternate=generic, + position=generic, + ligature=function(lookupdata,lookupname,unicode,lookuphash) + local target=lookuphash[lookupname] + if not target then + target={} + lookuphash[lookupname]=target + end + for i=1,#lookupdata do + local li=lookupdata[i] + local tu=target[li] + if not tu then + tu={} + target[li]=tu + end + target=tu + end + target.ligature=unicode + end, + pair=function(lookupdata,lookupname,unicode,lookuphash) + local target=lookuphash[lookupname] + if not target then + target={} + lookuphash[lookupname]=target + end + local others=target[unicode] + local paired=lookupdata[1] + if others then + others[paired]=lookupdata + else + others={ [paired]=lookupdata } + target[unicode]=others + end + end, +} +local function prepare_lookups(tfmdata) + local rawdata=tfmdata.shared.rawdata + local resources=rawdata.resources + local lookuphash=resources.lookuphash + local anchor_to_lookup=resources.anchor_to_lookup + local lookup_to_anchor=resources.lookup_to_anchor + local lookuptypes=resources.lookuptypes + local characters=tfmdata.characters + local descriptions=tfmdata.descriptions + for unicode,character in next,characters do + local description=descriptions[unicode] + if description then + local lookups=description.slookups + if lookups then + for lookupname,lookupdata in next,lookups do + action[lookuptypes[lookupname]](lookupdata,lookupname,unicode,lookuphash) + end + end + local lookups=description.mlookups + if lookups then + for lookupname,lookuplist in next,lookups do + local lookuptype=lookuptypes[lookupname] + for l=1,#lookuplist do + local lookupdata=lookuplist[l] + action[lookuptype](lookupdata,lookupname,unicode,lookuphash) + end + end + end + local list=description.kerns + if list then + for lookup,krn in next,list do + local target=lookuphash[lookup] + if target then + target[unicode]=krn + else + lookuphash[lookup]={ [unicode]=krn } + end + end + end + local list=description.anchors + if list then + for typ,anchors in next,list do + if typ=="mark" or typ=="cexit" then + for name,anchor in 
next,anchors do + local lookups=anchor_to_lookup[name] + if lookups then + for lookup,_ in next,lookups do + local target=lookuphash[lookup] + if target then + target[unicode]=anchors + else + lookuphash[lookup]={ [unicode]=anchors } + end + end + end + end + end + end + end + end + end +end +local function split(replacement,original) + local result={} + for i=1,#replacement do + result[original[i]]=replacement[i] + end + return result +end +local valid={ + coverage={ chainsub=true,chainpos=true,contextsub=true }, + reversecoverage={ reversesub=true }, + glyphs={ chainsub=true,chainpos=true }, +} +local function prepare_contextchains(tfmdata) + local rawdata=tfmdata.shared.rawdata + local resources=rawdata.resources + local lookuphash=resources.lookuphash + local lookuptags=resources.lookuptags + local lookups=rawdata.lookups + if lookups then + for lookupname,lookupdata in next,rawdata.lookups do + local lookuptype=lookupdata.type + if lookuptype then + local rules=lookupdata.rules + if rules then + local format=lookupdata.format + local validformat=valid[format] + if not validformat then + report_prepare("unsupported format %a",format) + elseif not validformat[lookuptype] then + report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookuptags[lookupname]) + else + local contexts=lookuphash[lookupname] + if not contexts then + contexts={} + lookuphash[lookupname]=contexts + end + local t,nt={},0 + for nofrules=1,#rules do + local rule=rules[nofrules] + local current=rule.current + local before=rule.before + local after=rule.after + local replacements=rule.replacements + local sequence={} + local nofsequences=0 + if before then + for n=1,#before do + nofsequences=nofsequences+1 + sequence[nofsequences]=before[n] + end + end + local start=nofsequences+1 + for n=1,#current do + nofsequences=nofsequences+1 + sequence[nofsequences]=current[n] + end + local stop=nofsequences + if after then + for n=1,#after do + nofsequences=nofsequences+1 + sequence[nofsequences]=after[n] + end + end + if sequence[1] then + nt=nt+1 + t[nt]={ nofrules,lookuptype,sequence,start,stop,rule.lookups,replacements } + for unic,_ in next,sequence[start] do + local cu=contexts[unic] + if not cu then + contexts[unic]=t + end + end + end + end + end + else + end + else + report_prepare("missing lookuptype for lookupname %a",lookuptags[lookupname]) + end + end + end +end +local function featuresinitializer(tfmdata,value) + if true then + local rawdata=tfmdata.shared.rawdata + local properties=rawdata.properties + if not properties.initialized then + local starttime=trace_preparing and os.clock() + local resources=rawdata.resources + resources.lookuphash=resources.lookuphash or {} + prepare_contextchains(tfmdata) + prepare_lookups(tfmdata) + properties.initialized=true + if trace_preparing then + report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,tfmdata.properties.fullname) + end + end + end +end +registerotffeature { + name="features", + description="features", + default=true, + initializers={ + position=1, + node=featuresinitializer, + }, + processors={ + node=featuresprocessor, + } +} +otf.handlers=handlers + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-otp']={ + version=1.001, + comment="companion to font-otf.lua (packing)", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} 
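-- [Editor's sketch, not part of the patch.] The packdata/unpackdata code that follows
-- shrinks the cached font table by storing each distinct subtable (kerns, flags,
-- anchors, ...) once in data.tables and replacing repeats with an index. A standalone
-- toy version of that round trip (illustrative only; the real code hashes serialized
-- forms per table kind and applies a usage threshold before sharing):
local function packlist(list)
  local tables, index, seen = {}, {}, {}
  for i = 1, #list do
    local key = table.concat(list[i], ",")          -- toy serialization of one entry
    local slot = seen[key]
    if not slot then
      tables[#tables+1] = list[i]                   -- first occurrence: keep the table
      slot = #tables
      seen[key] = slot
    end
    index[i] = slot                                 -- later occurrences: just a reference
  end
  return index, tables
end
local function unpacklist(index, tables)
  local list = {}
  for i = 1, #index do
    list[i] = tables[index[i]]                      -- resolve the references again
  end
  return list
end
local kerns = { { 10, 0 }, { 10, 0 }, { -5, 0 }, { 10, 0 } }
local index, shared = packlist(kerns)
print(#shared)                                      -- 2 distinct entries instead of 4
print(unpacklist(index, shared)[4][1])              -- 10, same data after unpacking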
+local next,type=next,type +local sort,concat=table.sort,table.concat +local sortedhash=table.sortedhash +local trace_packing=false trackers.register("otf.packing",function(v) trace_packing=v end) +local trace_loading=false trackers.register("otf.loading",function(v) trace_loading=v end) +local report_otf=logs.reporter("fonts","otf loading") +fonts=fonts or {} +local handlers=fonts.handlers or {} +fonts.handlers=handlers +local otf=handlers.otf or {} +handlers.otf=otf +local enhancers=otf.enhancers or {} +otf.enhancers=enhancers +local glists=otf.glists or { "gsub","gpos" } +otf.glists=glists +local criterium=1 +local threshold=0 +local function tabstr_normal(t) + local s={} + local n=0 + for k,v in next,t do + n=n+1 + if type(v)=="table" then + s[n]=k..">"..tabstr_normal(v) + elseif v==true then + s[n]=k.."+" + elseif v then + s[n]=k.."="..v + else + s[n]=k.."-" + end + end + if n==0 then + return "" + elseif n==1 then + return s[1] + else + sort(s) + return concat(s,",") + end +end +local function tabstr_flat(t) + local s={} + local n=0 + for k,v in next,t do + n=n+1 + s[n]=k.."="..v + end + if n==0 then + return "" + elseif n==1 then + return s[1] + else + sort(s) + return concat(s,",") + end +end +local function tabstr_mixed(t) + local s={} + local n=#t + if n==0 then + return "" + elseif n==1 then + local k=t[1] + if k==true then + return "++" + elseif k==false then + return "--" + else + return tostring(k) + end + else + for i=1,n do + local k=t[i] + if k==true then + s[i]="++" + elseif k==false then + s[i]="--" + else + s[i]=k + end + end + return concat(s,",") + end +end +local function tabstr_boolean(t) + local s={} + local n=0 + for k,v in next,t do + n=n+1 + if v then + s[n]=k.."+" + else + s[n]=k.."-" + end + end + if n==0 then + return "" + elseif n==1 then + return s[1] + else + sort(s) + return concat(s,",") + end +end +local function packdata(data) + if data then + local h,t,c={},{},{} + local hh,tt,cc={},{},{} + local nt,ntt=0,0 + local function pack_normal(v) + local tag=tabstr_normal(v) + local ht=h[tag] + if ht then + c[ht]=c[ht]+1 + return ht + else + nt=nt+1 + t[nt]=v + h[tag]=nt + c[nt]=1 + return nt + end + end + local function pack_flat(v) + local tag=tabstr_flat(v) + local ht=h[tag] + if ht then + c[ht]=c[ht]+1 + return ht + else + nt=nt+1 + t[nt]=v + h[tag]=nt + c[nt]=1 + return nt + end + end + local function pack_boolean(v) + local tag=tabstr_boolean(v) + local ht=h[tag] + if ht then + c[ht]=c[ht]+1 + return ht + else + nt=nt+1 + t[nt]=v + h[tag]=nt + c[nt]=1 + return nt + end + end + local function pack_indexed(v) + local tag=concat(v," ") + local ht=h[tag] + if ht then + c[ht]=c[ht]+1 + return ht + else + nt=nt+1 + t[nt]=v + h[tag]=nt + c[nt]=1 + return nt + end + end + local function pack_mixed(v) + local tag=tabstr_mixed(v) + local ht=h[tag] + if ht then + c[ht]=c[ht]+1 + return ht + else + nt=nt+1 + t[nt]=v + h[tag]=nt + c[nt]=1 + return nt + end + end + local function pack_final(v) + if c[v]<=criterium then + return t[v] + else + local hv=hh[v] + if hv then + return hv + else + ntt=ntt+1 + tt[ntt]=t[v] + hh[v]=ntt + cc[ntt]=c[v] + return ntt + end + end + end + local function success(stage,pass) + if nt==0 then + if trace_loading or trace_packing then + report_otf("pack quality: nothing to pack") + end + return false + elseif nt>=threshold then + local one,two,rest=0,0,0 + if pass==1 then + for k,v in next,c do + if v==1 then + one=one+1 + elseif v==2 then + two=two+1 + else + rest=rest+1 + end + end + else + for k,v in next,cc do + if v>20 then + 
rest=rest+1 + elseif v>10 then + two=two+1 + else + one=one+1 + end + end + data.tables=tt + end + if trace_loading or trace_packing then + report_otf("pack quality: stage %s, pass %s, %s packed, 1-10:%s, 11-20:%s, rest:%s (criterium: %s)",stage,pass,one+two+rest,one,two,rest,criterium) + end + return true + else + if trace_loading or trace_packing then + report_otf("pack quality: stage %s, pass %s, %s packed, aborting pack (threshold: %s)",stage,pass,nt,threshold) + end + return false + end + end + local function packers(pass) + if pass==1 then + return pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed + else + return pack_final,pack_final,pack_final,pack_final,pack_final + end + end + local resources=data.resources + local lookuptypes=resources.lookuptypes + for pass=1,2 do + if trace_packing then + report_otf("start packing: stage 1, pass %s",pass) + end + local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass) + for unicode,description in next,data.descriptions do + local boundingbox=description.boundingbox + if boundingbox then + description.boundingbox=pack_indexed(boundingbox) + end + local slookups=description.slookups + if slookups then + for tag,slookup in next,slookups do + local what=lookuptypes[tag] + if what=="pair" then + local t=slookup[2] if t then slookup[2]=pack_indexed(t) end + local t=slookup[3] if t then slookup[3]=pack_indexed(t) end + elseif what~="substitution" then + slookups[tag]=pack_indexed(slookup) + end + end + end + local mlookups=description.mlookups + if mlookups then + for tag,mlookup in next,mlookups do + local what=lookuptypes[tag] + if what=="pair" then + for i=1,#mlookup do + local lookup=mlookup[i] + local t=lookup[2] if t then lookup[2]=pack_indexed(t) end + local t=lookup[3] if t then lookup[3]=pack_indexed(t) end + end + elseif what~="substitution" then + for i=1,#mlookup do + mlookup[i]=pack_indexed(mlookup[i]) + end + end + end + end + local kerns=description.kerns + if kerns then + for tag,kern in next,kerns do + kerns[tag]=pack_flat(kern) + end + end + local math=description.math + if math then + local kerns=math.kerns + if kerns then + for tag,kern in next,kerns do + kerns[tag]=pack_normal(kern) + end + end + end + local anchors=description.anchors + if anchors then + for what,anchor in next,anchors do + if what=="baselig" then + for _,a in next,anchor do + for k=1,#a do + a[k]=pack_indexed(a[k]) + end + end + else + for k,v in next,anchor do + anchor[k]=pack_indexed(v) + end + end + end + end + local altuni=description.altuni + if altuni then + for i=1,#altuni do + altuni[i]=pack_flat(altuni[i]) + end + end + end + local lookups=data.lookups + if lookups then + for _,lookup in next,lookups do + local rules=lookup.rules + if rules then + for i=1,#rules do + local rule=rules[i] + local r=rule.before if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end + local r=rule.after if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end + local r=rule.current if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end + local r=rule.replacements if r then rule.replacements=pack_flat (r) end + local r=rule.lookups if r then rule.lookups=pack_indexed(r) end + end + end + end + end + local anchor_to_lookup=resources.anchor_to_lookup + if anchor_to_lookup then + for anchor,lookup in next,anchor_to_lookup do + anchor_to_lookup[anchor]=pack_normal(lookup) + end + end + local lookup_to_anchor=resources.lookup_to_anchor + if lookup_to_anchor then + for lookup,anchor in next,lookup_to_anchor do + 
lookup_to_anchor[lookup]=pack_normal(anchor) + end + end + local sequences=resources.sequences + if sequences then + for feature,sequence in next,sequences do + local flags=sequence.flags + if flags then + sequence.flags=pack_normal(flags) + end + local subtables=sequence.subtables + if subtables then + sequence.subtables=pack_normal(subtables) + end + local features=sequence.features + if features then + for script,feature in next,features do + features[script]=pack_normal(feature) + end + end + local order=sequence.order + if order then + sequence.order=pack_indexed(order) + end + local markclass=sequence.markclass + if markclass then + sequence.markclass=pack_boolean(markclass) + end + end + end + local lookups=resources.lookups + if lookups then + for name,lookup in next,lookups do + local flags=lookup.flags + if flags then + lookup.flags=pack_normal(flags) + end + local subtables=lookup.subtables + if subtables then + lookup.subtables=pack_normal(subtables) + end + end + end + local features=resources.features + if features then + for _,what in next,glists do + local list=features[what] + if list then + for feature,spec in next,list do + list[feature]=pack_normal(spec) + end + end + end + end + if not success(1,pass) then + return + end + end + if nt>0 then + for pass=1,2 do + if trace_packing then + report_otf("start packing: stage 2, pass %s",pass) + end + local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass) + for unicode,description in next,data.descriptions do + local kerns=description.kerns + if kerns then + description.kerns=pack_normal(kerns) + end + local math=description.math + if math then + local kerns=math.kerns + if kerns then + math.kerns=pack_normal(kerns) + end + end + local anchors=description.anchors + if anchors then + description.anchors=pack_normal(anchors) + end + local mlookups=description.mlookups + if mlookups then + for tag,mlookup in next,mlookups do + mlookups[tag]=pack_normal(mlookup) + end + end + local altuni=description.altuni + if altuni then + description.altuni=pack_normal(altuni) + end + end + local lookups=data.lookups + if lookups then + for _,lookup in next,lookups do + local rules=lookup.rules + if rules then + for i=1,#rules do + local rule=rules[i] + local r=rule.before if r then rule.before=pack_normal(r) end + local r=rule.after if r then rule.after=pack_normal(r) end + local r=rule.current if r then rule.current=pack_normal(r) end + end + end + end + end + local sequences=resources.sequences + if sequences then + for feature,sequence in next,sequences do + sequence.features=pack_normal(sequence.features) + end + end + if not success(2,pass) then + end + end + for pass=1,2 do + local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass) + for unicode,description in next,data.descriptions do + local slookups=description.slookups + if slookups then + description.slookups=pack_normal(slookups) + end + local mlookups=description.mlookups + if mlookups then + description.mlookups=pack_normal(mlookups) + end + end + end + end + end +end +local unpacked_mt={ + __index=function(t,k) + t[k]=false + return k + end +} +local function unpackdata(data) + if data then + local tables=data.tables + if tables then + local resources=data.resources + local lookuptypes=resources.lookuptypes + local unpacked={} + setmetatable(unpacked,unpacked_mt) + for unicode,description in next,data.descriptions do + local tv=tables[description.boundingbox] + if tv then + description.boundingbox=tv + end + local 
slookups=description.slookups + if slookups then + local tv=tables[slookups] + if tv then + description.slookups=tv + slookups=unpacked[tv] + end + if slookups then + for tag,lookup in next,slookups do + local what=lookuptypes[tag] + if what=="pair" then + local tv=tables[lookup[2]] + if tv then + lookup[2]=tv + end + local tv=tables[lookup[3]] + if tv then + lookup[3]=tv + end + elseif what~="substitution" then + local tv=tables[lookup] + if tv then + slookups[tag]=tv + end + end + end + end + end + local mlookups=description.mlookups + if mlookups then + local tv=tables[mlookups] + if tv then + description.mlookups=tv + mlookups=unpacked[tv] + end + if mlookups then + for tag,list in next,mlookups do + local tv=tables[list] + if tv then + mlookups[tag]=tv + list=unpacked[tv] + end + if list then + local what=lookuptypes[tag] + if what=="pair" then + for i=1,#list do + local lookup=list[i] + local tv=tables[lookup[2]] + if tv then + lookup[2]=tv + end + local tv=tables[lookup[3]] + if tv then + lookup[3]=tv + end + end + elseif what~="substitution" then + for i=1,#list do + local tv=tables[list[i]] + if tv then + list[i]=tv + end + end + end + end + end + end + end + local kerns=description.kerns + if kerns then + local tm=tables[kerns] + if tm then + description.kerns=tm + kerns=unpacked[tm] + end + if kerns then + for k,kern in next,kerns do + local tv=tables[kern] + if tv then + kerns[k]=tv + end + end + end + end + local math=description.math + if math then + local kerns=math.kerns + if kerns then + local tm=tables[kerns] + if tm then + math.kerns=tm + kerns=unpacked[tm] + end + if kerns then + for k,kern in next,kerns do + local tv=tables[kern] + if tv then + kerns[k]=tv + end + end + end + end + end + local anchors=description.anchors + if anchors then + local ta=tables[anchors] + if ta then + description.anchors=ta + anchors=unpacked[ta] + end + if anchors then + for tag,anchor in next,anchors do + if tag=="baselig" then + for _,list in next,anchor do + for i=1,#list do + local tv=tables[list[i]] + if tv then + list[i]=tv + end + end + end + else + for a,data in next,anchor do + local tv=tables[data] + if tv then + anchor[a]=tv + end + end + end + end + end + end + local altuni=description.altuni + if altuni then + local altuni=tables[altuni] + if altuni then + description.altuni=altuni + for i=1,#altuni do + local tv=tables[altuni[i]] + if tv then + altuni[i]=tv + end + end + end + end + end + local lookups=data.lookups + if lookups then + for _,lookup in next,lookups do + local rules=lookup.rules + if rules then + for i=1,#rules do + local rule=rules[i] + local before=rule.before + if before then + local tv=tables[before] + if tv then + rule.before=tv + before=unpacked[tv] + end + if before then + for i=1,#before do + local tv=tables[before[i]] + if tv then + before[i]=tv + end + end + end + end + local after=rule.after + if after then + local tv=tables[after] + if tv then + rule.after=tv + after=unpacked[tv] + end + if after then + for i=1,#after do + local tv=tables[after[i]] + if tv then + after[i]=tv + end + end + end + end + local current=rule.current + if current then + local tv=tables[current] + if tv then + rule.current=tv + current=unpacked[tv] + end + if current then + for i=1,#current do + local tv=tables[current[i]] + if tv then + current[i]=tv + end + end + end + end + local replacements=rule.replacements + if replacements then + local tv=tables[replacements] + if tv then + rule.replacements=tv + end + end + local lookups=rule.lookups + if lookups then + local 
tv=tables[lookups] + if tv then + rule.lookups=tv + end + end + end + end + end + end + local anchor_to_lookup=resources.anchor_to_lookup + if anchor_to_lookup then + for anchor,lookup in next,anchor_to_lookup do + local tv=tables[lookup] + if tv then + anchor_to_lookup[anchor]=tv + end + end + end + local lookup_to_anchor=resources.lookup_to_anchor + if lookup_to_anchor then + for lookup,anchor in next,lookup_to_anchor do + local tv=tables[anchor] + if tv then + lookup_to_anchor[lookup]=tv + end + end + end + local ls=resources.sequences + if ls then + for _,feature in next,ls do + local flags=feature.flags + if flags then + local tv=tables[flags] + if tv then + feature.flags=tv + end + end + local subtables=feature.subtables + if subtables then + local tv=tables[subtables] + if tv then + feature.subtables=tv + end + end + local features=feature.features + if features then + local tv=tables[features] + if tv then + feature.features=tv + features=unpacked[tv] + end + if features then + for script,data in next,features do + local tv=tables[data] + if tv then + features[script]=tv + end + end + end + end + local order=feature.order + if order then + local tv=tables[order] + if tv then + feature.order=tv + end + end + local markclass=feature.markclass + if markclass then + local tv=tables[markclass] + if tv then + feature.markclass=tv + end + end + end + end + local lookups=resources.lookups + if lookups then + for _,lookup in next,lookups do + local flags=lookup.flags + if flags then + local tv=tables[flags] + if tv then + lookup.flags=tv + end + end + local subtables=lookup.subtables + if subtables then + local tv=tables[subtables] + if tv then + lookup.subtables=tv + end + end + end + end + local features=resources.features + if features then + for _,what in next,glists do + local feature=features[what] + if feature then + for tag,spec in next,feature do + local tv=tables[spec] + if tv then + feature[tag]=tv + end + end + end + end + end + data.tables=nil + end + end +end +if otf.enhancers.register then + otf.enhancers.register("pack",packdata) + otf.enhancers.register("unpack",unpackdata) +end +otf.enhancers.unpack=unpackdata +otf.enhancers.pack=packdata + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['luatex-fonts-lua']={ + version=1.001, + comment="companion to luatex-*.tex", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end +local fonts=fonts +fonts.formats.lua="lua" +function fonts.readers.lua(specification) + local fullname=specification.filename or "" + if fullname=="" then + local forced=specification.forced or "" + if forced~="" then + fullname=specification.name.."."..forced + else + fullname=specification.name + end + end + local fullname=resolvers.findfile(fullname) or "" + if fullname~="" then + local loader=loadfile(fullname) + loader=loader and loader() + return loader and loader(specification) + end +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-def']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local 
format,gmatch,match,find,lower,gsub=string.format,string.gmatch,string.match,string.find,string.lower,string.gsub +local tostring,next=tostring,next +local lpegmatch=lpeg.match +local suffixonly,removesuffix=file.suffix,file.removesuffix +local allocate=utilities.storage.allocate +local trace_defining=false trackers .register("fonts.defining",function(v) trace_defining=v end) +local directive_embedall=false directives.register("fonts.embedall",function(v) directive_embedall=v end) +trackers.register("fonts.loading","fonts.defining","otf.loading","afm.loading","tfm.loading") +trackers.register("fonts.all","fonts.*","otf.*","afm.*","tfm.*") +local report_defining=logs.reporter("fonts","defining") +local fonts=fonts +local fontdata=fonts.hashes.identifiers +local readers=fonts.readers +local definers=fonts.definers +local specifiers=fonts.specifiers +local constructors=fonts.constructors +local fontgoodies=fonts.goodies +readers.sequence=allocate { 'otf','ttf','afm','tfm','lua' } +local variants=allocate() +specifiers.variants=variants +definers.methods=definers.methods or {} +local internalized=allocate() +local lastdefined=nil +local loadedfonts=constructors.loadedfonts +local designsizes=constructors.designsizes +local resolvefile=fontgoodies and fontgoodies.filenames and fontgoodies.filenames.resolve or function(s) return s end +local splitter,splitspecifiers=nil,"" +local P,C,S,Cc=lpeg.P,lpeg.C,lpeg.S,lpeg.Cc +local left=P("(") +local right=P(")") +local colon=P(":") +local space=P(" ") +definers.defaultlookup="file" +local prefixpattern=P(false) +local function addspecifier(symbol) + splitspecifiers=splitspecifiers..symbol + local method=S(splitspecifiers) + local lookup=C(prefixpattern)*colon + local sub=left*C(P(1-left-right-method)^1)*right + local specification=C(method)*C(P(1)^1) + local name=C((1-sub-specification)^1) + splitter=P((lookup+Cc(""))*name*(sub+Cc(""))*(specification+Cc(""))) +end +local function addlookup(str,default) + prefixpattern=prefixpattern+P(str) +end +definers.addlookup=addlookup +addlookup("file") +addlookup("name") +addlookup("spec") +local function getspecification(str) + return lpegmatch(splitter,str or "") +end +definers.getspecification=getspecification +function definers.registersplit(symbol,action,verbosename) + addspecifier(symbol) + variants[symbol]=action + if verbosename then + variants[verbosename]=action + end +end +local function makespecification(specification,lookup,name,sub,method,detail,size) + size=size or 655360 + if not lookup or lookup=="" then + lookup=definers.defaultlookup + end + if trace_defining then + report_defining("specification %a, lookup %a, name %a, sub %a, method %a, detail %a", + specification,lookup,name,sub,method,detail) + end + local t={ + lookup=lookup, + specification=specification, + size=size, + name=name, + sub=sub, + method=method, + detail=detail, + resolved="", + forced="", + features={}, + } + return t +end +definers.makespecification=makespecification +function definers.analyze(specification,size) + local lookup,name,sub,method,detail=getspecification(specification or "") + return makespecification(specification,lookup,name,sub,method,detail,size) +end +definers.resolvers=definers.resolvers or {} +local resolvers=definers.resolvers +function resolvers.file(specification) + local name=resolvefile(specification.name) + local suffix=lower(suffixonly(name)) + if fonts.formats[suffix] then + specification.forced=suffix + specification.forcedname=name + specification.name=removesuffix(name) + else + 
specification.name=name + end +end +function resolvers.name(specification) + local resolve=fonts.names.resolve + if resolve then + local resolved,sub=resolve(specification.name,specification.sub,specification) + if resolved then + specification.resolved=resolved + specification.sub=sub + local suffix=lower(suffixonly(resolved)) + if fonts.formats[suffix] then + specification.forced=suffix + specification.forcedname=resolved + specification.name=removesuffix(resolved) + else + specification.name=resolved + end + end + else + resolvers.file(specification) + end +end +function resolvers.spec(specification) + local resolvespec=fonts.names.resolvespec + if resolvespec then + local resolved,sub=resolvespec(specification.name,specification.sub,specification) + if resolved then + specification.resolved=resolved + specification.sub=sub + specification.forced=lower(suffixonly(resolved)) + specification.forcedname=resolved + specification.name=removesuffix(resolved) + end + else + resolvers.name(specification) + end +end +function definers.resolve(specification) + if not specification.resolved or specification.resolved=="" then + local r=resolvers[specification.lookup] + if r then + r(specification) + end + end + if specification.forced=="" then + specification.forced=nil + specification.forcedname=nil + end + specification.hash=lower(specification.name..' @ '..constructors.hashfeatures(specification)) + if specification.sub and specification.sub~="" then + specification.hash=specification.sub..' @ '..specification.hash + end + return specification +end +function definers.applypostprocessors(tfmdata) + local postprocessors=tfmdata.postprocessors + if postprocessors then + local properties=tfmdata.properties + for i=1,#postprocessors do + local extrahash=postprocessors[i](tfmdata) + if type(extrahash)=="string" and extrahash~="" then + extrahash=gsub(lower(extrahash),"[^a-z]","-") + properties.fullname=format("%s-%s",properties.fullname,extrahash) + end + end + end + return tfmdata +end +local function checkembedding(tfmdata) + local properties=tfmdata.properties + local embedding + if directive_embedall then + embedding="full" + elseif properties and properties.filename and constructors.dontembed[properties.filename] then + embedding="no" + else + embedding="subset" + end + if properties then + properties.embedding=embedding + else + tfmdata.properties={ embedding=embedding } + end + tfmdata.embedding=embedding +end +function definers.loadfont(specification) + local hash=constructors.hashinstance(specification) + local tfmdata=loadedfonts[hash] + if not tfmdata then + local forced=specification.forced or "" + if forced~="" then + local reader=readers[lower(forced)] + tfmdata=reader and reader(specification) + if not tfmdata then + report_defining("forced type %a of %a not found",forced,specification.name) + end + else + local sequence=readers.sequence + for s=1,#sequence do + local reader=sequence[s] + if readers[reader] then + if trace_defining then + report_defining("trying (reader sequence driven) type %a for %a with file %a",reader,specification.name,specification.filename) + end + tfmdata=readers[reader](specification) + if tfmdata then + break + else + specification.filename=nil + end + end + end + end + if tfmdata then + tfmdata=definers.applypostprocessors(tfmdata) + checkembedding(tfmdata) + loadedfonts[hash]=tfmdata + designsizes[specification.hash]=tfmdata.parameters.designsize + end + end + if not tfmdata then + report_defining("font with asked name %a is not found using lookup 
%a",specification.name,specification.lookup) + end + return tfmdata +end +function constructors.checkvirtualids() +end +function constructors.readanddefine(name,size) + local specification=definers.analyze(name,size) + local method=specification.method + if method and variants[method] then + specification=variants[method](specification) + end + specification=definers.resolve(specification) + local hash=constructors.hashinstance(specification) + local id=definers.registered(hash) + if not id then + local tfmdata=definers.loadfont(specification) + if tfmdata then + tfmdata.properties.hash=hash + constructors.checkvirtualids(tfmdata) + id=font.define(tfmdata) + definers.register(tfmdata,id) + else + id=0 + end + end + return fontdata[id],id +end +function definers.current() + return lastdefined +end +function definers.registered(hash) + local id=internalized[hash] + return id,id and fontdata[id] +end +function definers.register(tfmdata,id) + if tfmdata and id then + local hash=tfmdata.properties.hash + if not hash then + report_defining("registering font, id %a, name %a, invalid hash",id,tfmdata.properties.filename or "?") + elseif not internalized[hash] then + internalized[hash]=id + if trace_defining then + report_defining("registering font, id %s, hash %a",id,hash) + end + fontdata[id]=tfmdata + end + end +end +function definers.read(specification,size,id) + statistics.starttiming(fonts) + if type(specification)=="string" then + specification=definers.analyze(specification,size) + end + local method=specification.method + if method and variants[method] then + specification=variants[method](specification) + end + specification=definers.resolve(specification) + local hash=constructors.hashinstance(specification) + local tfmdata=definers.registered(hash) + if tfmdata then + if trace_defining then + report_defining("already hashed: %s",hash) + end + else + tfmdata=definers.loadfont(specification) + if tfmdata then + if trace_defining then + report_defining("loaded and hashed: %s",hash) + end + tfmdata.properties.hash=hash + if id then + definers.register(tfmdata,id) + end + else + if trace_defining then + report_defining("not loaded and hashed: %s",hash) + end + end + end + lastdefined=tfmdata or id + if not tfmdata then + report_defining("unknown font %a, loading aborted",specification.name) + elseif trace_defining and type(tfmdata)=="table" then + local properties=tfmdata.properties or {} + local parameters=tfmdata.parameters or {} + report_defining("using %a font with id %a, name %a, size %a, bytes %a, encoding %a, fullname %a, filename %a", + properties.format or "unknown",id,properties.name,parameters.size,properties.encodingbytes, + properties.encodingname,properties.fullname,file.basename(properties.filename)) + end + statistics.stoptiming(fonts) + return tfmdata +end +function font.getfont(id) + return fontdata[id] +end +callbacks.register('define_font',definers.read,"definition of fonts (tfmdata preparation)") + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['luatex-font-def']={ + version=1.001, + comment="companion to luatex-*.tex", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end +local fonts=fonts +fonts.constructors.namemode="specification" +function fonts.definers.getspecification(str) + return 
"",str,"",":",str +end +local list={} +local function issome () list.lookup='name' end +local function isfile () list.lookup='file' end +local function isname () list.lookup='name' end +local function thename(s) list.name=s end +local function issub (v) list.sub=v end +local function iscrap (s) list.crap=string.lower(s) end +local function iskey (k,v) list[k]=v end +local function istrue (s) list[s]=true end +local function isfalse(s) list[s]=false end +local P,S,R,C=lpeg.P,lpeg.S,lpeg.R,lpeg.C +local spaces=P(" ")^0 +local namespec=(1-S("/:("))^0 +local crapspec=spaces*P("/")*(((1-P(":"))^0)/iscrap)*spaces +local filename_1=P("file:")/isfile*(namespec/thename) +local filename_2=P("[")*P(true)/isname*(((1-P("]"))^0)/thename)*P("]") +local fontname_1=P("name:")/isname*(namespec/thename) +local fontname_2=P(true)/issome*(namespec/thename) +local sometext=(R("az","AZ","09")+S("+-."))^1 +local truevalue=P("+")*spaces*(sometext/istrue) +local falsevalue=P("-")*spaces*(sometext/isfalse) +local keyvalue=(C(sometext)*spaces*P("=")*spaces*C(sometext))/iskey +local somevalue=sometext/istrue +local subvalue=P("(")*(C(P(1-S("()"))^1)/issub)*P(")") +local option=spaces*(keyvalue+falsevalue+truevalue+somevalue)*spaces +local options=P(":")*spaces*(P(";")^0*option)^0 +local pattern=(filename_1+filename_2+fontname_1+fontname_2)*subvalue^0*crapspec^0*options^0 +local function colonized(specification) + list={} + lpeg.match(pattern,specification.specification) + list.crap=nil + if list.name then + specification.name=list.name + list.name=nil + end + if list.lookup then + specification.lookup=list.lookup + list.lookup=nil + end + if list.sub then + specification.sub=list.sub + list.sub=nil + end + specification.features.normal=fonts.handlers.otf.features.normalize(list) + return specification +end +fonts.definers.registersplit(":",colonized,"cryptic") +fonts.definers.registersplit("",colonized,"more cryptic") +function fonts.definers.applypostprocessors(tfmdata) + local postprocessors=tfmdata.postprocessors + if postprocessors then + for i=1,#postprocessors do + local extrahash=postprocessors[i](tfmdata) + if type(extrahash)=="string" and extrahash~="" then + extrahash=string.gsub(lower(extrahash),"[^a-z]","-") + tfmdata.properties.fullname=format("%s-%s",tfmdata.properties.fullname,extrahash) + end + end + end + return tfmdata +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['luatex-fonts-ext']={ + version=1.001, + comment="companion to luatex-*.tex", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end +local fonts=fonts +local otffeatures=fonts.constructors.newfeatures("otf") +local function initializeitlc(tfmdata,value) + if value then + local parameters=tfmdata.parameters + local italicangle=parameters.italicangle + if italicangle and italicangle~=0 then + local properties=tfmdata.properties + local factor=tonumber(value) or 1 + properties.hasitalics=true + properties.autoitalicamount=factor*(parameters.uwidth or 40)/2 + end + end +end +otffeatures.register { + name="itlc", + description="italic correction", + initializers={ + base=initializeitlc, + node=initializeitlc, + } +} +local function initializeslant(tfmdata,value) + value=tonumber(value) + if not value then + value=0 + elseif value>1 then + value=1 + elseif value<-1 then 
+ value=-1 + end + tfmdata.parameters.slantfactor=value +end +otffeatures.register { + name="slant", + description="slant glyphs", + initializers={ + base=initializeslant, + node=initializeslant, + } +} +local function initializeextend(tfmdata,value) + value=tonumber(value) + if not value then + value=0 + elseif value>10 then + value=10 + elseif value<-10 then + value=-10 + end + tfmdata.parameters.extendfactor=value +end +otffeatures.register { + name="extend", + description="scale glyphs horizontally", + initializers={ + base=initializeextend, + node=initializeextend, + } +} +fonts.protrusions=fonts.protrusions or {} +fonts.protrusions.setups=fonts.protrusions.setups or {} +local setups=fonts.protrusions.setups +local function initializeprotrusion(tfmdata,value) + if value then + local setup=setups[value] + if setup then + local factor,left,right=setup.factor or 1,setup.left or 1,setup.right or 1 + local emwidth=tfmdata.parameters.quad + tfmdata.parameters.protrusion={ + auto=true, + } + for i,chr in next,tfmdata.characters do + local v,pl,pr=setup[i],nil,nil + if v then + pl,pr=v[1],v[2] + end + if pl and pl~=0 then chr.left_protruding=left*pl*factor end + if pr and pr~=0 then chr.right_protruding=right*pr*factor end + end + end + end +end +otffeatures.register { + name="protrusion", + description="shift characters into the left and or right margin", + initializers={ + base=initializeprotrusion, + node=initializeprotrusion, + } +} +fonts.expansions=fonts.expansions or {} +fonts.expansions.setups=fonts.expansions.setups or {} +local setups=fonts.expansions.setups +local function initializeexpansion(tfmdata,value) + if value then + local setup=setups[value] + if setup then + local factor=setup.factor or 1 + tfmdata.parameters.expansion={ + stretch=10*(setup.stretch or 0), + shrink=10*(setup.shrink or 0), + step=10*(setup.step or 0), + auto=true, + } + for i,chr in next,tfmdata.characters do + local v=setup[i] + if v and v~=0 then + chr.expansion_factor=v*factor + else + chr.expansion_factor=factor + end + end + end + end +end +otffeatures.register { + name="expansion", + description="apply hz optimization", + initializers={ + base=initializeexpansion, + node=initializeexpansion, + } +} +function fonts.loggers.onetimemessage() end +local byte=string.byte +fonts.expansions.setups['default']={ + stretch=2,shrink=2,step=.5,factor=1, + [byte('A')]=0.5,[byte('B')]=0.7,[byte('C')]=0.7,[byte('D')]=0.5,[byte('E')]=0.7, + [byte('F')]=0.7,[byte('G')]=0.5,[byte('H')]=0.7,[byte('K')]=0.7,[byte('M')]=0.7, + [byte('N')]=0.7,[byte('O')]=0.5,[byte('P')]=0.7,[byte('Q')]=0.5,[byte('R')]=0.7, + [byte('S')]=0.7,[byte('U')]=0.7,[byte('W')]=0.7,[byte('Z')]=0.7, + [byte('a')]=0.7,[byte('b')]=0.7,[byte('c')]=0.7,[byte('d')]=0.7,[byte('e')]=0.7, + [byte('g')]=0.7,[byte('h')]=0.7,[byte('k')]=0.7,[byte('m')]=0.7,[byte('n')]=0.7, + [byte('o')]=0.7,[byte('p')]=0.7,[byte('q')]=0.7,[byte('s')]=0.7,[byte('u')]=0.7, + [byte('w')]=0.7,[byte('z')]=0.7, + [byte('2')]=0.7,[byte('3')]=0.7,[byte('6')]=0.7,[byte('8')]=0.7,[byte('9')]=0.7, +} +fonts.protrusions.setups['default']={ + factor=1,left=1,right=1, + [0x002C]={ 0,1 }, + [0x002E]={ 0,1 }, + [0x003A]={ 0,1 }, + [0x003B]={ 0,1 }, + [0x002D]={ 0,1 }, + [0x2013]={ 0,0.50 }, + [0x2014]={ 0,0.33 }, + [0x3001]={ 0,1 }, + [0x3002]={ 0,1 }, + [0x060C]={ 0,1 }, + [0x061B]={ 0,1 }, + [0x06D4]={ 0,1 }, +} +fonts.handlers.otf.features.normalize=function(t) + if t.rand then + t.rand="random" + end + return t +end +function fonts.helpers.nametoslot(name) + local t=type(name) + if 
t=="string" then + local tfmdata=fonts.hashes.identifiers[currentfont()] + local shared=tfmdata and tfmdata.shared + local fntdata=shared and shared.rawdata + return fntdata and fntdata.resources.unicodes[name] + elseif t=="number" then + return n + end +end +fonts.encodings=fonts.encodings or {} +local reencodings={} +fonts.encodings.reencodings=reencodings +local function specialreencode(tfmdata,value) + local encoding=value and reencodings[value] + if encoding then + local temp={} + local char=tfmdata.characters + for k,v in next,encoding do + temp[k]=char[v] + end + for k,v in next,temp do + char[k]=temp[k] + end + return string.format("reencoded:%s",value) + end +end +local function reencode(tfmdata,value) + tfmdata.postprocessors=tfmdata.postprocessors or {} + table.insert(tfmdata.postprocessors, + function(tfmdata) + return specialreencode(tfmdata,value) + end + ) +end +otffeatures.register { + name="reencode", + description="reencode characters", + manipulators={ + base=reencode, + node=reencode, + } +} + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['luatex-fonts-cbk']={ + version=1.001, + comment="companion to luatex-*.tex", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end +local fonts=fonts +local nodes=nodes +local traverse_id=node.traverse_id +local glyph_code=nodes.nodecodes.glyph +local ligaturing=node.ligaturing +local kerning=node.kerning +function node.ligaturing() texio.write_nl("warning: node.ligaturing is already applied") end +function node.kerning () texio.write_nl("warning: node.kerning is already applied") end +function nodes.handlers.characters(head) + local fontdata=fonts.hashes.identifiers + if fontdata then + local usedfonts,basefonts,prevfont,basefont={},{},nil,nil + for n in traverse_id(glyph_code,head) do + local font=n.font + if font~=prevfont then + if basefont then + basefont[2]=n.prev + end + prevfont=font + local used=usedfonts[font] + if not used then + local tfmdata=fontdata[font] + if tfmdata then + local shared=tfmdata.shared + if shared then + local processors=shared.processes + if processors and #processors>0 then + usedfonts[font]=processors + else + basefont={ n,nil } + basefonts[#basefonts+1]=basefont + end + end + end + end + end + end + if next(usedfonts) then + for font,processors in next,usedfonts do + for i=1,#processors do + head=processors[i](head,font,0) or head + end + end + end + if #basefonts>0 then + for i=1,#basefonts do + local range=basefonts[i] + local start,stop=range[1],range[2] + if stop then + ligaturing(start,stop) + kerning(start,stop) + else + ligaturing(start) + kerning(start) + end + end + end + return head,true + else + return head,false + end +end +function nodes.simple_font_handler(head) + head=nodes.handlers.characters(head) + nodes.injections.handler(head) + nodes.handlers.protectglyphs(head) + return head +end + +end -- closure diff --git a/src/fontloader/fontloader-fonts-cbk.lua b/src/fontloader/fontloader-fonts-cbk.lua new file mode 100644 index 0000000..9db94f6 --- /dev/null +++ b/src/fontloader/fontloader-fonts-cbk.lua @@ -0,0 +1,68 @@ +if not modules then modules = { } end modules ['luatex-fonts-cbk'] = { + version = 1.001, + comment = "companion to luatex-*.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA 
ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end + +local fonts = fonts +local nodes = nodes + +-- Fonts: (might move to node-gef.lua) + +local traverse_id = node.traverse_id +local glyph_code = nodes.nodecodes.glyph + +function nodes.handlers.characters(head) + local fontdata = fonts.hashes.identifiers + if fontdata then + local usedfonts, done, prevfont = { }, false, nil + for n in traverse_id(glyph_code,head) do + local font = n.font + if font ~= prevfont then + prevfont = font + local used = usedfonts[font] + if not used then + local tfmdata = fontdata[font] -- + if tfmdata then + local shared = tfmdata.shared -- we need to check shared, only when same features + if shared then + local processors = shared.processes + if processors and #processors > 0 then + usedfonts[font] = processors + done = true + end + end + end + end + end + end + if done then + for font, processors in next, usedfonts do + for i=1,#processors do + local h, d = processors[i](head,font,0) + head, done = h or head, done or d + end + end + end + return head, true + else + return head, false + end +end + +function nodes.simple_font_handler(head) +-- lang.hyphenate(head) + head = nodes.handlers.characters(head) + nodes.injections.handler(head) + nodes.handlers.protectglyphs(head) + head = node.ligaturing(head) + head = node.kerning(head) + return head +end diff --git a/src/fontloader/fontloader-fonts-def.lua b/src/fontloader/fontloader-fonts-def.lua new file mode 100644 index 0000000..0c2f0db --- /dev/null +++ b/src/fontloader/fontloader-fonts-def.lua @@ -0,0 +1,97 @@ +if not modules then modules = { } end modules ['luatex-font-def'] = { + version = 1.001, + comment = "companion to luatex-*.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end + +local fonts = fonts + +-- A bit of tuning for definitions. + +fonts.constructors.namemode = "specification" -- somehow latex needs this (changed name!) => will change into an overload + +-- tricky: we sort of bypass the parser and directly feed all into +-- the sub parser + +function fonts.definers.getspecification(str) + return "", str, "", ":", str +end + +-- the generic name parser (different from context!) + +local list = { } + +local function issome () list.lookup = 'name' end -- xetex mode prefers name (not in context!) 
+local function isfile () list.lookup = 'file' end +local function isname () list.lookup = 'name' end +local function thename(s) list.name = s end +local function issub (v) list.sub = v end +local function iscrap (s) list.crap = string.lower(s) end +local function iskey (k,v) list[k] = v end +local function istrue (s) list[s] = true end +local function isfalse(s) list[s] = false end + +local P, S, R, C = lpeg.P, lpeg.S, lpeg.R, lpeg.C + +local spaces = P(" ")^0 +local namespec = (1-S("/:("))^0 -- was: (1-S("/: ("))^0 +local crapspec = spaces * P("/") * (((1-P(":"))^0)/iscrap) * spaces +local filename_1 = P("file:")/isfile * (namespec/thename) +local filename_2 = P("[") * P(true)/isname * (((1-P("]"))^0)/thename) * P("]") +local fontname_1 = P("name:")/isname * (namespec/thename) +local fontname_2 = P(true)/issome * (namespec/thename) +local sometext = (R("az","AZ","09") + S("+-."))^1 +local truevalue = P("+") * spaces * (sometext/istrue) +local falsevalue = P("-") * spaces * (sometext/isfalse) +local keyvalue = (C(sometext) * spaces * P("=") * spaces * C(sometext))/iskey +local somevalue = sometext/istrue +local subvalue = P("(") * (C(P(1-S("()"))^1)/issub) * P(")") -- for Kim +local option = spaces * (keyvalue + falsevalue + truevalue + somevalue) * spaces +local options = P(":") * spaces * (P(";")^0 * option)^0 + +local pattern = (filename_1 + filename_2 + fontname_1 + fontname_2) * subvalue^0 * crapspec^0 * options^0 + +local function colonized(specification) -- xetex mode + list = { } + lpeg.match(pattern,specification.specification) + list.crap = nil -- style not supported, maybe some day + if list.name then + specification.name = list.name + list.name = nil + end + if list.lookup then + specification.lookup = list.lookup + list.lookup = nil + end + if list.sub then + specification.sub = list.sub + list.sub = nil + end + specification.features.normal = fonts.handlers.otf.features.normalize(list) + return specification +end + +fonts.definers.registersplit(":",colonized,"cryptic") +fonts.definers.registersplit("", colonized,"more cryptic") -- catches \font\text=[names] + +function fonts.definers.applypostprocessors(tfmdata) + local postprocessors = tfmdata.postprocessors + if postprocessors then + for i=1,#postprocessors do + local extrahash = postprocessors[i](tfmdata) -- after scaling etc + if type(extrahash) == "string" and extrahash ~= "" then + -- e.g. 
a reencoding needs this + extrahash = string.gsub(lower(extrahash),"[^a-z]","-") + tfmdata.properties.fullname = format("%s-%s",tfmdata.properties.fullname,extrahash) + end + end + end + return tfmdata +end diff --git a/src/fontloader/fontloader-fonts-enc.lua b/src/fontloader/fontloader-fonts-enc.lua new file mode 100644 index 0000000..e20c3a0 --- /dev/null +++ b/src/fontloader/fontloader-fonts-enc.lua @@ -0,0 +1,28 @@ +if not modules then modules = { } end modules ['luatex-font-enc'] = { + version = 1.001, + comment = "companion to luatex-*.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end + +local fonts = fonts +fonts.encodings = { } +fonts.encodings.agl = { } + +setmetatable(fonts.encodings.agl, { __index = function(t,k) + if k == "unicodes" then + texio.write(" ") + local unicodes = dofile(resolvers.findfile("font-age.lua")) + fonts.encodings.agl = { unicodes = unicodes } + return unicodes + else + return nil + end +end }) + diff --git a/src/fontloader/fontloader-fonts-ext.lua b/src/fontloader/fontloader-fonts-ext.lua new file mode 100644 index 0000000..b60d045 --- /dev/null +++ b/src/fontloader/fontloader-fonts-ext.lua @@ -0,0 +1,272 @@ +if not modules then modules = { } end modules ['luatex-fonts-ext'] = { + version = 1.001, + comment = "companion to luatex-*.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end + +local fonts = fonts +local otffeatures = fonts.constructors.newfeatures("otf") + +-- A few generic extensions. 
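-- As a hedged illustration (the font name and the values below are made up; the
-- feature names are the ones registered in this file): these extensions are
-- requested through the generic definer syntax, i.e. the colonized parser in the
-- companion -def file, along the lines of
--
--   \font\body=file:lmroman12-regular:+itlc;slant=0.2;extend=1.05;protrusion=default;expansion=default
--
-- where each keyword is dispatched to one of the initializers registered with
-- otffeatures below.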
+ +local function initializeitlc(tfmdata,value) + if value then + -- the magic 40 and it formula come from Dohyun Kim but we might need another guess + local parameters = tfmdata.parameters + local italicangle = parameters.italicangle + if italicangle and italicangle ~= 0 then + local properties = tfmdata.properties + local factor = tonumber(value) or 1 + properties.hasitalics = true + properties.autoitalicamount = factor * (parameters.uwidth or 40)/2 + end + end +end + +otffeatures.register { + name = "itlc", + description = "italic correction", + initializers = { + base = initializeitlc, + node = initializeitlc, + } +} + +-- slant and extend + +local function initializeslant(tfmdata,value) + value = tonumber(value) + if not value then + value = 0 + elseif value > 1 then + value = 1 + elseif value < -1 then + value = -1 + end + tfmdata.parameters.slantfactor = value +end + +otffeatures.register { + name = "slant", + description = "slant glyphs", + initializers = { + base = initializeslant, + node = initializeslant, + } +} + +local function initializeextend(tfmdata,value) + value = tonumber(value) + if not value then + value = 0 + elseif value > 10 then + value = 10 + elseif value < -10 then + value = -10 + end + tfmdata.parameters.extendfactor = value +end + +otffeatures.register { + name = "extend", + description = "scale glyphs horizontally", + initializers = { + base = initializeextend, + node = initializeextend, + } +} + +-- expansion and protrusion + +fonts.protrusions = fonts.protrusions or { } +fonts.protrusions.setups = fonts.protrusions.setups or { } + +local setups = fonts.protrusions.setups + +local function initializeprotrusion(tfmdata,value) + if value then + local setup = setups[value] + if setup then + local factor, left, right = setup.factor or 1, setup.left or 1, setup.right or 1 + local emwidth = tfmdata.parameters.quad + tfmdata.parameters.protrusion = { + auto = true, + } + for i, chr in next, tfmdata.characters do + local v, pl, pr = setup[i], nil, nil + if v then + pl, pr = v[1], v[2] + end + if pl and pl ~= 0 then chr.left_protruding = left *pl*factor end + if pr and pr ~= 0 then chr.right_protruding = right*pr*factor end + end + end + end +end + +otffeatures.register { + name = "protrusion", + description = "shift characters into the left and or right margin", + initializers = { + base = initializeprotrusion, + node = initializeprotrusion, + } +} + +fonts.expansions = fonts.expansions or { } +fonts.expansions.setups = fonts.expansions.setups or { } + +local setups = fonts.expansions.setups + +local function initializeexpansion(tfmdata,value) + if value then + local setup = setups[value] + if setup then + local factor = setup.factor or 1 + tfmdata.parameters.expansion = { + stretch = 10 * (setup.stretch or 0), + shrink = 10 * (setup.shrink or 0), + step = 10 * (setup.step or 0), + auto = true, + } + for i, chr in next, tfmdata.characters do + local v = setup[i] + if v and v ~= 0 then + chr.expansion_factor = v*factor + else -- can be option + chr.expansion_factor = factor + end + end + end + end +end + +otffeatures.register { + name = "expansion", + description = "apply hz optimization", + initializers = { + base = initializeexpansion, + node = initializeexpansion, + } +} + +-- left over + +function fonts.loggers.onetimemessage() end + +-- example vectors + +local byte = string.byte + +fonts.expansions.setups['default'] = { + + stretch = 2, shrink = 2, step = .5, factor = 1, + + [byte('A')] = 0.5, [byte('B')] = 0.7, [byte('C')] = 0.7, [byte('D')] = 0.5, [byte('E')] 
= 0.7, + [byte('F')] = 0.7, [byte('G')] = 0.5, [byte('H')] = 0.7, [byte('K')] = 0.7, [byte('M')] = 0.7, + [byte('N')] = 0.7, [byte('O')] = 0.5, [byte('P')] = 0.7, [byte('Q')] = 0.5, [byte('R')] = 0.7, + [byte('S')] = 0.7, [byte('U')] = 0.7, [byte('W')] = 0.7, [byte('Z')] = 0.7, + [byte('a')] = 0.7, [byte('b')] = 0.7, [byte('c')] = 0.7, [byte('d')] = 0.7, [byte('e')] = 0.7, + [byte('g')] = 0.7, [byte('h')] = 0.7, [byte('k')] = 0.7, [byte('m')] = 0.7, [byte('n')] = 0.7, + [byte('o')] = 0.7, [byte('p')] = 0.7, [byte('q')] = 0.7, [byte('s')] = 0.7, [byte('u')] = 0.7, + [byte('w')] = 0.7, [byte('z')] = 0.7, + [byte('2')] = 0.7, [byte('3')] = 0.7, [byte('6')] = 0.7, [byte('8')] = 0.7, [byte('9')] = 0.7, +} + +fonts.protrusions.setups['default'] = { + + factor = 1, left = 1, right = 1, + + [0x002C] = { 0, 1 }, -- comma + [0x002E] = { 0, 1 }, -- period + [0x003A] = { 0, 1 }, -- colon + [0x003B] = { 0, 1 }, -- semicolon + [0x002D] = { 0, 1 }, -- hyphen + [0x2013] = { 0, 0.50 }, -- endash + [0x2014] = { 0, 0.33 }, -- emdash + [0x3001] = { 0, 1 }, -- ideographic comma 、 + [0x3002] = { 0, 1 }, -- ideographic full stop 。 + [0x060C] = { 0, 1 }, -- arabic comma ، + [0x061B] = { 0, 1 }, -- arabic semicolon ؛ + [0x06D4] = { 0, 1 }, -- arabic full stop ۔ + +} + +-- normalizer + +fonts.handlers.otf.features.normalize = function(t) + if t.rand then + t.rand = "random" + end + return t +end + +-- bonus + +function fonts.helpers.nametoslot(name) + local t = type(name) + if t == "string" then + local tfmdata = fonts.hashes.identifiers[currentfont()] + local shared = tfmdata and tfmdata.shared + local fntdata = shared and shared.rawdata + return fntdata and fntdata.resources.unicodes[name] + elseif t == "number" then + return n + end +end + +-- \font\test=file:somefont:reencode=mymessup +-- +-- fonts.encodings.reencodings.mymessup = { +-- [109] = 110, -- m +-- [110] = 109, -- n +-- } + +fonts.encodings = fonts.encodings or { } +local reencodings = { } +fonts.encodings.reencodings = reencodings + +local function specialreencode(tfmdata,value) + -- we forget about kerns as we assume symbols and we + -- could issue a message if ther are kerns but it's + -- a hack anyway so we odn't care too much here + local encoding = value and reencodings[value] + if encoding then + local temp = { } + local char = tfmdata.characters + for k, v in next, encoding do + temp[k] = char[v] + end + for k, v in next, temp do + char[k] = temp[k] + end + -- if we use the font otherwise luatex gets confused so + -- we return an additional hash component for fullname + return string.format("reencoded:%s",value) + end +end + +local function reencode(tfmdata,value) + tfmdata.postprocessors = tfmdata.postprocessors or { } + table.insert(tfmdata.postprocessors, + function(tfmdata) + return specialreencode(tfmdata,value) + end + ) +end + +otffeatures.register { + name = "reencode", + description = "reencode characters", + manipulators = { + base = reencode, + node = reencode, + } +} diff --git a/src/fontloader/fontloader-fonts-inj.lua b/src/fontloader/fontloader-fonts-inj.lua new file mode 100644 index 0000000..ae48150 --- /dev/null +++ b/src/fontloader/fontloader-fonts-inj.lua @@ -0,0 +1,526 @@ +if not modules then modules = { } end modules ['node-inj'] = { + version = 1.001, + comment = "companion to node-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files", +} + +-- This is very experimental (this will change when we have luatex > .50 
and +-- a few pending thingies are available. Also, Idris needs to make a few more +-- test fonts. Btw, future versions of luatex will have extended glyph properties +-- that can be of help. Some optimizations can go away when we have faster machines. + +-- todo: make a special one for context + +local next = next +local utfchar = utf.char + +local trace_injections = false trackers.register("nodes.injections", function(v) trace_injections = v end) + +local report_injections = logs.reporter("nodes","injections") + +local attributes, nodes, node = attributes, nodes, node + +fonts = fonts +local fontdata = fonts.hashes.identifiers + +nodes.injections = nodes.injections or { } +local injections = nodes.injections + +local nodecodes = nodes.nodecodes +local glyph_code = nodecodes.glyph +local kern_code = nodecodes.kern +local nodepool = nodes.pool +local newkern = nodepool.kern + +local traverse_id = node.traverse_id +local insert_node_before = node.insert_before +local insert_node_after = node.insert_after + +local a_kernpair = attributes.private('kernpair') +local a_ligacomp = attributes.private('ligacomp') +local a_markbase = attributes.private('markbase') +local a_markmark = attributes.private('markmark') +local a_markdone = attributes.private('markdone') +local a_cursbase = attributes.private('cursbase') +local a_curscurs = attributes.private('curscurs') +local a_cursdone = attributes.private('cursdone') + +-- This injector has been tested by Idris Samawi Hamid (several arabic fonts as well as +-- the rather demanding Husayni font), Khaled Hosny (latin and arabic) and Kaj Eigner +-- (arabic, hebrew and thai) and myself (whatever font I come across). I'm pretty sure +-- that this code is not 100% okay but examples are needed to figure things out. + +function injections.installnewkern(nk) + newkern = nk or newkern +end + +local cursives = { } +local marks = { } +local kerns = { } + +-- Currently we do gpos/kern in a bit inofficial way but when we have the extra fields in +-- glyphnodes to manipulate ht/dp/wd explicitly I will provide an alternative; also, we +-- can share tables. + +-- For the moment we pass the r2l key ... volt/arabtype tests .. idris: this needs +-- checking with husayni (volt and fontforge). 
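-- A rough sketch of the data flow implemented below (argument names are
-- illustrative; the functions and their signatures are the ones defined in this
-- file): the positioning code in font-otn.lua records its results here instead of
-- changing the node list directly, e.g.
--
--   local dx, bound = injections.setkern(glyphnode, factor, rlmode, kernvalue, character)
--
-- only stores { rlmode, dx } in the shared kerns table and tags the glyph node
-- with the 'kernpair' attribute; nothing is inserted until
--
--   head, done = injections.handler(head, where, keep)
--
-- runs after the substitution/positioning pass and turns the recorded kerns,
-- marks and cursives into real kern nodes and x/y offsets.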
+ +function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext) + local dx, dy = factor*(exit[1]-entry[1]), factor*(exit[2]-entry[2]) + local ws, wn = tfmstart.width, tfmnext.width + local bound = #cursives + 1 + start[a_cursbase] = bound + nxt[a_curscurs] = bound + cursives[bound] = { rlmode, dx, dy, ws, wn } + return dx, dy, bound +end + +function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr) + local x, y, w, h = factor*spec[1], factor*spec[2], factor*spec[3], factor*spec[4] + -- dy = y - h + if x ~= 0 or w ~= 0 or y ~= 0 or h ~= 0 then + local bound = current[a_kernpair] + if bound then + local kb = kerns[bound] + -- inefficient but singles have less, but weird anyway, needs checking + kb[2], kb[3], kb[4], kb[5] = (kb[2] or 0) + x, (kb[3] or 0) + y, (kb[4] or 0)+ w, (kb[5] or 0) + h + else + bound = #kerns + 1 + current[a_kernpair] = bound + kerns[bound] = { rlmode, x, y, w, h, r2lflag, tfmchr.width } + end + return x, y, w, h, bound + end + return x, y, w, h -- no bound +end + +function injections.setkern(current,factor,rlmode,x,tfmchr) + local dx = factor*x + if dx ~= 0 then + local bound = #kerns + 1 + current[a_kernpair] = bound + kerns[bound] = { rlmode, dx } + return dx, bound + else + return 0, 0 + end +end + +function injections.setmark(start,base,factor,rlmode,ba,ma,index,baseismark) -- ba=baseanchor, ma=markanchor + local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2]) -- the index argument is no longer used but when this + local bound = base[a_markbase] -- fails again we should pass it + local index = 1 + if bound then + local mb = marks[bound] + if mb then + -- if not index then index = #mb + 1 end + index = #mb + 1 + mb[index] = { dx, dy, rlmode } + start[a_markmark] = bound + start[a_markdone] = index + return dx, dy, bound + else + report_injections("possible problem, %U is base mark without data (id %a)",base.char,bound) + end + end +-- index = index or 1 + index = index or 1 + bound = #marks + 1 + base[a_markbase] = bound + start[a_markmark] = bound + start[a_markdone] = index + marks[bound] = { [index] = { dx, dy, rlmode, baseismark } } + return dx, dy, bound +end + +local function dir(n) + return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset" +end + +local function trace(head) + report_injections("begin run") + for n in traverse_id(glyph_code,head) do + if n.subtype < 256 then + local kp = n[a_kernpair] + local mb = n[a_markbase] + local mm = n[a_markmark] + local md = n[a_markdone] + local cb = n[a_cursbase] + local cc = n[a_curscurs] + local char = n.char + report_injections("font %s, char %U, glyph %c",n.font,char,char) + if kp then + local k = kerns[kp] + if k[3] then + report_injections(" pairkern: dir %a, x %p, y %p, w %p, h %p",dir(k[1]),k[2],k[3],k[4],k[5]) + else + report_injections(" kern: dir %a, dx %p",dir(k[1]),k[2]) + end + end + if mb then + report_injections(" markbase: bound %a",mb) + end + if mm then + local m = marks[mm] + if mb then + local m = m[mb] + if m then + report_injections(" markmark: bound %a, index %a, dx %p, dy %p",mm,md,m[1],m[2]) + else + report_injections(" markmark: bound %a, missing index",mm) + end + else + m = m[1] + report_injections(" markmark: bound %a, dx %p, dy %p",mm,m and m[1],m and m[2]) + end + end + if cb then + report_injections(" cursbase: bound %a",cb) + end + if cc then + local c = cursives[cc] + report_injections(" curscurs: bound %a, dir %a, dx %p, dy %p",cc,dir(c[1]),c[2],c[3]) + end + end + end + report_injections("end run") +end + +-- todo: reuse 
tables (i.e. no collection), but will be extra fields anyway +-- todo: check for attribute + +-- We can have a fast test on a font being processed, so we can check faster for marks etc +-- but I'll make a context variant anyway. + +local function show_result(head) + local current = head + local skipping = false + while current do + local id = current.id + if id == glyph_code then + report_injections("char: %C, width %p, xoffset %p, yoffset %p",current.char,current.width,current.xoffset,current.yoffset) + skipping = false + elseif id == kern_code then + report_injections("kern: %p",current.kern) + skipping = false + elseif not skipping then + report_injections() + skipping = true + end + current = current.next + end +end + +function injections.handler(head,where,keep) + local has_marks, has_cursives, has_kerns = next(marks), next(cursives), next(kerns) + if has_marks or has_cursives then + if trace_injections then + trace(head) + end + -- in the future variant we will not copy items but refs to tables + local done, ky, rl, valid, cx, wx, mk, nofvalid = false, { }, { }, { }, { }, { }, { }, 0 + if has_kerns then -- move outside loop + local nf, tm = nil, nil + for n in traverse_id(glyph_code,head) do -- only needed for relevant fonts + if n.subtype < 256 then + nofvalid = nofvalid + 1 + valid[nofvalid] = n + if n.font ~= nf then + nf = n.font + tm = fontdata[nf].resources.marks + end + if tm then + mk[n] = tm[n.char] + end + local k = n[a_kernpair] + if k then + local kk = kerns[k] + if kk then + local x, y, w, h = kk[2] or 0, kk[3] or 0, kk[4] or 0, kk[5] or 0 + local dy = y - h + if dy ~= 0 then + ky[n] = dy + end + if w ~= 0 or x ~= 0 then + wx[n] = kk + end + rl[n] = kk[1] -- could move in test + end + end + end + end + else + local nf, tm = nil, nil + for n in traverse_id(glyph_code,head) do + if n.subtype < 256 then + nofvalid = nofvalid + 1 + valid[nofvalid] = n + if n.font ~= nf then + nf = n.font + tm = fontdata[nf].resources.marks + end + if tm then + mk[n] = tm[n.char] + end + end + end + end + if nofvalid > 0 then + -- we can assume done == true because we have cursives and marks + local cx = { } + if has_kerns and next(ky) then + for n, k in next, ky do + n.yoffset = k + end + end + -- todo: reuse t and use maxt + if has_cursives then + local p_cursbase, p = nil, nil + -- since we need valid[n+1] we can also use a "while true do" + local t, d, maxt = { }, { }, 0 + for i=1,nofvalid do -- valid == glyphs + local n = valid[i] + if not mk[n] then + local n_cursbase = n[a_cursbase] + if p_cursbase then + local n_curscurs = n[a_curscurs] + if p_cursbase == n_curscurs then + local c = cursives[n_curscurs] + if c then + local rlmode, dx, dy, ws, wn = c[1], c[2], c[3], c[4], c[5] + if rlmode >= 0 then + dx = dx - ws + else + dx = dx + wn + end + if dx ~= 0 then + cx[n] = dx + rl[n] = rlmode + end + -- if rlmode and rlmode < 0 then + dy = -dy + -- end + maxt = maxt + 1 + t[maxt] = p + d[maxt] = dy + else + maxt = 0 + end + end + elseif maxt > 0 then + local ny = n.yoffset + for i=maxt,1,-1 do + ny = ny + d[i] + local ti = t[i] + ti.yoffset = ti.yoffset + ny + end + maxt = 0 + end + if not n_cursbase and maxt > 0 then + local ny = n.yoffset + for i=maxt,1,-1 do + ny = ny + d[i] + local ti = t[i] + ti.yoffset = ny + end + maxt = 0 + end + p_cursbase, p = n_cursbase, n + end + end + if maxt > 0 then + local ny = n.yoffset + for i=maxt,1,-1 do + ny = ny + d[i] + local ti = t[i] + ti.yoffset = ny + end + maxt = 0 + end + if not keep then + cursives = { } + end + end + if has_marks then + for 
i=1,nofvalid do + local p = valid[i] + local p_markbase = p[a_markbase] + if p_markbase then + local mrks = marks[p_markbase] + local nofmarks = #mrks + for n in traverse_id(glyph_code,p.next) do + local n_markmark = n[a_markmark] + if p_markbase == n_markmark then + local index = n[a_markdone] or 1 + local d = mrks[index] + if d then + local rlmode = d[3] + -- + local k = wx[p] + if k then + local x = k[2] + local w = k[4] + if w then + if rlmode and rlmode >= 0 then + -- kern(x) glyph(p) kern(w-x) mark(n) + n.xoffset = p.xoffset - p.width + d[1] - (w-x) + else + -- kern(w-x) glyph(p) kern(x) mark(n) + n.xoffset = p.xoffset - d[1] - x + end + else + if rlmode and rlmode >= 0 then + -- okay for husayni + n.xoffset = p.xoffset - p.width + d[1] + else + -- needs checking: is x ok here? + n.xoffset = p.xoffset - d[1] - x + end + end + else + if rlmode and rlmode >= 0 then + n.xoffset = p.xoffset - p.width + d[1] + else + n.xoffset = p.xoffset - d[1] + end + local w = n.width + if w ~= 0 then + insert_node_before(head,n,newkern(-w/2)) + insert_node_after(head,n,newkern(-w/2)) + end + end + -- -- + if mk[p] then + n.yoffset = p.yoffset + d[2] + else + n.yoffset = n.yoffset + p.yoffset + d[2] + end + -- + if nofmarks == 1 then + break + else + nofmarks = nofmarks - 1 + end + end + else + -- KE: there can be sequences in ligatures + end + end + end + end + if not keep then + marks = { } + end + end + -- todo : combine + if next(wx) then + for n, k in next, wx do + -- only w can be nil (kernclasses), can be sped up when w == nil + local x = k[2] + local w = k[4] + if w then + local rl = k[1] -- r2l = k[6] + local wx = w - x + if rl < 0 then -- KE: don't use r2l here + if wx ~= 0 then + insert_node_before(head,n,newkern(wx)) -- type 0/2 + end + if x ~= 0 then + insert_node_after (head,n,newkern(x)) -- type 0/2 + end + else + if x ~= 0 then + insert_node_before(head,n,newkern(x)) -- type 0/2 + end + if wx ~= 0 then + insert_node_after (head,n,newkern(wx)) -- type 0/2 + end + end + elseif x ~= 0 then + -- this needs checking for rl < 0 but it is unlikely that a r2l script + -- uses kernclasses between glyphs so we're probably safe (KE has a + -- problematic font where marks interfere with rl < 0 in the previous + -- case) + insert_node_before(head,n,newkern(x)) -- a real font kern, type 0 + end + end + end + if next(cx) then + for n, k in next, cx do + if k ~= 0 then + local rln = rl[n] + if rln and rln < 0 then + insert_node_before(head,n,newkern(-k)) -- type 0/2 + else + insert_node_before(head,n,newkern(k)) -- type 0/2 + end + end + end + end + if not keep then + kerns = { } + end + -- if trace_injections then + -- show_result(head) + -- end + return head, true + elseif not keep then + kerns, cursives, marks = { }, { }, { } + end + elseif has_kerns then + if trace_injections then + trace(head) + end + for n in traverse_id(glyph_code,head) do + if n.subtype < 256 then + local k = n[a_kernpair] + if k then + local kk = kerns[k] + if kk then + local rl, x, y, w = kk[1], kk[2] or 0, kk[3], kk[4] + if y and y ~= 0 then + n.yoffset = y -- todo: h ? + end + if w then + -- copied from above + -- local r2l = kk[6] + local wx = w - x + if rl < 0 then -- KE: don't use r2l here + if wx ~= 0 then + insert_node_before(head,n,newkern(wx)) + end + if x ~= 0 then + insert_node_after (head,n,newkern(x)) + end + else + if x ~= 0 then + insert_node_before(head,n,newkern(x)) + end + if wx ~= 0 then + insert_node_after(head,n,newkern(wx)) + end + end + else + -- simple (e.g. 
kernclass kerns) + if x ~= 0 then + insert_node_before(head,n,newkern(x)) + end + end + end + end + end + end + if not keep then + kerns = { } + end + -- if trace_injections then + -- show_result(head) + -- end + return head, true + else + -- no tracing needed + end + return head, false +end diff --git a/src/fontloader/fontloader-fonts-lua.lua b/src/fontloader/fontloader-fonts-lua.lua new file mode 100644 index 0000000..ec3fe38 --- /dev/null +++ b/src/fontloader/fontloader-fonts-lua.lua @@ -0,0 +1,33 @@ +if not modules then modules = { } end modules ['luatex-fonts-lua'] = { + version = 1.001, + comment = "companion to luatex-*.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end + +local fonts = fonts +fonts.formats.lua = "lua" + +function fonts.readers.lua(specification) + local fullname = specification.filename or "" + if fullname == "" then + local forced = specification.forced or "" + if forced ~= "" then + fullname = specification.name .. "." .. forced + else + fullname = specification.name + end + end + local fullname = resolvers.findfile(fullname) or "" + if fullname ~= "" then + local loader = loadfile(fullname) + loader = loader and loader() + return loader and loader(specification) + end +end diff --git a/src/fontloader/fontloader-fonts-otn.lua b/src/fontloader/fontloader-fonts-otn.lua new file mode 100644 index 0000000..c57be5f --- /dev/null +++ b/src/fontloader/fontloader-fonts-otn.lua @@ -0,0 +1,2848 @@ +if not modules then modules = { } end modules ['font-otn'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files", +} + +-- preprocessors = { "nodes" } + +-- this is still somewhat preliminary and it will get better in due time; +-- much functionality could only be implemented thanks to the husayni font +-- of Idris Samawi Hamid to who we dedicate this module. + +-- in retrospect it always looks easy but believe it or not, it took a lot +-- of work to get proper open type support done: buggy fonts, fuzzy specs, +-- special made testfonts, many skype sessions between taco, idris and me, +-- torture tests etc etc ... unfortunately the code does not show how much +-- time it took ... + +-- todo: +-- +-- kerning is probably not yet ok for latin around dics nodes (interesting challenge) +-- extension infrastructure (for usage out of context) +-- sorting features according to vendors/renderers +-- alternative loop quitters +-- check cursive and r2l +-- find out where ignore-mark-classes went +-- default features (per language, script) +-- handle positions (we need example fonts) +-- handle gpos_single (we might want an extra width field in glyph nodes because adding kerns might interfere) +-- mark (to mark) code is still not what it should be (too messy but we need some more extreem husayni tests) +-- remove some optimizations (when I have a faster machine) +-- +-- maybe redo the lot some way (more context specific) + +--[[ldx-- +

This module is a bit more split up than I'd like but since we also want to test
with plain it has to be so. This module is part of ConTeXt and discussion about
improvements and functionality mostly happens on the ConTeXt mailing list.

The specification of OpenType is kind of vague. Apart from the lack of proper free
specifications there's also the problem that Microsoft and Adobe may have their own
interpretation of how and in what order to apply features. In general the Microsoft
website has more detailed specifications and is a better reference. There is also
some information in the FontForge help files.

Because there is so much possible, fonts might contain bugs and/or be made to work
with certain renderers. These may evolve over time which may have the side effect
that suddenly fonts behave differently.

After a lot of experiments (mostly by Taco, me and Idris) we're now at yet another
implementation. Of course all errors are mine and of course the code can be
improved. There are quite some optimizations going on here and processing speed is
currently acceptable. Not all functions are implemented yet, often because I lack
the fonts for testing. Many scripts are not yet supported either, but I will look
into them as soon as ConTeXt users ask for it.

Because there are different interpretations possible, I will extend the code with
more (configurable) variants. I can also add hooks for users so that they can write
their own extensions.

Glyphs are indexed not by unicode but in their own way. This is because there is no
relationship with unicode at all, apart from the fact that a font might cover
certain ranges of characters. One character can have multiple shapes. However, at
the TeX end we use unicode, so all extra glyphs are mapped into a private space.
This is needed because we need to access them and TeX has to include them in the
output eventually.

The raw table as it comes from FontForge gets reorganized to fit our needs. In
ConTeXt that table is packed (similar tables are shared) and cached on disk so that
successive runs can use the optimized table (after loading the table is unpacked).
The flattening code used later is a prelude to an even more compact table format
(and as such it keeps evolving).

This module is sparsely documented because it is a moving target. The table format
of the reader changes and we experiment a lot with different methods for supporting
features.

As with the AFM code, we may decide to store more information in the OTF table.

Incrementing the version number will force a re-cache. We jump the number by one
when there's a fix in the FontForge library or ConTeXt code that results in
different tables.

+--ldx]]-- + +-- action handler chainproc chainmore comment +-- +-- gsub_single ok ok ok +-- gsub_multiple ok ok not implemented yet +-- gsub_alternate ok ok not implemented yet +-- gsub_ligature ok ok ok +-- gsub_context ok -- +-- gsub_contextchain ok -- +-- gsub_reversecontextchain ok -- +-- chainsub -- ok +-- reversesub -- ok +-- gpos_mark2base ok ok +-- gpos_mark2ligature ok ok +-- gpos_mark2mark ok ok +-- gpos_cursive ok untested +-- gpos_single ok ok +-- gpos_pair ok ok +-- gpos_context ok -- +-- gpos_contextchain ok -- +-- +-- todo: contextpos and contextsub and class stuff +-- +-- actions: +-- +-- handler : actions triggered by lookup +-- chainproc : actions triggered by contextual lookup +-- chainmore : multiple substitutions triggered by contextual lookup (e.g. fij -> f + ij) +-- +-- remark: the 'not implemented yet' variants will be done when we have fonts that use them +-- remark: we need to check what to do with discretionaries + +-- We used to have independent hashes for lookups but as the tags are unique +-- we now use only one hash. If needed we can have multiple again but in that +-- case I will probably prefix (i.e. rename) the lookups in the cached font file. + +-- Todo: make plugin feature that operates on char/glyphnode arrays + +local concat, insert, remove = table.concat, table.insert, table.remove +local gmatch, gsub, find, match, lower, strip = string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip +local type, next, tonumber, tostring = type, next, tonumber, tostring +local lpegmatch = lpeg.match +local random = math.random +local formatters = string.formatters + +local logs, trackers, nodes, attributes = logs, trackers, nodes, attributes + +local registertracker = trackers.register + +local fonts = fonts +local otf = fonts.handlers.otf + +local trace_lookups = false registertracker("otf.lookups", function(v) trace_lookups = v end) +local trace_singles = false registertracker("otf.singles", function(v) trace_singles = v end) +local trace_multiples = false registertracker("otf.multiples", function(v) trace_multiples = v end) +local trace_alternatives = false registertracker("otf.alternatives", function(v) trace_alternatives = v end) +local trace_ligatures = false registertracker("otf.ligatures", function(v) trace_ligatures = v end) +local trace_contexts = false registertracker("otf.contexts", function(v) trace_contexts = v end) +local trace_marks = false registertracker("otf.marks", function(v) trace_marks = v end) +local trace_kerns = false registertracker("otf.kerns", function(v) trace_kerns = v end) +local trace_cursive = false registertracker("otf.cursive", function(v) trace_cursive = v end) +local trace_preparing = false registertracker("otf.preparing", function(v) trace_preparing = v end) +local trace_bugs = false registertracker("otf.bugs", function(v) trace_bugs = v end) +local trace_details = false registertracker("otf.details", function(v) trace_details = v end) +local trace_applied = false registertracker("otf.applied", function(v) trace_applied = v end) +local trace_steps = false registertracker("otf.steps", function(v) trace_steps = v end) +local trace_skips = false registertracker("otf.skips", function(v) trace_skips = v end) +local trace_directions = false registertracker("otf.directions", function(v) trace_directions = v end) + +local report_direct = logs.reporter("fonts","otf direct") +local report_subchain = logs.reporter("fonts","otf subchain") +local report_chain = logs.reporter("fonts","otf chain") +local 
report_process = logs.reporter("fonts","otf process") +local report_prepare = logs.reporter("fonts","otf prepare") +local report_warning = logs.reporter("fonts","otf warning") + +registertracker("otf.verbose_chain", function(v) otf.setcontextchain(v and "verbose") end) +registertracker("otf.normal_chain", function(v) otf.setcontextchain(v and "normal") end) + +registertracker("otf.replacements", "otf.singles,otf.multiples,otf.alternatives,otf.ligatures") +registertracker("otf.positions","otf.marks,otf.kerns,otf.cursive") +registertracker("otf.actions","otf.replacements,otf.positions") +registertracker("otf.injections","nodes.injections") + +registertracker("*otf.sample","otf.steps,otf.actions,otf.analyzing") + +local insert_node_after = node.insert_after +local delete_node = nodes.delete +local copy_node = node.copy +local find_node_tail = node.tail or node.slide +local flush_node_list = node.flush_list +local end_of_math = node.end_of_math + +local setmetatableindex = table.setmetatableindex + +local zwnj = 0x200C +local zwj = 0x200D +local wildcard = "*" +local default = "dflt" + +local nodecodes = nodes.nodecodes +local whatcodes = nodes.whatcodes +local glyphcodes = nodes.glyphcodes +local disccodes = nodes.disccodes + +local glyph_code = nodecodes.glyph +local glue_code = nodecodes.glue +local disc_code = nodecodes.disc +local whatsit_code = nodecodes.whatsit +local math_code = nodecodes.math + +local dir_code = whatcodes.dir +local localpar_code = whatcodes.localpar + +local discretionary_code = disccodes.discretionary + +local ligature_code = glyphcodes.ligature + +local privateattribute = attributes.private + +-- Something is messed up: we have two mark / ligature indices, one at the injection +-- end and one here ... this is bases in KE's patches but there is something fishy +-- there as I'm pretty sure that for husayni we need some connection (as it's much +-- more complex than an average font) but I need proper examples of all cases, not +-- of only some. 
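-- Note, as a reading aid (this restates the convention used by the code further
-- down, it adds nothing to it): the feature handlers collected in the handlers
-- table all follow the same calling convention, for instance
--
--   head, start, done = handlers.gsub_single(head, start, kind, lookupname, replacement)
--
-- i.e. a handler receives the list head and the current glyph node, may rewrite
-- part of the list, and returns the (possibly new) head, the node to continue
-- from, and a boolean telling whether anything was changed.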
+ +local a_state = privateattribute('state') +local a_markbase = privateattribute('markbase') +local a_markmark = privateattribute('markmark') +local a_markdone = privateattribute('markdone') -- assigned at the injection end +local a_cursbase = privateattribute('cursbase') +local a_curscurs = privateattribute('curscurs') +local a_cursdone = privateattribute('cursdone') +local a_kernpair = privateattribute('kernpair') +local a_ligacomp = privateattribute('ligacomp') -- assigned here (ideally it should be combined) + +local injections = nodes.injections +local setmark = injections.setmark +local setcursive = injections.setcursive +local setkern = injections.setkern +local setpair = injections.setpair + +local markonce = true +local cursonce = true +local kernonce = true + +local fonthashes = fonts.hashes +local fontdata = fonthashes.identifiers + +local otffeatures = fonts.constructors.newfeatures("otf") +local registerotffeature = otffeatures.register + +local onetimemessage = fonts.loggers.onetimemessage or function() end + +otf.defaultnodealternate = "none" -- first last + +-- we share some vars here, after all, we have no nested lookups and less code + +local tfmdata = false +local characters = false +local descriptions = false +local resources = false +local marks = false +local currentfont = false +local lookuptable = false +local anchorlookups = false +local lookuptypes = false +local handlers = { } +local rlmode = 0 +local featurevalue = false + +-- head is always a whatsit so we can safely assume that head is not changed + +-- we use this for special testing and documentation + +local checkstep = (nodes and nodes.tracers and nodes.tracers.steppers.check) or function() end +local registerstep = (nodes and nodes.tracers and nodes.tracers.steppers.register) or function() end +local registermessage = (nodes and nodes.tracers and nodes.tracers.steppers.message) or function() end + +local function logprocess(...) + if trace_steps then + registermessage(...) + end + report_direct(...) +end + +local function logwarning(...) + report_direct(...) +end + +local f_unicode = formatters["%U"] +local f_uniname = formatters["%U (%s)"] +local f_unilist = formatters["% t (% t)"] + +local function gref(n) -- currently the same as in font-otb + if type(n) == "number" then + local description = descriptions[n] + local name = description and description.name + if name then + return f_uniname(n,name) + else + return f_unicode(n) + end + elseif n then + local num, nam = { }, { } + for i=1,#n do + local ni = n[i] + if tonumber(ni) then -- later we will start at 2 + local di = descriptions[ni] + num[i] = f_unicode(ni) + nam[i] = di and di.name or "-" + end + end + return f_unilist(num,nam) + else + return "" + end +end + +local function cref(kind,chainname,chainlookupname,lookupname,index) -- not in the mood to alias f_ + if index then + return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookupname,index) + elseif lookupname then + return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookupname) + elseif chainlookupname then + return formatters["feature %a, chain %a, sub %a"](kind,chainname,chainlookupname) + elseif chainname then + return formatters["feature %a, chain %a"](kind,chainname) + else + return formatters["feature %a"](kind) + end +end + +local function pref(kind,lookupname) + return formatters["feature %a, lookup %a"](kind,lookupname) +end + +-- We can assume that languages that use marks are not hyphenated. 
We can also assume +-- that at most one discretionary is present. + +-- We do need components in funny kerning mode but maybe I can better reconstruct then +-- as we do have the font components info available; removing components makes the +-- previous code much simpler. Also, later on copying and freeing becomes easier. +-- However, for arabic we need to keep them around for the sake of mark placement +-- and indices. + +local function copy_glyph(g) -- next and prev are untouched ! + local components = g.components + if components then + g.components = nil + local n = copy_node(g) + g.components = components + return n + else + return copy_node(g) + end +end + +-- start is a mark and we need to keep that one + +local function markstoligature(kind,lookupname,head,start,stop,char) + if start == stop and start.char == char then + return head, start + else + local prev = start.prev + local next = stop.next + start.prev = nil + stop.next = nil + local base = copy_glyph(start) + if head == start then + head = base + end + base.char = char + base.subtype = ligature_code + base.components = start + if prev then + prev.next = base + end + if next then + next.prev = base + end + base.next = next + base.prev = prev + return head, base + end +end + +-- The next code is somewhat complicated by the fact that some fonts can have ligatures made +-- from ligatures that themselves have marks. This was identified by Kai in for instance +-- arabtype: KAF LAM SHADDA ALEF FATHA (0x0643 0x0644 0x0651 0x0627 0x064E). This becomes +-- KAF LAM-ALEF with a SHADDA on the first and a FATHA op de second component. In a next +-- iteration this becomes a KAF-LAM-ALEF with a SHADDA on the second and a FATHA on the +-- third component. + +local function getcomponentindex(start) + if start.id ~= glyph_code then + return 0 + elseif start.subtype == ligature_code then + local i = 0 + local components = start.components + while components do + i = i + getcomponentindex(components) + components = components.next + end + return i + elseif not marks[start.char] then + return 1 + else + return 0 + end +end + +-- eventually we will do positioning in an other way (needs addional w/h/d fields) + +local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound) -- brr head + if start == stop and start.char == char then + start.char = char + return head, start + end + local prev = start.prev + local next = stop.next + start.prev = nil + stop.next = nil + local base = copy_glyph(start) + if start == head then + head = base + end + base.char = char + base.subtype = ligature_code + base.components = start -- start can have components + if prev then + prev.next = base + end + if next then + next.prev = base + end + base.next = next + base.prev = prev + if not discfound then + local deletemarks = markflag ~= "mark" + local components = start + local baseindex = 0 + local componentindex = 0 + local head = base + local current = base + -- first we loop over the glyphs in start .. 
stop + while start do + local char = start.char + if not marks[char] then + baseindex = baseindex + componentindex + componentindex = getcomponentindex(start) + elseif not deletemarks then -- quite fishy + start[a_ligacomp] = baseindex + (start[a_ligacomp] or componentindex) + if trace_marks then + logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp]) + end + head, current = insert_node_after(head,current,copy_node(start)) -- unlikely that mark has components + elseif trace_marks then + logwarning("%s: delete mark %s",pref(kind,lookupname),gref(char)) + end + start = start.next + end + -- we can have one accent as part of a lookup and another following + -- local start = components -- was wrong (component scanning was introduced when more complex ligs in devanagari was added) + local start = current.next + while start and start.id == glyph_code do + local char = start.char + if marks[char] then + start[a_ligacomp] = baseindex + (start[a_ligacomp] or componentindex) + if trace_marks then + logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp]) + end + else + break + end + start = start.next + end + end + return head, base +end + +function handlers.gsub_single(head,start,kind,lookupname,replacement) + if trace_singles then + logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(start.char),gref(replacement)) + end + start.char = replacement + return head, start, true +end + +local function get_alternative_glyph(start,alternatives,value,trace_alternatives) + local n = #alternatives + if value == "random" then + local r = random(1,n) + return alternatives[r], trace_alternatives and formatters["value %a, taking %a"](value,r) + elseif value == "first" then + return alternatives[1], trace_alternatives and formatters["value %a, taking %a"](value,1) + elseif value == "last" then + return alternatives[n], trace_alternatives and formatters["value %a, taking %a"](value,n) + else + value = tonumber(value) + if type(value) ~= "number" then + return alternatives[1], trace_alternatives and formatters["invalid value %s, taking %a"](value,1) + elseif value > n then + local defaultalt = otf.defaultnodealternate + if defaultalt == "first" then + return alternatives[n], trace_alternatives and formatters["invalid value %s, taking %a"](value,1) + elseif defaultalt == "last" then + return alternatives[1], trace_alternatives and formatters["invalid value %s, taking %a"](value,n) + else + return false, trace_alternatives and formatters["invalid value %a, %s"](value,"out of range") + end + elseif value == 0 then + return start.char, trace_alternatives and formatters["invalid value %a, %s"](value,"no change") + elseif value < 1 then + return alternatives[1], trace_alternatives and formatters["invalid value %a, taking %a"](value,1) + else + return alternatives[value], trace_alternatives and formatters["value %a, taking %a"](value,value) + end + end +end + +local function multiple_glyphs(head,start,multiple,ignoremarks) + local nofmultiples = #multiple + if nofmultiples > 0 then + start.char = multiple[1] + if nofmultiples > 1 then + local sn = start.next + for k=2,nofmultiples do -- todo: use insert_node +-- untested: +-- +-- while ignoremarks and marks[sn.char] then +-- local sn = sn.next +-- end + local n = copy_node(start) -- ignore components + n.char = multiple[k] + n.next = sn + n.prev = start + if sn then + sn.prev = n + end + start.next = n + start = n + end + end + return head, start, true + else + if 
trace_multiples then + logprocess("no multiple for %s",gref(start.char)) + end + return head, start, false + end +end + +function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence) + local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue + local choice, comment = get_alternative_glyph(start,alternative,value,trace_alternatives) + if choice then + if trace_alternatives then + logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(start.char),choice,gref(choice),comment) + end + start.char = choice + else + if trace_alternatives then + logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(start.char),comment) + end + end + return head, start, true +end + +function handlers.gsub_multiple(head,start,kind,lookupname,multiple,sequence) + if trace_multiples then + logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(start.char),gref(multiple)) + end + return multiple_glyphs(head,start,multiple,sequence.flags[1]) +end + +function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence) + local s, stop, discfound = start.next, nil, false + local startchar = start.char + if marks[startchar] then + while s do + local id = s.id + if id == glyph_code and s.font == currentfont and s.subtype<256 then + local lg = ligature[s.char] + if lg then + stop = s + ligature = lg + s = s.next + else + break + end + else + break + end + end + if stop then + local lig = ligature.ligature + if lig then + if trace_ligatures then + local stopchar = stop.char + head, start = markstoligature(kind,lookupname,head,start,stop,lig) + logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char)) + else + head, start = markstoligature(kind,lookupname,head,start,stop,lig) + end + return head, start, true + else + -- ok, goto next lookup + end + end + else + local skipmark = sequence.flags[1] + while s do + local id = s.id + if id == glyph_code and s.subtype<256 then + if s.font == currentfont then + local char = s.char + if skipmark and marks[char] then + s = s.next + else + local lg = ligature[char] + if lg then + stop = s + ligature = lg + s = s.next + else + break + end + end + else + break + end + elseif id == disc_code then + discfound = true + s = s.next + else + break + end + end + local lig = ligature.ligature + if lig then + if stop then + if trace_ligatures then + local stopchar = stop.char + head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound) + logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char)) + else + head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound) + end + return head, start, true + else + -- weird but happens (in some arabic font) + start.char = lig + if trace_ligatures then + logprocess("%s: replacing %s by (no real) ligature %s case 3",pref(kind,lookupname),gref(startchar),gref(lig)) + end + return head, start, true + end + else + -- weird but happens + end + end + return head, start, false +end + +--[[ldx-- +

+We get hits on a mark, but we're not sure if it has to be applied, so we need to
+explicitly test for basechar, baselig and basemark entries.
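+
+As a rough sketch (the anchor class name is made up for illustration), the test in
+the handlers below amounts to finding an anchor class that base and mark share and
+that the current lookup references:
+
+    local ba = descriptions[basechar].anchors.basechar["Anchor-1"] -- base side
+    local ma = markanchors["Anchor-1"]                             -- mark side (lookup data)
+    if anchorlookups[lookupname]["Anchor-1"] and ba and ma then
+        setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+    end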

+--ldx]]-- + +function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence) + local markchar = start.char + if marks[markchar] then + local base = start.prev -- [glyph] [start=mark] + if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then + local basechar = base.char + if marks[basechar] then + while true do + base = base.prev + if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then + basechar = base.char + if not marks[basechar] then + break + end + else + if trace_bugs then + logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar)) + end + return head, start, false + end + end + end + local baseanchors = descriptions[basechar] + if baseanchors then + baseanchors = baseanchors.anchors + end + if baseanchors then + local baseanchors = baseanchors['basechar'] + if baseanchors then + local al = anchorlookups[lookupname] + for anchor,ba in next, baseanchors do + if al[anchor] then + local ma = markanchors[anchor] + if ma then + local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) + if trace_marks then + logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)", + pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) + end + return head, start, true + end + end + end + if trace_bugs then + logwarning("%s, no matching anchors for mark %s and base %s",pref(kind,lookupname),gref(markchar),gref(basechar)) + end + end + elseif trace_bugs then + -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar)) + onetimemessage(currentfont,basechar,"no base anchors",report_fonts) + end + elseif trace_bugs then + logwarning("%s: prev node is no char",pref(kind,lookupname)) + end + elseif trace_bugs then + logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar)) + end + return head, start, false +end + +function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence) + -- check chainpos variant + local markchar = start.char + if marks[markchar] then + local base = start.prev -- [glyph] [optional marks] [start=mark] + if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then + local basechar = base.char + if marks[basechar] then + while true do + base = base.prev + if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then + basechar = base.char + if not marks[basechar] then + break + end + else + if trace_bugs then + logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar)) + end + return head, start, false + end + end + end + local index = start[a_ligacomp] + local baseanchors = descriptions[basechar] + if baseanchors then + baseanchors = baseanchors.anchors + if baseanchors then + local baseanchors = baseanchors['baselig'] + if baseanchors then + local al = anchorlookups[lookupname] + for anchor, ba in next, baseanchors do + if al[anchor] then + local ma = markanchors[anchor] + if ma then + ba = ba[index] + if ba then + local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) -- index + if trace_marks then + logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)", + pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy) + end + return head, start, true + else + if trace_bugs then + logwarning("%s: no matching anchors for mark %s and baselig %s with index 
%a",pref(kind,lookupname),gref(markchar),gref(basechar),index) + end + end + end + end + end + if trace_bugs then + logwarning("%s: no matching anchors for mark %s and baselig %s",pref(kind,lookupname),gref(markchar),gref(basechar)) + end + end + end + elseif trace_bugs then + -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar)) + onetimemessage(currentfont,basechar,"no base anchors",report_fonts) + end + elseif trace_bugs then + logwarning("%s: prev node is no char",pref(kind,lookupname)) + end + elseif trace_bugs then + logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar)) + end + return head, start, false +end + +function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence) + local markchar = start.char + if marks[markchar] then + local base = start.prev -- [glyph] [basemark] [start=mark] + local slc = start[a_ligacomp] + if slc then -- a rather messy loop ... needs checking with husayni + while base do + local blc = base[a_ligacomp] + if blc and blc ~= slc then + base = base.prev + else + break + end + end + end + if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then -- subtype test can go + local basechar = base.char + local baseanchors = descriptions[basechar] + if baseanchors then + baseanchors = baseanchors.anchors + if baseanchors then + baseanchors = baseanchors['basemark'] + if baseanchors then + local al = anchorlookups[lookupname] + for anchor,ba in next, baseanchors do + if al[anchor] then + local ma = markanchors[anchor] + if ma then + local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true) + if trace_marks then + logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)", + pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) + end + return head, start, true + end + end + end + if trace_bugs then + logwarning("%s: no matching anchors for mark %s and basemark %s",pref(kind,lookupname),gref(markchar),gref(basechar)) + end + end + end + elseif trace_bugs then + -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar)) + onetimemessage(currentfont,basechar,"no base anchors",report_fonts) + end + elseif trace_bugs then + logwarning("%s: prev node is no mark",pref(kind,lookupname)) + end + elseif trace_bugs then + logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar)) + end + return head, start, false +end + +function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence) -- to be checked + local alreadydone = cursonce and start[a_cursbase] + if not alreadydone then + local done = false + local startchar = start.char + if marks[startchar] then + if trace_cursive then + logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar)) + end + else + local nxt = start.next + while not done and nxt and nxt.id == glyph_code and nxt.font == currentfont and nxt.subtype<256 do + local nextchar = nxt.char + if marks[nextchar] then + -- should not happen (maybe warning) + nxt = nxt.next + else + local entryanchors = descriptions[nextchar] + if entryanchors then + entryanchors = entryanchors.anchors + if entryanchors then + entryanchors = entryanchors['centry'] + if entryanchors then + local al = anchorlookups[lookupname] + for anchor, entry in next, entryanchors do + if al[anchor] then + local exit = exitanchors[anchor] + if exit then + local dx, dy, bound = 
setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar]) + if trace_cursive then + logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode) + end + done = true + break + end + end + end + end + end + elseif trace_bugs then + -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar)) + onetimemessage(currentfont,startchar,"no entry anchors",report_fonts) + end + break + end + end + end + return head, start, done + else + if trace_cursive and trace_details then + logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone) + end + return head, start, false + end +end + +function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence) + local startchar = start.char + local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar]) + if trace_kerns then + logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h) + end + return head, start, false +end + +function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence) + -- todo: kerns in disc nodes: pre, post, replace -> loop over disc too + -- todo: kerns in components of ligatures + local snext = start.next + if not snext then + return head, start, false + else + local prev, done = start, false + local factor = tfmdata.parameters.factor + local lookuptype = lookuptypes[lookupname] + while snext and snext.id == glyph_code and snext.font == currentfont and snext.subtype<256 do + local nextchar = snext.char + local krn = kerns[nextchar] + if not krn and marks[nextchar] then + prev = snext + snext = snext.next + else + if not krn then + -- skip + elseif type(krn) == "table" then + if lookuptype == "pair" then -- probably not needed + local a, b = krn[2], krn[3] + if a and #a > 0 then + local startchar = start.char + local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar]) + if trace_kerns then + logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h) + end + end + if b and #b > 0 then + local startchar = start.char + local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar]) + if trace_kerns then + logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h) + end + end + else -- wrong ... position has different entries + report_process("%s: check this out (old kern stuff)",pref(kind,lookupname)) + -- local a, b = krn[2], krn[6] + -- if a and a ~= 0 then + -- local k = setkern(snext,factor,rlmode,a) + -- if trace_kerns then + -- logprocess("%s: inserting first kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar)) + -- end + -- end + -- if b and b ~= 0 then + -- logwarning("%s: ignoring second kern xoff %s",pref(kind,lookupname),b*factor) + -- end + end + done = true + elseif krn ~= 0 then + local k = setkern(snext,factor,rlmode,krn) + if trace_kerns then + logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar)) + end + done = true + end + break + end + end + return head, start, done + end +end + +--[[ldx-- +

+I will implement multiple chain replacements once I run into a font that uses
+it. It's not that complex to handle.

+--ldx]]-- + +local chainmores = { } +local chainprocs = { } + +local function logprocess(...) + if trace_steps then + registermessage(...) + end + report_subchain(...) +end + +local logwarning = report_subchain + +local function logprocess(...) + if trace_steps then + registermessage(...) + end + report_chain(...) +end + +local logwarning = report_chain + +-- We could share functions but that would lead to extra function calls with many +-- arguments, redundant tests and confusing messages. + +function chainprocs.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname) + logwarning("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname)) + return head, start, false +end + +function chainmores.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname,n) + logprocess("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname)) + return head, start, false +end + +-- The reversesub is a special case, which is why we need to store the replacements +-- in a bit weird way. There is no lookup and the replacement comes from the lookup +-- itself. It is meant mostly for dealing with Urdu. + +function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements) + local char = start.char + local replacement = replacements[char] + if replacement then + if trace_singles then + logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement)) + end + start.char = replacement + return head, start, true + else + return head, start, false + end +end + +--[[ldx-- +

+This chain stuff is somewhat tricky since we can have a sequence of actions to be
+applied: single, alternate, multiple or ligature, where the ligature can be an
+invalid one in the sense that it replaces multiple glyphs by one, but not necessarily
+one that looks like the combination (i.e. it is then the counterpart of multiple).
+For example, the following is valid:
+
+    xxxabcdexxx [single a->A][multiple b->BCD][ligature cde->E] xxxABCDExxx
+
+Therefore we don't really do the replacement here already unless we have the
+single lookup case. The efficiency of the replacements could be improved by deleting
+as little as needed but that would also make the code even more messy.
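+
+For reference, a sketch of the per-lookup data that the subhandlers below consult in
+lookuphash[lookupname] (it is built by prepare_lookups further down; the unicodes are
+only illustrative):
+
+    single   : { [0x0061] = 0x0041 }                      -- a -> A
+    multiple : { [0x0062] = { 0x0042, 0x0043, 0x0044 } }  -- b -> B C D
+    ligature : { [0x0063] = { [0x0064] = { [0x0065] = { ligature = 0x0045 } } } }
+    pair     : { [first]  = { [second] = krn } }          -- krn: a number or a table of value records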

+--ldx]]-- + +-- local function delete_till_stop(head,start,stop,ignoremarks) -- keeps start +-- local n = 1 +-- if start == stop then +-- -- done +-- elseif ignoremarks then +-- repeat -- start x x m x x stop => start m +-- local next = start.next +-- if not marks[next.char] then +-- local components = next.components +-- if components then -- probably not needed +-- flush_node_list(components) +-- end +-- head = delete_node(head,next) +-- end +-- n = n + 1 +-- until next == stop +-- else -- start x x x stop => start +-- repeat +-- local next = start.next +-- local components = next.components +-- if components then -- probably not needed +-- flush_node_list(components) +-- end +-- head = delete_node(head,next) +-- n = n + 1 +-- until next == stop +-- end +-- return head, n +-- end + +--[[ldx-- +

+Here we replace start by a single variant. First we delete the rest of the
+match.

+--ldx]]-- + +function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex) + -- todo: marks ? + local current = start + local subtables = currentlookup.subtables + if #subtables > 1 then + logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," ")) + end + while current do + if current.id == glyph_code then + local currentchar = current.char + local lookupname = subtables[1] -- only 1 + local replacement = lookuphash[lookupname] + if not replacement then + if trace_bugs then + logwarning("%s: no single hits",cref(kind,chainname,chainlookupname,lookupname,chainindex)) + end + else + replacement = replacement[currentchar] + if not replacement or replacement == "" then + if trace_bugs then + logwarning("%s: no single for %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar)) + end + else + if trace_singles then + logprocess("%s: replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement)) + end + current.char = replacement + end + end + return head, start, true + elseif current == stop then + break + else + current = current.next + end + end + return head, start, false +end + +chainmores.gsub_single = chainprocs.gsub_single + +--[[ldx-- +

+Here we replace start by a sequence of new glyphs. First we delete the rest of
+the match.
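+
+The actual insertion is done by the multiple_glyphs helper defined earlier: the first
+unicode of the replacement overwrites start.char and the remaining ones are inserted
+as copies of start right after it, so roughly (illustrative values):
+
+    { 0x0042, 0x0043, 0x0044 }  -->  start becomes B, with copies C and D inserted after it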

+--ldx]]-- + +function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) + -- local head, n = delete_till_stop(head,start,stop) + local startchar = start.char + local subtables = currentlookup.subtables + local lookupname = subtables[1] + local replacements = lookuphash[lookupname] + if not replacements then + if trace_bugs then + logwarning("%s: no multiple hits",cref(kind,chainname,chainlookupname,lookupname)) + end + else + replacements = replacements[startchar] + if not replacements or replacement == "" then + if trace_bugs then + logwarning("%s: no multiple for %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar)) + end + else + if trace_multiples then + logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements)) + end + return multiple_glyphs(head,start,replacements,currentlookup.flags[1]) + end + end + return head, start, false +end + +chainmores.gsub_multiple = chainprocs.gsub_multiple + +--[[ldx-- +

+Here we replace start by a new glyph. First we delete the rest of the match.
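+
+Alternates are stored as a simple list per character and the feature value picks one
+of them via get_alternative_glyph above: "first", "last", "random" or a numeric index
+(the codepoints are illustrative):
+
+    lookuphash[lookupname][0x0061] = { 0xE001, 0xE002, 0xE003 } -- three variants of 'a'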

+--ldx]]-- + +-- char_1 mark_1 -> char_x mark_1 (ignore marks) +-- char_1 mark_1 -> char_x + +-- to be checked: do we always have just one glyph? +-- we can also have alternates for marks +-- marks come last anyway +-- are there cases where we need to delete the mark + +function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) + local current = start + local subtables = currentlookup.subtables + local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue + while current do + if current.id == glyph_code then -- is this check needed? + local currentchar = current.char + local lookupname = subtables[1] + local alternatives = lookuphash[lookupname] + if not alternatives then + if trace_bugs then + logwarning("%s: no alternative hit",cref(kind,chainname,chainlookupname,lookupname)) + end + else + alternatives = alternatives[currentchar] + if alternatives then + local choice, comment = get_alternative_glyph(current,alternatives,value,trace_alternatives) + if choice then + if trace_alternatives then + logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(char),choice,gref(choice),comment) + end + start.char = choice + else + if trace_alternatives then + logwarning("%s: no variant %a for %s, %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(char),comment) + end + end + elseif trace_bugs then + logwarning("%s: no alternative for %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar),comment) + end + end + return head, start, true + elseif current == stop then + break + else + current = current.next + end + end + return head, start, false +end + +chainmores.gsub_alternate = chainprocs.gsub_alternate + +--[[ldx-- +

+When we replace ligatures we use a helper that handles the marks. I might change
+this function (move code inline and handle the marks by a separate function). We
+assume rather stupid ligatures (no complex disc nodes).
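+
+A sketch of the ligature data walked below (codepoints illustrative): it is a
+character-indexed tree whose nodes carry a 'ligature' entry once a full match has
+been consumed, e.g.
+
+    lookuphash[lookupname][0x0066] = { [0x0069] = { ligature = 0xFB01 } } -- f i -> fi
+
+so after consuming 'f' and 'i' the handler calls toligature with 0xFB01 over the
+matched range.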

+--ldx]]-- + +function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex) + local startchar = start.char + local subtables = currentlookup.subtables + local lookupname = subtables[1] + local ligatures = lookuphash[lookupname] + if not ligatures then + if trace_bugs then + logwarning("%s: no ligature hits",cref(kind,chainname,chainlookupname,lookupname,chainindex)) + end + else + ligatures = ligatures[startchar] + if not ligatures then + if trace_bugs then + logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar)) + end + else + local s = start.next + local discfound = false + local last = stop + local nofreplacements = 0 + local skipmark = currentlookup.flags[1] + while s do + local id = s.id + if id == disc_code then + s = s.next + discfound = true + else + local schar = s.char + if skipmark and marks[schar] then -- marks + s = s.next + else + local lg = ligatures[schar] + if lg then + ligatures, last, nofreplacements = lg, s, nofreplacements + 1 + if s == stop then + break + else + s = s.next + end + else + break + end + end + end + end + local l2 = ligatures.ligature + if l2 then + if chainindex then + stop = last + end + if trace_ligatures then + if start == stop then + logprocess("%s: replacing character %s by ligature %s case 3",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2)) + else + logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char),gref(l2)) + end + end + head, start = toligature(kind,lookupname,head,start,stop,l2,currentlookup.flags[1],discfound) + return head, start, true, nofreplacements + elseif trace_bugs then + if start == stop then + logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar)) + else + logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char)) + end + end + end + end + return head, start, false, 0 +end + +chainmores.gsub_ligature = chainprocs.gsub_ligature + +function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) + local markchar = start.char + if marks[markchar] then + local subtables = currentlookup.subtables + local lookupname = subtables[1] + local markanchors = lookuphash[lookupname] + if markanchors then + markanchors = markanchors[markchar] + end + if markanchors then + local base = start.prev -- [glyph] [start=mark] + if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then + local basechar = base.char + if marks[basechar] then + while true do + base = base.prev + if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then + basechar = base.char + if not marks[basechar] then + break + end + else + if trace_bugs then + logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar)) + end + return head, start, false + end + end + end + local baseanchors = descriptions[basechar].anchors + if baseanchors then + local baseanchors = baseanchors['basechar'] + if baseanchors then + local al = anchorlookups[lookupname] + for anchor,ba in next, baseanchors do + if al[anchor] then + local ma = markanchors[anchor] + if ma then + local dx, dy, bound = 
setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) + if trace_marks then + logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)", + cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) + end + return head, start, true + end + end + end + if trace_bugs then + logwarning("%s, no matching anchors for mark %s and base %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar)) + end + end + end + elseif trace_bugs then + logwarning("%s: prev node is no char",cref(kind,chainname,chainlookupname,lookupname)) + end + elseif trace_bugs then + logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar)) + end + elseif trace_bugs then + logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar)) + end + return head, start, false +end + +function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) + local markchar = start.char + if marks[markchar] then + local subtables = currentlookup.subtables + local lookupname = subtables[1] + local markanchors = lookuphash[lookupname] + if markanchors then + markanchors = markanchors[markchar] + end + if markanchors then + local base = start.prev -- [glyph] [optional marks] [start=mark] + if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then + local basechar = base.char + if marks[basechar] then + while true do + base = base.prev + if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then + basechar = base.char + if not marks[basechar] then + break + end + else + if trace_bugs then + logwarning("%s: no base for mark %s",cref(kind,chainname,chainlookupname,lookupname),markchar) + end + return head, start, false + end + end + end + -- todo: like marks a ligatures hash + local index = start[a_ligacomp] + local baseanchors = descriptions[basechar].anchors + if baseanchors then + local baseanchors = baseanchors['baselig'] + if baseanchors then + local al = anchorlookups[lookupname] + for anchor,ba in next, baseanchors do + if al[anchor] then + local ma = markanchors[anchor] + if ma then + ba = ba[index] + if ba then + local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) -- index + if trace_marks then + logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)", + cref(kind,chainname,chainlookupname,lookupname),anchor,a or bound,gref(markchar),gref(basechar),index,dx,dy) + end + return head, start, true + end + end + end + end + if trace_bugs then + logwarning("%s: no matching anchors for mark %s and baselig %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar)) + end + end + end + elseif trace_bugs then + logwarning("feature %s, lookup %s: prev node is no char",kind,lookupname) + end + elseif trace_bugs then + logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar)) + end + elseif trace_bugs then + logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar)) + end + return head, start, false +end + +function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) + local markchar = start.char + if marks[markchar] then + -- local alreadydone = markonce and start[a_markmark] + -- if not alreadydone then + -- local markanchors = 
descriptions[markchar].anchors markanchors = markanchors and markanchors.mark + local subtables = currentlookup.subtables + local lookupname = subtables[1] + local markanchors = lookuphash[lookupname] + if markanchors then + markanchors = markanchors[markchar] + end + if markanchors then + local base = start.prev -- [glyph] [basemark] [start=mark] + local slc = start[a_ligacomp] + if slc then -- a rather messy loop ... needs checking with husayni + while base do + local blc = base[a_ligacomp] + if blc and blc ~= slc then + base = base.prev + else + break + end + end + end + if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then -- subtype test can go + local basechar = base.char + local baseanchors = descriptions[basechar].anchors + if baseanchors then + baseanchors = baseanchors['basemark'] + if baseanchors then + local al = anchorlookups[lookupname] + for anchor,ba in next, baseanchors do + if al[anchor] then + local ma = markanchors[anchor] + if ma then + local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true) + if trace_marks then + logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)", + cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) + end + return head, start, true + end + end + end + if trace_bugs then + logwarning("%s: no matching anchors for mark %s and basemark %s",gref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar)) + end + end + end + elseif trace_bugs then + logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname)) + end + elseif trace_bugs then + logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar)) + end + -- elseif trace_marks and trace_details then + -- logprocess("%s, mark %s is already bound (n=%s), ignoring mark2mark",pref(kind,lookupname),gref(markchar),alreadydone) + -- end + elseif trace_bugs then + logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar)) + end + return head, start, false +end + +function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) + local alreadydone = cursonce and start[a_cursbase] + if not alreadydone then + local startchar = start.char + local subtables = currentlookup.subtables + local lookupname = subtables[1] + local exitanchors = lookuphash[lookupname] + if exitanchors then + exitanchors = exitanchors[startchar] + end + if exitanchors then + local done = false + if marks[startchar] then + if trace_cursive then + logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar)) + end + else + local nxt = start.next + while not done and nxt and nxt.id == glyph_code and nxt.font == currentfont and nxt.subtype<256 do + local nextchar = nxt.char + if marks[nextchar] then + -- should not happen (maybe warning) + nxt = nxt.next + else + local entryanchors = descriptions[nextchar] + if entryanchors then + entryanchors = entryanchors.anchors + if entryanchors then + entryanchors = entryanchors['centry'] + if entryanchors then + local al = anchorlookups[lookupname] + for anchor, entry in next, entryanchors do + if al[anchor] then + local exit = exitanchors[anchor] + if exit then + local dx, dy, bound = setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar]) + if trace_cursive then + logprocess("%s: moving %s to %s cursive (%p,%p) using 
anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode) + end + done = true + break + end + end + end + end + end + elseif trace_bugs then + -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar)) + onetimemessage(currentfont,startchar,"no entry anchors",report_fonts) + end + break + end + end + end + return head, start, done + else + if trace_cursive and trace_details then + logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone) + end + return head, start, false + end + end + return head, start, false +end + +function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence) + -- untested .. needs checking for the new model + local startchar = start.char + local subtables = currentlookup.subtables + local lookupname = subtables[1] + local kerns = lookuphash[lookupname] + if kerns then + kerns = kerns[startchar] -- needed ? + if kerns then + local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar]) + if trace_kerns then + logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h) + end + end + end + return head, start, false +end + +chainmores.gpos_single = chainprocs.gpos_single -- okay? + +-- when machines become faster i will make a shared function + +function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence) + local snext = start.next + if snext then + local startchar = start.char + local subtables = currentlookup.subtables + local lookupname = subtables[1] + local kerns = lookuphash[lookupname] + if kerns then + kerns = kerns[startchar] + if kerns then + local lookuptype = lookuptypes[lookupname] + local prev, done = start, false + local factor = tfmdata.parameters.factor + while snext and snext.id == glyph_code and snext.font == currentfont and snext.subtype<256 do + local nextchar = snext.char + local krn = kerns[nextchar] + if not krn and marks[nextchar] then + prev = snext + snext = snext.next + else + if not krn then + -- skip + elseif type(krn) == "table" then + if lookuptype == "pair" then + local a, b = krn[2], krn[3] + if a and #a > 0 then + local startchar = start.char + local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar]) + if trace_kerns then + logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h) + end + end + if b and #b > 0 then + local startchar = start.char + local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar]) + if trace_kerns then + logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h) + end + end + else + report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname)) + local a, b = krn[2], krn[6] + if a and a ~= 0 then + local k = setkern(snext,factor,rlmode,a) + if trace_kerns then + logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar)) + end + end + if b and b ~= 0 then + logwarning("%s: ignoring second kern xoff %s",cref(kind,chainname,chainlookupname),b*factor) + end + 
end + done = true + elseif krn ~= 0 then + local k = setkern(snext,factor,rlmode,krn) + if trace_kerns then + logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar)) + end + done = true + end + break + end + end + return head, start, done + end + end + end + return head, start, false +end + +chainmores.gpos_pair = chainprocs.gpos_pair -- okay? + +-- what pointer to return, spec says stop +-- to be discussed ... is bidi changer a space? +-- elseif char == zwnj and sequence[n][32] then -- brrr + +-- somehow l or f is global +-- we don't need to pass the currentcontext, saves a bit +-- make a slow variant then can be activated but with more tracing + +local function show_skip(kind,chainname,char,ck,class) + if ck[9] then + logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a, %a => %a",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10]) + else + logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a",cref(kind,chainname),gref(char),class,ck[1],ck[2]) + end +end + +local function normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash) + -- local rule, lookuptype, sequence, f, l, lookups = ck[1], ck[2] ,ck[3], ck[4], ck[5], ck[6] + local flags = sequence.flags + local done = false + local skipmark = flags[1] + local skipligature = flags[2] + local skipbase = flags[3] + local someskip = skipmark or skipligature or skipbase -- could be stored in flags for a fast test (hm, flags could be false !) + local markclass = sequence.markclass -- todo, first we need a proper test + local skipped = false + for k=1,#contexts do + local match = true + local current = start + local last = start + local ck = contexts[k] + local seq = ck[3] + local s = #seq + -- f..l = mid string + if s == 1 then + -- never happens + match = current.id == glyph_code and current.font == currentfont and current.subtype<256 and seq[1][current.char] + else + -- maybe we need a better space check (maybe check for glue or category or combination) + -- we cannot optimize for n=2 because there can be disc nodes + local f, l = ck[4], ck[5] + -- current match + if f == 1 and f == l then -- current only + -- already a hit + -- match = true + else -- before/current/after | before/current | current/after + -- no need to test first hit (to be optimized) + if f == l then -- new, else last out of sync (f is > 1) + -- match = true + else + local n = f + 1 + last = last.next + while n <= l do + if last then + local id = last.id + if id == glyph_code then + if last.font == currentfont and last.subtype<256 then + local char = last.char + local ccd = descriptions[char] + if ccd then + local class = ccd.class + if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then + skipped = true + if trace_skips then + show_skip(kind,chainname,char,ck,class) + end + last = last.next + elseif seq[n][char] then + if n < l then + last = last.next + end + n = n + 1 + else + match = false + break + end + else + match = false + break + end + else + match = false + break + end + elseif id == disc_code then + last = last.next + else + match = false + break + end + else + match = false + break + end + end + end + end + -- before + if match and f > 1 then + local prev = start.prev + if prev then + local n = f-1 + while n >= 1 do + if prev then + local id = prev.id + if id == glyph_code then + if prev.font == currentfont and prev.subtype<256 then -- normal char + local 
char = prev.char + local ccd = descriptions[char] + if ccd then + local class = ccd.class + if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then + skipped = true + if trace_skips then + show_skip(kind,chainname,char,ck,class) + end + elseif seq[n][char] then + n = n -1 + else + match = false + break + end + else + match = false + break + end + else + match = false + break + end + elseif id == disc_code then + -- skip 'm + elseif seq[n][32] then + n = n -1 + else + match = false + break + end + prev = prev.prev + elseif seq[n][32] then -- somewhat special, as zapfino can have many preceding spaces + n = n -1 + else + match = false + break + end + end + elseif f == 2 then + match = seq[1][32] + else + for n=f-1,1 do + if not seq[n][32] then + match = false + break + end + end + end + end + -- after + if match and s > l then + local current = last and last.next + if current then + -- removed optimization for s-l == 1, we have to deal with marks anyway + local n = l + 1 + while n <= s do + if current then + local id = current.id + if id == glyph_code then + if current.font == currentfont and current.subtype<256 then -- normal char + local char = current.char + local ccd = descriptions[char] + if ccd then + local class = ccd.class + if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then + skipped = true + if trace_skips then + show_skip(kind,chainname,char,ck,class) + end + elseif seq[n][char] then + n = n + 1 + else + match = false + break + end + else + match = false + break + end + else + match = false + break + end + elseif id == disc_code then + -- skip 'm + elseif seq[n][32] then -- brrr + n = n + 1 + else + match = false + break + end + current = current.next + elseif seq[n][32] then + n = n + 1 + else + match = false + break + end + end + elseif s-l == 1 then + match = seq[s][32] + else + for n=l+1,s do + if not seq[n][32] then + match = false + break + end + end + end + end + end + if match then + -- ck == currentcontext + if trace_contexts then + local rule, lookuptype, f, l = ck[1], ck[2], ck[4], ck[5] + local char = start.char + if ck[9] then + logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a, %a => %a", + cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10]) + else + logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a", + cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype) + end + end + local chainlookups = ck[6] + if chainlookups then + local nofchainlookups = #chainlookups + -- we can speed this up if needed + if nofchainlookups == 1 then + local chainlookupname = chainlookups[1] + local chainlookup = lookuptable[chainlookupname] + if chainlookup then + local cp = chainprocs[chainlookup.type] + if cp then + local ok + head, start, ok = cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence) + if ok then + done = true + end + else + logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type) + end + else -- shouldn't happen + logprocess("%s is not yet supported",cref(kind,chainname,chainlookupname)) + end + else + local i = 1 + repeat + if skipped then + while true do + local char = start.char + local ccd = descriptions[char] + if ccd then + local class = ccd.class + if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and 
not markclass[char]) then + start = start.next + else + break + end + else + break + end + end + end + local chainlookupname = chainlookups[i] + local chainlookup = lookuptable[chainlookupname] + if not chainlookup then + -- okay, n matches, < n replacements + i = i + 1 + else + local cp = chainmores[chainlookup.type] + if not cp then + -- actually an error + logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type) + i = i + 1 + else + local ok, n + head, start, ok, n = cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence) + -- messy since last can be changed ! + if ok then + done = true + -- skip next one(s) if ligature + i = i + (n or 1) + else + i = i + 1 + end + end + end + if start then + start = start.next + else + -- weird + end + until i > nofchainlookups + end + else + local replacements = ck[7] + if replacements then + head, start, done = chainprocs.reversesub(head,start,last,kind,chainname,ck,lookuphash,replacements) -- sequence + else + done = true -- can be meant to be skipped + if trace_contexts then + logprocess("%s: skipping match",cref(kind,chainname)) + end + end + end + end + end + return head, start, done +end + +-- Because we want to keep this elsewhere (an because speed is less an issue) we +-- pass the font id so that the verbose variant can access the relevant helper tables. + +local verbose_handle_contextchain = function(font,...) + logwarning("no verbose handler installed, reverting to 'normal'") + otf.setcontextchain() + return normal_handle_contextchain(...) +end + +otf.chainhandlers = { + normal = normal_handle_contextchain, + verbose = verbose_handle_contextchain, +} + +function otf.setcontextchain(method) + if not method or method == "normal" or not otf.chainhandlers[method] then + if handlers.contextchain then -- no need for a message while making the format + logwarning("installing normal contextchain handler") + end + handlers.contextchain = normal_handle_contextchain + else + logwarning("installing contextchain handler %a",method) + local handler = otf.chainhandlers[method] + handlers.contextchain = function(...) + return handler(currentfont,...) -- hm, get rid of ... + end + end + handlers.gsub_context = handlers.contextchain + handlers.gsub_contextchain = handlers.contextchain + handlers.gsub_reversecontextchain = handlers.contextchain + handlers.gpos_contextchain = handlers.contextchain + handlers.gpos_context = handlers.contextchain +end + +otf.setcontextchain() + +local missing = { } -- we only report once + +local function logprocess(...) + if trace_steps then + registermessage(...) + end + report_process(...) 
+end + +local logwarning = report_process + +local function report_missing_cache(typ,lookup) + local f = missing[currentfont] if not f then f = { } missing[currentfont] = f end + local t = f[typ] if not t then t = { } f[typ] = t end + if not t[lookup] then + t[lookup] = true + logwarning("missing cache for lookup %a, type %a, font %a, name %a",lookup,typ,currentfont,tfmdata.properties.fullname) + end +end + +local resolved = { } -- we only resolve a font,script,language pair once + +-- todo: pass all these 'locals' in a table + +local lookuphashes = { } + +setmetatableindex(lookuphashes, function(t,font) + local lookuphash = fontdata[font].resources.lookuphash + if not lookuphash or not next(lookuphash) then + lookuphash = false + end + t[font] = lookuphash + return lookuphash +end) + +-- fonts.hashes.lookups = lookuphashes + +local autofeatures = fonts.analyzers.features -- was: constants + +local function initialize(sequence,script,language,enabled) + local features = sequence.features + if features then + for kind, scripts in next, features do + local valid = enabled[kind] + if valid then + local languages = scripts[script] or scripts[wildcard] + if languages and (languages[language] or languages[wildcard]) then + return { valid, autofeatures[kind] or false, sequence.chain or 0, kind, sequence } + end + end + end + end + return false +end + +function otf.dataset(tfmdata,font) -- generic variant, overloaded in context + local shared = tfmdata.shared + local properties = tfmdata.properties + local language = properties.language or "dflt" + local script = properties.script or "dflt" + local enabled = shared.features + local res = resolved[font] + if not res then + res = { } + resolved[font] = res + end + local rs = res[script] + if not rs then + rs = { } + res[script] = rs + end + local rl = rs[language] + if not rl then + rl = { + -- indexed but we can also add specific data by key + } + rs[language] = rl + local sequences = tfmdata.resources.sequences +-- setmetatableindex(rl, function(t,k) +-- if type(k) == "number" then +-- local v = enabled and initialize(sequences[k],script,language,enabled) +-- t[k] = v +-- return v +-- end +-- end) +for s=1,#sequences do + local v = enabled and initialize(sequences[s],script,language,enabled) + if v then + rl[#rl+1] = v + end +end + end + return rl +end + +-- elseif id == glue_code then +-- if p[5] then -- chain +-- local pc = pp[32] +-- if pc then +-- start, ok = start, false -- p[1](start,kind,p[2],pc,p[3],p[4]) +-- if ok then +-- done = true +-- end +-- if start then start = start.next end +-- else +-- start = start.next +-- end +-- else +-- start = start.next +-- end + +-- there will be a new direction parser (pre-parsed etc) + +-- less bytecode: 290 -> 254 +-- +-- attr = attr or false +-- +-- local a = getattr(start,0) +-- if (a == attr and (not attribute or getattr(start,a_state) == attribute)) or (not attribute or getattr(start,a_state) == attribute) then +-- -- the action +-- end + +local function featuresprocessor(head,font,attr) + + local lookuphash = lookuphashes[font] -- we can also check sequences here + + if not lookuphash then + return head, false + end + + if trace_steps then + checkstep(head) + end + + tfmdata = fontdata[font] + descriptions = tfmdata.descriptions + characters = tfmdata.characters + resources = tfmdata.resources + + marks = resources.marks + anchorlookups = resources.lookup_to_anchor + lookuptable = resources.lookups + lookuptypes = resources.lookuptypes + + currentfont = font + rlmode = 0 + + local sequences = 
resources.sequences + local done = false + local datasets = otf.dataset(tfmdata,font,attr) + + local dirstack = { } -- could move outside function + + -- We could work on sub start-stop ranges instead but I wonder if there is that + -- much speed gain (experiments showed that it made not much sense) and we need + -- to keep track of directions anyway. Also at some point I want to play with + -- font interactions and then we do need the full sweeps. + + -- Keeping track of the headnode is needed for devanagari (I generalized it a bit + -- so that multiple cases are also covered.) + + for s=1,#datasets do + local dataset = datasets[s] + featurevalue = dataset[1] -- todo: pass to function instead of using a global + + local sequence = dataset[5] -- sequences[s] -- also dataset[5] + local rlparmode = 0 + local topstack = 0 + local success = false + local attribute = dataset[2] + local chain = dataset[3] -- sequence.chain or 0 + local typ = sequence.type + local subtables = sequence.subtables + if chain < 0 then + -- this is a limited case, no special treatments like 'init' etc + local handler = handlers[typ] + -- we need to get rid of this slide! probably no longer needed in latest luatex + local start = find_node_tail(head) -- slow (we can store tail because there's always a skip at the end): todo + while start do + local id = start.id + if id == glyph_code then + if start.font == font and start.subtype<256 then + local a = start[0] + if a then + a = a == attr + else + a = true + end + if a then + for i=1,#subtables do + local lookupname = subtables[i] + local lookupcache = lookuphash[lookupname] + if lookupcache then + local lookupmatch = lookupcache[start.char] + if lookupmatch then + head, start, success = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) + if success then + break + end + end + else + report_missing_cache(typ,lookupname) + end + end + if start then start = start.prev end + else + start = start.prev + end + else + start = start.prev + end + else + start = start.prev + end + end + else + local handler = handlers[typ] + local ns = #subtables + local start = head -- local ? + rlmode = 0 -- to be checked ? 
+ if ns == 1 then -- happens often + local lookupname = subtables[1] + local lookupcache = lookuphash[lookupname] + if not lookupcache then -- also check for empty cache + report_missing_cache(typ,lookupname) + else + + local function subrun(start) + -- mostly for gsub, gpos would demand a more clever approach + local head = start + local done = false + while start do + local id = start.id + if id == glyph_code and start.font == font and start.subtype <256 then + local a = start[0] + if a then + a = (a == attr) and (not attribute or start[a_state] == attribute) + else + a = not attribute or start[a_state] == attribute + end + if a then + local lookupmatch = lookupcache[start.char] + if lookupmatch then + -- sequence kan weg + local ok + head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) + if ok then + done = true + end + end + if start then start = start.next end + else + start = start.next + end + else + start = start.next + end + end + if done then + success = true + return head + end + end + + local function kerndisc(disc) -- we can assume that prev and next are glyphs + local prev = disc.prev + local next = disc.next + if prev and next then + prev.next = next + -- next.prev = prev + local a = prev[0] + if a then + a = (a == attr) and (not attribute or prev[a_state] == attribute) + else + a = not attribute or prev[a_state] == attribute + end + if a then + local lookupmatch = lookupcache[prev.char] + if lookupmatch then + -- sequence kan weg + local h, d, ok = handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) + if ok then + done = true + success = true + end + end + end + prev.next = disc + -- next.prev = disc + end + return next + end + + while start do + local id = start.id + if id == glyph_code then + if start.font == font and start.subtype<256 then + local a = start[0] + if a then + a = (a == attr) and (not attribute or start[a_state] == attribute) + else + a = not attribute or start[a_state] == attribute + end + if a then + local lookupmatch = lookupcache[start.char] + if lookupmatch then + -- sequence kan weg + local ok + head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) + if ok then + success = true + end + end + if start then start = start.next end + else + start = start.next + end + else + start = start.next + end + elseif id == disc_code then + -- mostly for gsub + if start.subtype == discretionary_code then + local pre = start.pre + if pre then + local new = subrun(pre) + if new then start.pre = new end + end + local post = start.post + if post then + local new = subrun(post) + if new then start.post = new end + end + local replace = start.replace + if replace then + local new = subrun(replace) + if new then start.replace = new end + end +elseif typ == "gpos_single" or typ == "gpos_pair" then + kerndisc(start) + end + start = start.next + elseif id == whatsit_code then -- will be function + local subtype = start.subtype + if subtype == dir_code then + local dir = start.dir + if dir == "+TRT" or dir == "+TLT" then + topstack = topstack + 1 + dirstack[topstack] = dir + elseif dir == "-TRT" or dir == "-TLT" then + topstack = topstack - 1 + end + local newdir = dirstack[topstack] + if newdir == "+TRT" then + rlmode = -1 + elseif newdir == "+TLT" then + rlmode = 1 + else + rlmode = rlparmode + end + if trace_directions then + report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir) + end + elseif 
subtype == localpar_code then + local dir = start.dir + if dir == "TRT" then + rlparmode = -1 + elseif dir == "TLT" then + rlparmode = 1 + else + rlparmode = 0 + end + -- one might wonder if the par dir should be looked at, so we might as well drop the next line + rlmode = rlparmode + if trace_directions then + report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode) + end + end + start = start.next + elseif id == math_code then + start = end_of_math(start).next + else + start = start.next + end + end + end + else + + local function subrun(start) + -- mostly for gsub, gpos would demand a more clever approach + local head = start + local done = false + while start do + local id = start.id + if id == glyph_code and start.id == font and start.subtype <256 then + local a = start[0] + if a then + a = (a == attr) and (not attribute or start[a_state] == attribute) + else + a = not attribute or start[a_state] == attribute + end + if a then + for i=1,ns do + local lookupname = subtables[i] + local lookupcache = lookuphash[lookupname] + if lookupcache then + local lookupmatch = lookupcache[start.char] + if lookupmatch then + -- we could move all code inline but that makes things even more unreadable + local ok + head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) + if ok then + done = true + break + elseif not start then + -- don't ask why ... shouldn't happen + break + end + end + else + report_missing_cache(typ,lookupname) + end + end + if start then start = start.next end + else + start = start.next + end + else + start = start.next + end + end + if done then + success = true + return head + end + end + + local function kerndisc(disc) -- we can assume that prev and next are glyphs + local prev = disc.prev + local next = disc.next + if prev and next then + prev.next = next + -- next.prev = prev + local a = prev[0] + if a then + a = (a == attr) and (not attribute or prev[a_state] == attribute) + else + a = not attribute or prev[a_state] == attribute + end + if a then + for i=1,ns do + local lookupname = subtables[i] + local lookupcache = lookuphash[lookupname] + if lookupcache then + local lookupmatch = lookupcache[prev.char] + if lookupmatch then + -- we could move all code inline but that makes things even more unreadable + local h, d, ok = handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) + if ok then + done = true + break + end + end + else + report_missing_cache(typ,lookupname) + end + end + end + prev.next = disc + -- next.prev = disc + end + return next + end + + while start do + local id = start.id + if id == glyph_code then + if start.font == font and start.subtype<256 then + local a = start[0] + if a then + a = (a == attr) and (not attribute or start[a_state] == attribute) + else + a = not attribute or start[a_state] == attribute + end + if a then + for i=1,ns do + local lookupname = subtables[i] + local lookupcache = lookuphash[lookupname] + if lookupcache then + local lookupmatch = lookupcache[start.char] + if lookupmatch then + -- we could move all code inline but that makes things even more unreadable + local ok + head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) + if ok then + success = true + break + elseif not start then + -- don't ask why ... 
shouldn't happen + break + end + end + else + report_missing_cache(typ,lookupname) + end + end + if start then start = start.next end + else + start = start.next + end + else + start = start.next + end + elseif id == disc_code then + -- mostly for gsub + if start.subtype == discretionary_code then + local pre = start.pre + if pre then + local new = subrun(pre) + if new then start.pre = new end + end + local post = start.post + if post then + local new = subrun(post) + if new then start.post = new end + end + local replace = start.replace + if replace then + local new = subrun(replace) + if new then start.replace = new end + end +elseif typ == "gpos_single" or typ == "gpos_pair" then + kerndisc(start) + end + start = start.next + elseif id == whatsit_code then + local subtype = start.subtype + if subtype == dir_code then + local dir = start.dir + if dir == "+TRT" or dir == "+TLT" then + topstack = topstack + 1 + dirstack[topstack] = dir + elseif dir == "-TRT" or dir == "-TLT" then + topstack = topstack - 1 + end + local newdir = dirstack[topstack] + if newdir == "+TRT" then + rlmode = -1 + elseif newdir == "+TLT" then + rlmode = 1 + else + rlmode = rlparmode + end + if trace_directions then + report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir) + end + elseif subtype == localpar_code then + local dir = start.dir + if dir == "TRT" then + rlparmode = -1 + elseif dir == "TLT" then + rlparmode = 1 + else + rlparmode = 0 + end + rlmode = rlparmode + if trace_directions then + report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode) + end + end + start = start.next + elseif id == math_code then + start = end_of_math(start).next + else + start = start.next + end + end + end + end + if success then + done = true + end + if trace_steps then -- ? + registerstep(head) + end + end + return head, done +end + +local function generic(lookupdata,lookupname,unicode,lookuphash) + local target = lookuphash[lookupname] + if target then + target[unicode] = lookupdata + else + lookuphash[lookupname] = { [unicode] = lookupdata } + end +end + +local action = { + + substitution = generic, + multiple = generic, + alternate = generic, + position = generic, + + ligature = function(lookupdata,lookupname,unicode,lookuphash) + local target = lookuphash[lookupname] + if not target then + target = { } + lookuphash[lookupname] = target + end + for i=1,#lookupdata do + local li = lookupdata[i] + local tu = target[li] + if not tu then + tu = { } + target[li] = tu + end + target = tu + end + target.ligature = unicode + end, + + pair = function(lookupdata,lookupname,unicode,lookuphash) + local target = lookuphash[lookupname] + if not target then + target = { } + lookuphash[lookupname] = target + end + local others = target[unicode] + local paired = lookupdata[1] + if others then + others[paired] = lookupdata + else + others = { [paired] = lookupdata } + target[unicode] = others + end + end, + +} + +local function prepare_lookups(tfmdata) + + local rawdata = tfmdata.shared.rawdata + local resources = rawdata.resources + local lookuphash = resources.lookuphash + local anchor_to_lookup = resources.anchor_to_lookup + local lookup_to_anchor = resources.lookup_to_anchor + local lookuptypes = resources.lookuptypes + local characters = tfmdata.characters + local descriptions = tfmdata.descriptions + + -- we cannot free the entries in the descriptions as sometimes we access + -- then directly (for instance anchors) ... 
selectively freeing does save + -- much memory as it's only a reference to a table and the slot in the + -- description hash is not freed anyway + + for unicode, character in next, characters do -- we cannot loop over descriptions ! + + local description = descriptions[unicode] + + if description then + + local lookups = description.slookups + if lookups then + for lookupname, lookupdata in next, lookups do + action[lookuptypes[lookupname]](lookupdata,lookupname,unicode,lookuphash) + end + end + + local lookups = description.mlookups + if lookups then + for lookupname, lookuplist in next, lookups do + local lookuptype = lookuptypes[lookupname] + for l=1,#lookuplist do + local lookupdata = lookuplist[l] + action[lookuptype](lookupdata,lookupname,unicode,lookuphash) + end + end + end + + local list = description.kerns + if list then + for lookup, krn in next, list do -- ref to glyph, saves lookup + local target = lookuphash[lookup] + if target then + target[unicode] = krn + else + lookuphash[lookup] = { [unicode] = krn } + end + end + end + + local list = description.anchors + if list then + for typ, anchors in next, list do -- types + if typ == "mark" or typ == "cexit" then -- or entry? + for name, anchor in next, anchors do + local lookups = anchor_to_lookup[name] + if lookups then + for lookup, _ in next, lookups do + local target = lookuphash[lookup] + if target then + target[unicode] = anchors + else + lookuphash[lookup] = { [unicode] = anchors } + end + end + end + end + end + end + end + + end + + end + +end + +local function split(replacement,original) + local result = { } + for i=1,#replacement do + result[original[i]] = replacement[i] + end + return result +end + +local valid = { + coverage = { chainsub = true, chainpos = true, contextsub = true }, + reversecoverage = { reversesub = true }, + glyphs = { chainsub = true, chainpos = true }, +} + +local function prepare_contextchains(tfmdata) + local rawdata = tfmdata.shared.rawdata + local resources = rawdata.resources + local lookuphash = resources.lookuphash + local lookups = rawdata.lookups + if lookups then + for lookupname, lookupdata in next, rawdata.lookups do + local lookuptype = lookupdata.type + if lookuptype then + local rules = lookupdata.rules + if rules then + local format = lookupdata.format + local validformat = valid[format] + if not validformat then + report_prepare("unsupported format %a",format) + elseif not validformat[lookuptype] then + -- todo: dejavu-serif has one (but i need to see what use it has) + report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookupname) + else + local contexts = lookuphash[lookupname] + if not contexts then + contexts = { } + lookuphash[lookupname] = contexts + end + local t, nt = { }, 0 + for nofrules=1,#rules do + local rule = rules[nofrules] + local current = rule.current + local before = rule.before + local after = rule.after + local replacements = rule.replacements + local sequence = { } + local nofsequences = 0 + -- Eventually we can store start, stop and sequence in the cached file + -- but then less sharing takes place so best not do that without a lot + -- of profiling so let's forget about it. 
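                            -- The rule is flattened into a single `sequence` array: the
                            -- `before` glyph classes first, then `current` (its span
                            -- recorded as `start` .. `stop`), then `after`. With
                            -- illustrative values before = { B }, current = { C1, C2 },
                            -- after = { A } this yields sequence = { B, C1, C2, A }
                            -- with start = 2 and stop = 3.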
+ if before then + for n=1,#before do + nofsequences = nofsequences + 1 + sequence[nofsequences] = before[n] + end + end + local start = nofsequences + 1 + for n=1,#current do + nofsequences = nofsequences + 1 + sequence[nofsequences] = current[n] + end + local stop = nofsequences + if after then + for n=1,#after do + nofsequences = nofsequences + 1 + sequence[nofsequences] = after[n] + end + end + if sequence[1] then + -- Replacements only happen with reverse lookups as they are single only. We + -- could pack them into current (replacement value instead of true) and then + -- use sequence[start] instead but it's somewhat ugly. + nt = nt + 1 + t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups, replacements } + for unic, _ in next, sequence[start] do + local cu = contexts[unic] + if not cu then + contexts[unic] = t + end + end + end + end + end + else + -- no rules + end + else + report_prepare("missing lookuptype for lookupname %a",lookupname) + end + end + end +end + +-- we can consider lookuphash == false (initialized but empty) vs lookuphash == table + +local function featuresinitializer(tfmdata,value) + if true then -- value then + -- beware we need to use the topmost properties table + local rawdata = tfmdata.shared.rawdata + local properties = rawdata.properties + if not properties.initialized then + local starttime = trace_preparing and os.clock() + local resources = rawdata.resources + resources.lookuphash = resources.lookuphash or { } + prepare_contextchains(tfmdata) + prepare_lookups(tfmdata) + properties.initialized = true + if trace_preparing then + report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,tfmdata.properties.fullname) + end + end + end +end + +registerotffeature { + name = "features", + description = "features", + default = true, + initializers = { + position = 1, + node = featuresinitializer, + }, + processors = { + node = featuresprocessor, + } +} + +-- This can be used for extra handlers, but should be used with care! + +otf.handlers = handlers diff --git a/src/fontloader/fontloader-fonts-tfm.lua b/src/fontloader/fontloader-fonts-tfm.lua new file mode 100644 index 0000000..b9bb1bd --- /dev/null +++ b/src/fontloader/fontloader-fonts-tfm.lua @@ -0,0 +1,38 @@ +if not modules then modules = { } end modules ['luatex-fonts-tfm'] = { + version = 1.001, + comment = "companion to luatex-*.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end + +local fonts = fonts +local tfm = { } +fonts.handlers.tfm = tfm +fonts.formats.tfm = "type1" -- we need to have at least a value here + +function fonts.readers.tfm(specification) + local fullname = specification.filename or "" + if fullname == "" then + local forced = specification.forced or "" + if forced ~= "" then + fullname = specification.name .. "." .. 
forced + else + fullname = specification.name + end + end + local foundname = resolvers.findbinfile(fullname, 'tfm') or "" + if foundname == "" then + foundname = resolvers.findbinfile(fullname, 'ofm') or "" + end + if foundname ~= "" then + specification.filename = foundname + specification.format = "ofm" + return font.read_tfm(specification.filename,specification.size) + end +end diff --git a/src/fontloader/luaotfload-basics-gen.lua b/src/fontloader/luaotfload-basics-gen.lua deleted file mode 100644 index c19a49a..0000000 --- a/src/fontloader/luaotfload-basics-gen.lua +++ /dev/null @@ -1,368 +0,0 @@ -if not modules then modules = { } end modules ['luat-basics-gen'] = { - version = 1.100, - comment = "companion to luatex-*.tex", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end - -local dummyfunction = function() -end - -local dummyreporter = function(c) - return function(...) - (texio.reporter or texio.write_nl)(c .. " : " .. string.formatters(...)) - end -end - -statistics = { - register = dummyfunction, - starttiming = dummyfunction, - stoptiming = dummyfunction, - elapsedtime = nil, -} - -directives = { - register = dummyfunction, - enable = dummyfunction, - disable = dummyfunction, -} - -trackers = { - register = dummyfunction, - enable = dummyfunction, - disable = dummyfunction, -} - -experiments = { - register = dummyfunction, - enable = dummyfunction, - disable = dummyfunction, -} - -storage = { -- probably no longer needed - register = dummyfunction, - shared = { }, -} - -logs = { - new = dummyreporter, - reporter = dummyreporter, - messenger = dummyreporter, - report = dummyfunction, -} - -callbacks = { - register = function(n,f) return callback.register(n,f) end, - -} - -utilities = { - storage = { - allocate = function(t) return t or { } end, - mark = function(t) return t or { } end, - }, -} - -characters = characters or { - data = { } -} - --- we need to cheat a bit here - -texconfig.kpse_init = true - -resolvers = resolvers or { } -- no fancy file helpers used - -local remapper = { - otf = "opentype fonts", - ttf = "truetype fonts", - ttc = "truetype fonts", - dfont = "truetype fonts", -- "truetype dictionary", - cid = "cid maps", - cidmap = "cid maps", - fea = "font feature files", - pfa = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this! - pfb = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this! 
- afm = "afm", -} - -function resolvers.findfile(name,fileformat) - name = string.gsub(name,"\\","/") - if not fileformat or fileformat == "" then - fileformat = file.suffix(name) - if fileformat == "" then - fileformat = "tex" - end - end - fileformat = string.lower(fileformat) - fileformat = remapper[fileformat] or fileformat - local found = kpse.find_file(name,fileformat) - if not found or found == "" then - found = kpse.find_file(name,"other text files") - end - return found -end - --- function resolvers.findbinfile(name,fileformat) --- if not fileformat or fileformat == "" then --- fileformat = file.suffix(name) --- end --- return resolvers.findfile(name,(fileformat and remapper[fileformat]) or fileformat) --- end - -resolvers.findbinfile = resolvers.findfile - -function resolvers.loadbinfile(filename,filetype) - local data = io.loaddata(filename) - return true, data, #data -end - -function resolvers.resolve(s) - return s -end - -function resolvers.unresolve(s) - return s -end - --- Caches ... I will make a real stupid version some day when I'm in the --- mood. After all, the generic code does not need the more advanced --- ConTeXt features. Cached data is not shared between ConTeXt and other --- usage as I don't want any dependency at all. Also, ConTeXt might have --- different needs and tricks added. - ---~ containers.usecache = true - -caches = { } - -local writable = nil -local readables = { } -local usingjit = jit - -if not caches.namespace or caches.namespace == "" or caches.namespace == "context" then - caches.namespace = 'generic' -end - -do - - -- standard context tree setup - - local cachepaths = kpse.expand_var('$TEXMFCACHE') or "" - - -- quite like tex live or so (the weird $TEXMFCACHE test seems to be needed on miktex) - - if cachepaths == "" or cachepaths == "$TEXMFCACHE" then - cachepaths = kpse.expand_var('$TEXMFVAR') or "" - end - - -- this also happened to be used (the weird $TEXMFVAR test seems to be needed on miktex) - - if cachepaths == "" or cachepaths == "$TEXMFVAR" then - cachepaths = kpse.expand_var('$VARTEXMF') or "" - end - - -- and this is a last resort (hm, we could use TEMP or TEMPDIR) - - if cachepaths == "" then - local fallbacks = { "TMPDIR", "TEMPDIR", "TMP", "TEMP", "HOME", "HOMEPATH" } - for i=1,#fallbacks do - cachepaths = os.getenv(fallbacks[i]) or "" - if cachepath ~= "" and lfs.isdir(cachepath) then - break - end - end - end - - if cachepaths == "" then - cachepaths = "." 
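   -- Resolution order for the cache root so far: $TEXMFCACHE, then $TEXMFVAR,
   -- then $VARTEXMF, then the TMPDIR/TEMPDIR/TMP/TEMP/HOME/HOMEPATH
   -- environment variables, with the current directory as the last resort.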
- end - - cachepaths = string.split(cachepaths,os.type == "windows" and ";" or ":") - - for i=1,#cachepaths do - local cachepath = cachepaths[i] - if not lfs.isdir(cachepath) then - lfs.mkdirs(cachepath) -- needed for texlive and latex - if lfs.isdir(cachepath) then - texio.write(string.format("(created cache path: %s)",cachepath)) - end - end - if file.is_writable(cachepath) then - writable = file.join(cachepath,"luatex-cache") - lfs.mkdir(writable) - writable = file.join(writable,caches.namespace) - lfs.mkdir(writable) - break - end - end - - for i=1,#cachepaths do - if file.is_readable(cachepaths[i]) then - readables[#readables+1] = file.join(cachepaths[i],"luatex-cache",caches.namespace) - end - end - - if not writable then - texio.write_nl("quiting: fix your writable cache path") - os.exit() - elseif #readables == 0 then - texio.write_nl("quiting: fix your readable cache path") - os.exit() - elseif #readables == 1 and readables[1] == writable then - texio.write(string.format("(using cache: %s)",writable)) - else - texio.write(string.format("(using write cache: %s)",writable)) - texio.write(string.format("(using read cache: %s)",table.concat(readables, " "))) - end - -end - -function caches.getwritablepath(category,subcategory) - local path = file.join(writable,category) - lfs.mkdir(path) - path = file.join(path,subcategory) - lfs.mkdir(path) - return path -end - -function caches.getreadablepaths(category,subcategory) - local t = { } - for i=1,#readables do - t[i] = file.join(readables[i],category,subcategory) - end - return t -end - -local function makefullname(path,name) - if path and path ~= "" then - return file.addsuffix(file.join(path,name),"lua"), file.addsuffix(file.join(path,name),usingjit and "lub" or "luc") - end -end - -function caches.is_writable(path,name) - local fullname = makefullname(path,name) - return fullname and file.is_writable(fullname) -end - -function caches.loaddata(paths,name) - for i=1,#paths do - local data = false - local luaname, lucname = makefullname(paths[i],name) - if lucname and not lfs.isfile(lucname) and type(caches.compile) == "function" then - -- in case we used luatex and luajittex mixed ... lub or luc file - texio.write(string.format("(compiling luc: %s)",lucname)) - data = loadfile(luaname) - if data then - data = data() - end - if data then - caches.compile(data,luaname,lucname) - return data - end - end - if lucname and lfs.isfile(lucname) then -- maybe also check for size - texio.write(string.format("(load luc: %s)",lucname)) - data = loadfile(lucname) - if data then - data = data() - end - if data then - return data - else - texio.write(string.format("(loading failed: %s)",lucname)) - end - end - if luaname and lfs.isfile(luaname) then - texio.write(string.format("(load lua: %s)",luaname)) - data = loadfile(luaname) - if data then - data = data() - end - if data then - return data - end - end - end -end - -function caches.savedata(path,name,data) - local luaname, lucname = makefullname(path,name) - if luaname then - texio.write(string.format("(save: %s)",luaname)) - table.tofile(luaname,data,true) - if lucname and type(caches.compile) == "function" then - os.remove(lucname) -- better be safe - texio.write(string.format("(save: %s)",lucname)) - caches.compile(data,luaname,lucname) - end - end -end - --- According to KH os.execute is not permitted in plain/latex so there is --- no reason to use the normal context way. So the method here is slightly --- different from the one we have in context. 
We also use different suffixes --- as we don't want any clashes (sharing cache files is not that handy as --- context moves on faster.) --- --- Beware: serialization might fail on large files (so maybe we should pcall --- this) in which case one should limit the method to luac and enable support --- for execution. - --- function caches.compile(data,luaname,lucname) --- local d = io.loaddata(luaname) --- if not d or d == "" then --- d = table.serialize(data,true) -- slow --- end --- if d and d ~= "" then --- local f = io.open(lucname,'w') --- if f then --- local s = loadstring(d) --- if s then --- f:write(string.dump(s,true)) --- end --- f:close() --- end --- end --- end - -function caches.compile(data,luaname,lucname) - local d = io.loaddata(luaname) - if not d or d == "" then - d = table.serialize(data,true) -- slow - end - if d and d ~= "" then - local f = io.open(lucname,'wb') - if f then - local s = loadstring(d) - if s then - f:write(string.dump(s,true)) - end - f:close() - end - end -end - --- - -function table.setmetatableindex(t,f) - setmetatable(t,{ __index = f }) -end - --- helper for plain: - -arguments = { } - -if arg then - for i=1,#arg do - local k, v = string.match(arg[i],"^%-%-([^=]+)=?(.-)$") - if k and v then - arguments[k] = v - end - end -end diff --git a/src/fontloader/luaotfload-basics-nod.lua b/src/fontloader/luaotfload-basics-nod.lua deleted file mode 100644 index 373dab5..0000000 --- a/src/fontloader/luaotfload-basics-nod.lua +++ /dev/null @@ -1,178 +0,0 @@ -if not modules then modules = { } end modules ['luatex-fonts-nod'] = { - version = 1.001, - comment = "companion to luatex-fonts.lua", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end - --- Don't depend on code here as it is only needed to complement the --- font handler code. - --- Attributes: - -if tex.attribute[0] ~= 0 then - - texio.write_nl("log","!") - texio.write_nl("log","! Attribute 0 is reserved for ConTeXt's font feature management and has to be") - texio.write_nl("log","! set to zero. Also, some attributes in the range 1-255 are used for special") - texio.write_nl("log","! 
purposes so setting them at the TeX end might break the font handler.") - texio.write_nl("log","!") - - tex.attribute[0] = 0 -- else no features - -end - -attributes = attributes or { } -attributes.unsetvalue = -0x7FFFFFFF - -local numbers, last = { }, 127 - -attributes.private = attributes.private or function(name) - local number = numbers[name] - if not number then - if last < 255 then - last = last + 1 - end - number = last - numbers[name] = number - end - return number -end - --- Nodes: - -nodes = { } -nodes.pool = { } -nodes.handlers = { } - -local nodecodes = { } for k,v in next, node.types () do nodecodes[string.gsub(v,"_","")] = k end -local whatcodes = { } for k,v in next, node.whatsits() do whatcodes[string.gsub(v,"_","")] = k end -local glyphcodes = { [0] = "character", "glyph", "ligature", "ghost", "left", "right" } -local disccodes = { [0] = "discretionary", "explicit", "automatic", "regular", "first", "second" } - -nodes.nodecodes = nodecodes -nodes.whatcodes = whatcodes -nodes.whatsitcodes = whatcodes -nodes.glyphcodes = glyphcodes -nodes.disccodes = disccodes - -local free_node = node.free -local remove_node = node.remove -local new_node = node.new -local traverse_id = node.traverse_id - -nodes.handlers.protectglyphs = node.protect_glyphs -nodes.handlers.unprotectglyphs = node.unprotect_glyphs - -local math_code = nodecodes.math -local end_of_math = node.end_of_math - -function node.end_of_math(n) - if n.id == math_code and n.subtype == 1 then - return n - else - return end_of_math(n) - end -end - -function nodes.remove(head, current, free_too) - local t = current - head, current = remove_node(head,current) - if t then - if free_too then - free_node(t) - t = nil - else - t.next, t.prev = nil, nil - end - end - return head, current, t -end - -function nodes.delete(head,current) - return nodes.remove(head,current,true) -end - -function nodes.pool.kern(k) - local n = new_node("kern",1) - n.kern = k - return n -end - --- experimental - -local getfield = node.getfield or function(n,tag) return n[tag] end -local setfield = node.setfield or function(n,tag,value) n[tag] = value end - -nodes.getfield = getfield -nodes.setfield = setfield - -nodes.getattr = getfield -nodes.setattr = setfield - -if node.getid then nodes.getid = node.getid else function nodes.getid (n) return getfield(n,"id") end end -if node.getsubtype then nodes.getsubtype = node.getsubtype else function nodes.getsubtype(n) return getfield(n,"subtype") end end -if node.getnext then nodes.getnext = node.getnext else function nodes.getnext (n) return getfield(n,"next") end end -if node.getprev then nodes.getprev = node.getprev else function nodes.getprev (n) return getfield(n,"prev") end end -if node.getchar then nodes.getchar = node.getchar else function nodes.getchar (n) return getfield(n,"char") end end -if node.getfont then nodes.getfont = node.getfont else function nodes.getfont (n) return getfield(n,"font") end end -if node.getlist then nodes.getlist = node.getlist else function nodes.getlist (n) return getfield(n,"list") end end - -function nodes.tonut (n) return n end -function nodes.tonode(n) return n end - --- being lazy ... just copy a bunch ... 
not all needed in generic but we assume --- nodes to be kind of private anyway - -nodes.tostring = node.tostring or tostring -nodes.copy = node.copy -nodes.copy_list = node.copy_list -nodes.delete = node.delete -nodes.dimensions = node.dimensions -nodes.end_of_math = node.end_of_math -nodes.flush_list = node.flush_list -nodes.flush_node = node.flush_node -nodes.free = node.free -nodes.insert_after = node.insert_after -nodes.insert_before = node.insert_before -nodes.hpack = node.hpack -nodes.new = node.new -nodes.tail = node.tail -nodes.traverse = node.traverse -nodes.traverse_id = node.traverse_id -nodes.slide = node.slide -nodes.vpack = node.vpack - -nodes.first_glyph = node.first_glyph -nodes.first_character = node.first_character -nodes.has_glyph = node.has_glyph or node.first_glyph - -nodes.current_attr = node.current_attr -nodes.do_ligature_n = node.do_ligature_n -nodes.has_field = node.has_field -nodes.last_node = node.last_node -nodes.usedlist = node.usedlist -nodes.protrusion_skippable = node.protrusion_skippable -nodes.write = node.write - -nodes.has_attribute = node.has_attribute -nodes.set_attribute = node.set_attribute -nodes.unset_attribute = node.unset_attribute - -nodes.protect_glyphs = node.protect_glyphs -nodes.unprotect_glyphs = node.unprotect_glyphs -nodes.kerning = node.kerning -nodes.ligaturing = node.ligaturing -nodes.mlist_to_hlist = node.mlist_to_hlist - --- in generic code, at least for some time, we stay nodes, while in context --- we can go nuts (e.g. experimental); this split permits us us keep code --- used elsewhere stable but at the same time play around in context - -nodes.nuts = nodes diff --git a/src/fontloader/luaotfload-fontloader.lua b/src/fontloader/luaotfload-fontloader.lua deleted file mode 100644 index e9c6638..0000000 --- a/src/fontloader/luaotfload-fontloader.lua +++ /dev/null @@ -1,14628 +0,0 @@ --- merged file : luatex-fonts-merged.lua --- parent file : luatex-fonts.lua --- merge date : 12/06/14 14:20:08 - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['l-lua']={ - version=1.001, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local major,minor=string.match(_VERSION,"^[^%d]+(%d+)%.(%d+).*$") -_MAJORVERSION=tonumber(major) or 5 -_MINORVERSION=tonumber(minor) or 1 -_LUAVERSION=_MAJORVERSION+_MINORVERSION/10 -if not lpeg then - lpeg=require("lpeg") -end -if loadstring then - local loadnormal=load - function load(first,...) - if type(first)=="string" then - return loadstring(first,...) - else - return loadnormal(first,...) - end - end -else - loadstring=load -end -if not ipairs then - local function iterate(a,i) - i=i+1 - local v=a[i] - if v~=nil then - return i,v - end - end - function ipairs(a) - return iterate,a,0 - end -end -if not pairs then - function pairs(t) - return next,t - end -end -if not table.unpack then - table.unpack=_G.unpack -elseif not unpack then - _G.unpack=table.unpack -end -if not package.loaders then - package.loaders=package.searchers -end -local print,select,tostring=print,select,tostring -local inspectors={} -function setinspector(inspector) - inspectors[#inspectors+1]=inspector -end -function inspect(...) - for s=1,select("#",...) do - local value=select(s,...) 
- local done=false - for i=1,#inspectors do - done=inspectors[i](value) - if done then - break - end - end - if not done then - print(tostring(value)) - end - end -end -local dummy=function() end -function optionalrequire(...) - local ok,result=xpcall(require,dummy,...) - if ok then - return result - end -end -if lua then - lua.mask=load([[τεχ = 1]]) and "utf" or "ascii" -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['l-lpeg']={ - version=1.001, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -lpeg=require("lpeg") -if not lpeg.print then function lpeg.print(...) print(lpeg.pcode(...)) end end -local type,next,tostring=type,next,tostring -local byte,char,gmatch,format=string.byte,string.char,string.gmatch,string.format -local floor=math.floor -local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt -local lpegtype,lpegmatch,lpegprint=lpeg.type,lpeg.match,lpeg.print -if setinspector then - setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end) -end -lpeg.patterns=lpeg.patterns or {} -local patterns=lpeg.patterns -local anything=P(1) -local endofstring=P(-1) -local alwaysmatched=P(true) -patterns.anything=anything -patterns.endofstring=endofstring -patterns.beginofstring=alwaysmatched -patterns.alwaysmatched=alwaysmatched -local sign=S('+-') -local zero=P('0') -local digit=R('09') -local octdigit=R("07") -local lowercase=R("az") -local uppercase=R("AZ") -local underscore=P("_") -local hexdigit=digit+lowercase+uppercase -local cr,lf,crlf=P("\r"),P("\n"),P("\r\n") -local newline=P("\r")*(P("\n")+P(true))+P("\n") -local escaped=P("\\")*anything -local squote=P("'") -local dquote=P('"') -local space=P(" ") -local period=P(".") -local comma=P(",") -local utfbom_32_be=P('\000\000\254\255') -local utfbom_32_le=P('\255\254\000\000') -local utfbom_16_be=P('\254\255') -local utfbom_16_le=P('\255\254') -local utfbom_8=P('\239\187\191') -local utfbom=utfbom_32_be+utfbom_32_le+utfbom_16_be+utfbom_16_le+utfbom_8 -local utftype=utfbom_32_be*Cc("utf-32-be")+utfbom_32_le*Cc("utf-32-le")+utfbom_16_be*Cc("utf-16-be")+utfbom_16_le*Cc("utf-16-le")+utfbom_8*Cc("utf-8")+alwaysmatched*Cc("utf-8") -local utfstricttype=utfbom_32_be*Cc("utf-32-be")+utfbom_32_le*Cc("utf-32-le")+utfbom_16_be*Cc("utf-16-be")+utfbom_16_le*Cc("utf-16-le")+utfbom_8*Cc("utf-8") -local utfoffset=utfbom_32_be*Cc(4)+utfbom_32_le*Cc(4)+utfbom_16_be*Cc(2)+utfbom_16_le*Cc(2)+utfbom_8*Cc(3)+Cc(0) -local utf8next=R("\128\191") -patterns.utfbom_32_be=utfbom_32_be -patterns.utfbom_32_le=utfbom_32_le -patterns.utfbom_16_be=utfbom_16_be -patterns.utfbom_16_le=utfbom_16_le -patterns.utfbom_8=utfbom_8 -patterns.utf_16_be_nl=P("\000\r\000\n")+P("\000\r")+P("\000\n") -patterns.utf_16_le_nl=P("\r\000\n\000")+P("\r\000")+P("\n\000") -patterns.utf_32_be_nl=P("\000\000\000\r\000\000\000\n")+P("\000\000\000\r")+P("\000\000\000\n") -patterns.utf_32_le_nl=P("\r\000\000\000\n\000\000\000")+P("\r\000\000\000")+P("\n\000\000\000") -patterns.utf8one=R("\000\127") -patterns.utf8two=R("\194\223")*utf8next -patterns.utf8three=R("\224\239")*utf8next*utf8next -patterns.utf8four=R("\240\244")*utf8next*utf8next*utf8next -patterns.utfbom=utfbom -patterns.utftype=utftype -patterns.utfstricttype=utfstricttype -patterns.utfoffset=utfoffset -local 
utf8char=patterns.utf8one+patterns.utf8two+patterns.utf8three+patterns.utf8four -local validutf8char=utf8char^0*endofstring*Cc(true)+Cc(false) -local utf8character=P(1)*R("\128\191")^0 -patterns.utf8=utf8char -patterns.utf8char=utf8char -patterns.utf8character=utf8character -patterns.validutf8=validutf8char -patterns.validutf8char=validutf8char -local eol=S("\n\r") -local spacer=S(" \t\f\v") -local whitespace=eol+spacer -local nonspacer=1-spacer -local nonwhitespace=1-whitespace -patterns.eol=eol -patterns.spacer=spacer -patterns.whitespace=whitespace -patterns.nonspacer=nonspacer -patterns.nonwhitespace=nonwhitespace -local stripper=spacer^0*C((spacer^0*nonspacer^1)^0) -local fullstripper=whitespace^0*C((whitespace^0*nonwhitespace^1)^0) -local collapser=Cs(spacer^0/""*nonspacer^0*((spacer^0/" "*nonspacer^1)^0)) -local b_collapser=Cs(whitespace^0/""*(nonwhitespace^1+whitespace^1/" ")^0) -local e_collapser=Cs((whitespace^1*P(-1)/""+nonwhitespace^1+whitespace^1/" ")^0) -local m_collapser=Cs((nonwhitespace^1+whitespace^1/" ")^0) -local b_stripper=Cs(spacer^0/""*(nonspacer^1+spacer^1/" ")^0) -local e_stripper=Cs((spacer^1*P(-1)/""+nonspacer^1+spacer^1/" ")^0) -local m_stripper=Cs((nonspacer^1+spacer^1/" ")^0) -patterns.stripper=stripper -patterns.fullstripper=fullstripper -patterns.collapser=collapser -patterns.b_collapser=b_collapser -patterns.m_collapser=m_collapser -patterns.e_collapser=e_collapser -patterns.b_stripper=b_stripper -patterns.m_stripper=m_stripper -patterns.e_stripper=e_stripper -patterns.lowercase=lowercase -patterns.uppercase=uppercase -patterns.letter=patterns.lowercase+patterns.uppercase -patterns.space=space -patterns.tab=P("\t") -patterns.spaceortab=patterns.space+patterns.tab -patterns.newline=newline -patterns.emptyline=newline^1 -patterns.equal=P("=") -patterns.comma=comma -patterns.commaspacer=comma*spacer^0 -patterns.period=period -patterns.colon=P(":") -patterns.semicolon=P(";") -patterns.underscore=underscore -patterns.escaped=escaped -patterns.squote=squote -patterns.dquote=dquote -patterns.nosquote=(escaped+(1-squote))^0 -patterns.nodquote=(escaped+(1-dquote))^0 -patterns.unsingle=(squote/"")*patterns.nosquote*(squote/"") -patterns.undouble=(dquote/"")*patterns.nodquote*(dquote/"") -patterns.unquoted=patterns.undouble+patterns.unsingle -patterns.unspacer=((patterns.spacer^1)/"")^0 -patterns.singlequoted=squote*patterns.nosquote*squote -patterns.doublequoted=dquote*patterns.nodquote*dquote -patterns.quoted=patterns.doublequoted+patterns.singlequoted -patterns.digit=digit -patterns.octdigit=octdigit -patterns.hexdigit=hexdigit -patterns.sign=sign -patterns.cardinal=digit^1 -patterns.integer=sign^-1*digit^1 -patterns.unsigned=digit^0*period*digit^1 -patterns.float=sign^-1*patterns.unsigned -patterns.cunsigned=digit^0*comma*digit^1 -patterns.cpunsigned=digit^0*(period+comma)*digit^1 -patterns.cfloat=sign^-1*patterns.cunsigned -patterns.cpfloat=sign^-1*patterns.cpunsigned -patterns.number=patterns.float+patterns.integer -patterns.cnumber=patterns.cfloat+patterns.integer -patterns.cpnumber=patterns.cpfloat+patterns.integer -patterns.oct=zero*octdigit^1 -patterns.octal=patterns.oct -patterns.HEX=zero*P("X")*(digit+uppercase)^1 -patterns.hex=zero*P("x")*(digit+lowercase)^1 -patterns.hexadecimal=zero*S("xX")*hexdigit^1 -patterns.hexafloat=sign^-1*zero*S("xX")*(hexdigit^0*period*hexdigit^1+hexdigit^1*period*hexdigit^0+hexdigit^1)*(S("pP")*sign^-1*hexdigit^1)^-1 -patterns.decafloat=sign^-1*(digit^0*period*digit^1+digit^1*period*digit^0+digit^1)*S("eE")*sign^-1*digit^1 
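-- A few of the numeric patterns above, by example (lpeg patterns match a
-- prefix at the start of the subject): patterns.integer matches "-42",
-- patterns.float matches "-3.14", patterns.hexadecimal matches "0x1F" and
-- patterns.oct matches "0777".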
-patterns.propername=(uppercase+lowercase+underscore)*(uppercase+lowercase+underscore+digit)^0*endofstring -patterns.somecontent=(anything-newline-space)^1 -patterns.beginline=#(1-newline) -patterns.longtostring=Cs(whitespace^0/""*((patterns.quoted+nonwhitespace^1+whitespace^1/""*(P(-1)+Cc(" ")))^0)) -local function anywhere(pattern) - return P { P(pattern)+1*V(1) } -end -lpeg.anywhere=anywhere -function lpeg.instringchecker(p) - p=anywhere(p) - return function(str) - return lpegmatch(p,str) and true or false - end -end -function lpeg.splitter(pattern,action) - return (((1-P(pattern))^1)/action+1)^0 -end -function lpeg.tsplitter(pattern,action) - return Ct((((1-P(pattern))^1)/action+1)^0) -end -local splitters_s,splitters_m,splitters_t={},{},{} -local function splitat(separator,single) - local splitter=(single and splitters_s[separator]) or splitters_m[separator] - if not splitter then - separator=P(separator) - local other=C((1-separator)^0) - if single then - local any=anything - splitter=other*(separator*C(any^0)+"") - splitters_s[separator]=splitter - else - splitter=other*(separator*other)^0 - splitters_m[separator]=splitter - end - end - return splitter -end -local function tsplitat(separator) - local splitter=splitters_t[separator] - if not splitter then - splitter=Ct(splitat(separator)) - splitters_t[separator]=splitter - end - return splitter -end -lpeg.splitat=splitat -lpeg.tsplitat=tsplitat -function string.splitup(str,separator) - if not separator then - separator="," - end - return lpegmatch(splitters_m[separator] or splitat(separator),str) -end -local cache={} -function lpeg.split(separator,str) - local c=cache[separator] - if not c then - c=tsplitat(separator) - cache[separator]=c - end - return lpegmatch(c,str) -end -function string.split(str,separator) - if separator then - local c=cache[separator] - if not c then - c=tsplitat(separator) - cache[separator]=c - end - return lpegmatch(c,str) - else - return { str } - end -end -local spacing=patterns.spacer^0*newline -local empty=spacing*Cc("") -local nonempty=Cs((1-spacing)^1)*spacing^-1 -local content=(empty+nonempty)^1 -patterns.textline=content -local linesplitter=tsplitat(newline) -patterns.linesplitter=linesplitter -function string.splitlines(str) - return lpegmatch(linesplitter,str) -end -local cache={} -function lpeg.checkedsplit(separator,str) - local c=cache[separator] - if not c then - separator=P(separator) - local other=C((1-separator)^1) - c=Ct(separator^0*other*(separator^1*other)^0) - cache[separator]=c - end - return lpegmatch(c,str) -end -function string.checkedsplit(str,separator) - local c=cache[separator] - if not c then - separator=P(separator) - local other=C((1-separator)^1) - c=Ct(separator^0*other*(separator^1*other)^0) - cache[separator]=c - end - return lpegmatch(c,str) -end -local function f2(s) local c1,c2=byte(s,1,2) return c1*64+c2-12416 end -local function f3(s) local c1,c2,c3=byte(s,1,3) return (c1*64+c2)*64+c3-925824 end -local function f4(s) local c1,c2,c3,c4=byte(s,1,4) return ((c1*64+c2)*64+c3)*64+c4-63447168 end -local utf8byte=patterns.utf8one/byte+patterns.utf8two/f2+patterns.utf8three/f3+patterns.utf8four/f4 -patterns.utf8byte=utf8byte -local cache={} -function lpeg.stripper(str) - if type(str)=="string" then - local s=cache[str] - if not s then - s=Cs(((S(str)^1)/""+1)^0) - cache[str]=s - end - return s - else - return Cs(((str^1)/""+1)^0) - end -end -local cache={} -function lpeg.keeper(str) - if type(str)=="string" then - local s=cache[str] - if not s then - 
s=Cs((((1-S(str))^1)/""+1)^0) - cache[str]=s - end - return s - else - return Cs((((1-str)^1)/""+1)^0) - end -end -function lpeg.frontstripper(str) - return (P(str)+P(true))*Cs(anything^0) -end -function lpeg.endstripper(str) - return Cs((1-P(str)*endofstring)^0) -end -function lpeg.replacer(one,two,makefunction,isutf) - local pattern - local u=isutf and utf8char or 1 - if type(one)=="table" then - local no=#one - local p=P(false) - if no==0 then - for k,v in next,one do - p=p+P(k)/v - end - pattern=Cs((p+u)^0) - elseif no==1 then - local o=one[1] - one,two=P(o[1]),o[2] - pattern=Cs((one/two+u)^0) - else - for i=1,no do - local o=one[i] - p=p+P(o[1])/o[2] - end - pattern=Cs((p+u)^0) - end - else - pattern=Cs((P(one)/(two or "")+u)^0) - end - if makefunction then - return function(str) - return lpegmatch(pattern,str) - end - else - return pattern - end -end -function lpeg.finder(lst,makefunction,isutf) - local pattern - if type(lst)=="table" then - pattern=P(false) - if #lst==0 then - for k,v in next,lst do - pattern=pattern+P(k) - end - else - for i=1,#lst do - pattern=pattern+P(lst[i]) - end - end - else - pattern=P(lst) - end - if isutf then - pattern=((utf8char or 1)-pattern)^0*pattern - else - pattern=(1-pattern)^0*pattern - end - if makefunction then - return function(str) - return lpegmatch(pattern,str) - end - else - return pattern - end -end -local splitters_f,splitters_s={},{} -function lpeg.firstofsplit(separator) - local splitter=splitters_f[separator] - if not splitter then - local pattern=P(separator) - splitter=C((1-pattern)^0) - splitters_f[separator]=splitter - end - return splitter -end -function lpeg.secondofsplit(separator) - local splitter=splitters_s[separator] - if not splitter then - local pattern=P(separator) - splitter=(1-pattern)^0*pattern*C(anything^0) - splitters_s[separator]=splitter - end - return splitter -end -local splitters_s,splitters_p={},{} -function lpeg.beforesuffix(separator) - local splitter=splitters_s[separator] - if not splitter then - local pattern=P(separator) - splitter=C((1-pattern)^0)*pattern*endofstring - splitters_s[separator]=splitter - end - return splitter -end -function lpeg.afterprefix(separator) - local splitter=splitters_p[separator] - if not splitter then - local pattern=P(separator) - splitter=pattern*C(anything^0) - splitters_p[separator]=splitter - end - return splitter -end -function lpeg.balancer(left,right) - left,right=P(left),P(right) - return P { left*((1-left-right)+V(1))^0*right } -end -local nany=utf8char/"" -function lpeg.counter(pattern) - pattern=Cs((P(pattern)/" "+nany)^0) - return function(str) - return #lpegmatch(pattern,str) - end -end -utf=utf or (unicode and unicode.utf8) or {} -local utfcharacters=utf and utf.characters or string.utfcharacters -local utfgmatch=utf and utf.gmatch -local utfchar=utf and utf.char -lpeg.UP=lpeg.P -if utfcharacters then - function lpeg.US(str) - local p=P(false) - for uc in utfcharacters(str) do - p=p+P(uc) - end - return p - end -elseif utfgmatch then - function lpeg.US(str) - local p=P(false) - for uc in utfgmatch(str,".") do - p=p+P(uc) - end - return p - end -else - function lpeg.US(str) - local p=P(false) - local f=function(uc) - p=p+P(uc) - end - lpegmatch((utf8char/f)^0,str) - return p - end -end -local range=utf8byte*utf8byte+Cc(false) -function lpeg.UR(str,more) - local first,last - if type(str)=="number" then - first=str - last=more or first - else - first,last=lpegmatch(range,str) - if not last then - return P(str) - end - end - if first==last then - return P(str) - 
elseif utfchar and (last-first<8) then - local p=P(false) - for i=first,last do - p=p+P(utfchar(i)) - end - return p - else - local f=function(b) - return b>=first and b<=last - end - return utf8byte/f - end -end -function lpeg.is_lpeg(p) - return p and lpegtype(p)=="pattern" -end -function lpeg.oneof(list,...) - if type(list)~="table" then - list={ list,... } - end - local p=P(list[1]) - for l=2,#list do - p=p+P(list[l]) - end - return p -end -local sort=table.sort -local function copyindexed(old) - local new={} - for i=1,#old do - new[i]=old - end - return new -end -local function sortedkeys(tab) - local keys,s={},0 - for key,_ in next,tab do - s=s+1 - keys[s]=key - end - sort(keys) - return keys -end -function lpeg.append(list,pp,delayed,checked) - local p=pp - if #list>0 then - local keys=copyindexed(list) - sort(keys) - for i=#keys,1,-1 do - local k=keys[i] - if p then - p=P(k)+p - else - p=P(k) - end - end - elseif delayed then - local keys=sortedkeys(list) - if p then - for i=1,#keys,1 do - local k=keys[i] - local v=list[k] - p=P(k)/list+p - end - else - for i=1,#keys do - local k=keys[i] - local v=list[k] - if p then - p=P(k)+p - else - p=P(k) - end - end - if p then - p=p/list - end - end - elseif checked then - local keys=sortedkeys(list) - for i=1,#keys do - local k=keys[i] - local v=list[k] - if p then - if k==v then - p=P(k)+p - else - p=P(k)/v+p - end - else - if k==v then - p=P(k) - else - p=P(k)/v - end - end - end - else - local keys=sortedkeys(list) - for i=1,#keys do - local k=keys[i] - local v=list[k] - if p then - p=P(k)/v+p - else - p=P(k)/v - end - end - end - return p -end -local function make(t,hash) - local p=P(false) - local keys=sortedkeys(t) - for i=1,#keys do - local k=keys[i] - local v=t[k] - local h=hash[v] - if h then - if next(v) then - p=p+P(k)*(make(v,hash)+P(true)) - else - p=p+P(k)*P(true) - end - else - if next(v) then - p=p+P(k)*make(v,hash) - else - p=p+P(k) - end - end - end - return p -end -function lpeg.utfchartabletopattern(list) - local tree={} - local hash={} - local n=#list - if n==0 then - for s in next,list do - local t=tree - for c in gmatch(s,".") do - local tc=t[c] - if not tc then - tc={} - t[c]=tc - end - t=tc - end - hash[t]=s - end - else - for i=1,n do - local t=tree - local s=list[i] - for c in gmatch(s,".") do - local tc=t[c] - if not tc then - tc={} - t[c]=tc - end - t=tc - end - hash[t]=s - end - end - return make(tree,hash) -end -patterns.containseol=lpeg.finder(eol) -local function nextstep(n,step,result) - local m=n%step - local d=floor(n/step) - if d>0 then - local v=V(tostring(step)) - local s=result.start - for i=1,d do - if s then - s=v*s - else - s=v - end - end - result.start=s - end - if step>1 and result.start then - local v=V(tostring(step/2)) - result[tostring(step)]=v*v - end - if step>0 then - return nextstep(m,step/2,result) - else - return result - end -end -function lpeg.times(pattern,n) - return P(nextstep(n,2^16,{ "start",["1"]=pattern })) -end -local trailingzeros=zero^0*-digit -local case_1=period*trailingzeros/"" -local case_2=period*(digit-trailingzeros)^1*(trailingzeros/"") -local number=digit^1*(case_1+case_2) -local stripper=Cs((number+1)^0) -lpeg.patterns.stripzeros=stripper -local byte_to_HEX={} -local byte_to_hex={} -local byte_to_dec={} -local hex_to_byte={} -for i=0,255 do - local H=format("%02X",i) - local h=format("%02x",i) - local d=format("%03i",i) - local c=char(i) - byte_to_HEX[c]=H - byte_to_hex[c]=h - byte_to_dec[c]=d - hex_to_byte[h]=c - hex_to_byte[H]=c -end -local 
hextobyte=P(2)/hex_to_byte -local bytetoHEX=P(1)/byte_to_HEX -local bytetohex=P(1)/byte_to_hex -local bytetodec=P(1)/byte_to_dec -local hextobytes=Cs(hextobyte^0) -local bytestoHEX=Cs(bytetoHEX^0) -local bytestohex=Cs(bytetohex^0) -local bytestodec=Cs(bytetodec^0) -patterns.hextobyte=hextobyte -patterns.bytetoHEX=bytetoHEX -patterns.bytetohex=bytetohex -patterns.bytetodec=bytetodec -patterns.hextobytes=hextobytes -patterns.bytestoHEX=bytestoHEX -patterns.bytestohex=bytestohex -patterns.bytestodec=bytestodec -function string.toHEX(s) - if not s or s=="" then - return s - else - return lpegmatch(bytestoHEX,s) - end -end -function string.tohex(s) - if not s or s=="" then - return s - else - return lpegmatch(bytestohex,s) - end -end -function string.todec(s) - if not s or s=="" then - return s - else - return lpegmatch(bytestodec,s) - end -end -function string.tobytes(s) - if not s or s=="" then - return s - else - return lpegmatch(hextobytes,s) - end -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['l-functions']={ - version=1.001, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -functions=functions or {} -function functions.dummy() end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['l-string']={ - version=1.001, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local string=string -local sub,gmatch,format,char,byte,rep,lower=string.sub,string.gmatch,string.format,string.char,string.byte,string.rep,string.lower -local lpegmatch,patterns=lpeg.match,lpeg.patterns -local P,S,C,Ct,Cc,Cs=lpeg.P,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cc,lpeg.Cs -local unquoted=patterns.squote*C(patterns.nosquote)*patterns.squote+patterns.dquote*C(patterns.nodquote)*patterns.dquote -function string.unquoted(str) - return lpegmatch(unquoted,str) or str -end -function string.quoted(str) - return format("%q",str) -end -function string.count(str,pattern) - local n=0 - for _ in gmatch(str,pattern) do - n=n+1 - end - return n -end -function string.limit(str,n,sentinel) - if #str>n then - sentinel=sentinel or "..." 
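    -- e.g. string.limit("fontloader",6) -> "fon...": the result is cut to n
    -- characters including the sentinel, which defaults to "...".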
- return sub(str,1,(n-#sentinel))..sentinel - else - return str - end -end -local stripper=patterns.stripper -local fullstripper=patterns.fullstripper -local collapser=patterns.collapser -local longtostring=patterns.longtostring -function string.strip(str) - return lpegmatch(stripper,str) or "" -end -function string.fullstrip(str) - return lpegmatch(fullstripper,str) or "" -end -function string.collapsespaces(str) - return lpegmatch(collapser,str) or "" -end -function string.longtostring(str) - return lpegmatch(longtostring,str) or "" -end -local pattern=P(" ")^0*P(-1) -function string.is_empty(str) - if str=="" then - return true - else - return lpegmatch(pattern,str) and true or false - end -end -local anything=patterns.anything -local allescapes=Cc("%")*S(".-+%?()[]*") -local someescapes=Cc("%")*S(".-+%()[]") -local matchescapes=Cc(".")*S("*?") -local pattern_a=Cs ((allescapes+anything )^0 ) -local pattern_b=Cs ((someescapes+matchescapes+anything )^0 ) -local pattern_c=Cs (Cc("^")*(someescapes+matchescapes+anything )^0*Cc("$") ) -function string.escapedpattern(str,simple) - return lpegmatch(simple and pattern_b or pattern_a,str) -end -function string.topattern(str,lowercase,strict) - if str=="" or type(str)~="string" then - return ".*" - elseif strict then - str=lpegmatch(pattern_c,str) - else - str=lpegmatch(pattern_b,str) - end - if lowercase then - return lower(str) - else - return str - end -end -function string.valid(str,default) - return (type(str)=="string" and str~="" and str) or default or nil -end -string.itself=function(s) return s end -local pattern=Ct(C(1)^0) -function string.totable(str) - return lpegmatch(pattern,str) -end -local replacer=lpeg.replacer("@","%%") -function string.tformat(fmt,...) - return format(lpegmatch(replacer,fmt),...) 
-end -string.quote=string.quoted -string.unquote=string.unquoted - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['l-table']={ - version=1.001, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local type,next,tostring,tonumber,ipairs,select=type,next,tostring,tonumber,ipairs,select -local table,string=table,string -local concat,sort,insert,remove=table.concat,table.sort,table.insert,table.remove -local format,lower,dump=string.format,string.lower,string.dump -local getmetatable,setmetatable=getmetatable,setmetatable -local getinfo=debug.getinfo -local lpegmatch,patterns=lpeg.match,lpeg.patterns -local floor=math.floor -local stripper=patterns.stripper -function table.strip(tab) - local lst,l={},0 - for i=1,#tab do - local s=lpegmatch(stripper,tab[i]) or "" - if s=="" then - else - l=l+1 - lst[l]=s - end - end - return lst -end -function table.keys(t) - if t then - local keys,k={},0 - for key,_ in next,t do - k=k+1 - keys[k]=key - end - return keys - else - return {} - end -end -local function compare(a,b) - local ta,tb=type(a),type(b) - if ta==tb then - return a0 then - local n=0 - for _,v in next,t do - n=n+1 - end - if n==#t then - local tt,nt={},0 - for i=1,#t do - local v=t[i] - local tv=type(v) - if tv=="number" then - nt=nt+1 - if hexify then - tt[nt]=format("0x%X",v) - else - tt[nt]=tostring(v) - end - elseif tv=="string" then - nt=nt+1 - tt[nt]=format("%q",v) - elseif tv=="boolean" then - nt=nt+1 - tt[nt]=v and "true" or "false" - else - tt=nil - break - end - end - return tt - end - end - return nil -end -local propername=patterns.propername -local function dummy() end -local function do_serialize(root,name,depth,level,indexed) - if level>0 then - depth=depth.." 
" - if indexed then - handle(format("%s{",depth)) - else - local tn=type(name) - if tn=="number" then - if hexify then - handle(format("%s[0x%X]={",depth,name)) - else - handle(format("%s[%s]={",depth,name)) - end - elseif tn=="string" then - if noquotes and not reserved[name] and lpegmatch(propername,name) then - handle(format("%s%s={",depth,name)) - else - handle(format("%s[%q]={",depth,name)) - end - elseif tn=="boolean" then - handle(format("%s[%s]={",depth,name and "true" or "false")) - else - handle(format("%s{",depth)) - end - end - end - if root and next(root) then - local first,last=nil,0 - if compact then - last=#root - for k=1,last do - if root[k]==nil then - last=k-1 - break - end - end - if last>0 then - first=1 - end - end - local sk=sortedkeys(root) - for i=1,#sk do - local k=sk[i] - local v=root[k] - local tv,tk=type(v),type(k) - if compact and first and tk=="number" and k>=first and k<=last then - if tv=="number" then - if hexify then - handle(format("%s 0x%X,",depth,v)) - else - handle(format("%s %s,",depth,v)) - end - elseif tv=="string" then - if reduce and tonumber(v) then - handle(format("%s %s,",depth,v)) - else - handle(format("%s %q,",depth,v)) - end - elseif tv=="table" then - if not next(v) then - handle(format("%s {},",depth)) - elseif inline then - local st=simple_table(v) - if st then - handle(format("%s { %s },",depth,concat(st,", "))) - else - do_serialize(v,k,depth,level+1,true) - end - else - do_serialize(v,k,depth,level+1,true) - end - elseif tv=="boolean" then - handle(format("%s %s,",depth,v and "true" or "false")) - elseif tv=="function" then - if functions then - handle(format('%s load(%q),',depth,dump(v))) - else - handle(format('%s "function",',depth)) - end - else - handle(format("%s %q,",depth,tostring(v))) - end - elseif k=="__p__" then - if false then - handle(format("%s __p__=nil,",depth)) - end - elseif tv=="number" then - if tk=="number" then - if hexify then - handle(format("%s [0x%X]=0x%X,",depth,k,v)) - else - handle(format("%s [%s]=%s,",depth,k,v)) - end - elseif tk=="boolean" then - if hexify then - handle(format("%s [%s]=0x%X,",depth,k and "true" or "false",v)) - else - handle(format("%s [%s]=%s,",depth,k and "true" or "false",v)) - end - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - if hexify then - handle(format("%s %s=0x%X,",depth,k,v)) - else - handle(format("%s %s=%s,",depth,k,v)) - end - else - if hexify then - handle(format("%s [%q]=0x%X,",depth,k,v)) - else - handle(format("%s [%q]=%s,",depth,k,v)) - end - end - elseif tv=="string" then - if reduce and tonumber(v) then - if tk=="number" then - if hexify then - handle(format("%s [0x%X]=%s,",depth,k,v)) - else - handle(format("%s [%s]=%s,",depth,k,v)) - end - elseif tk=="boolean" then - handle(format("%s [%s]=%s,",depth,k and "true" or "false",v)) - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - handle(format("%s %s=%s,",depth,k,v)) - else - handle(format("%s [%q]=%s,",depth,k,v)) - end - else - if tk=="number" then - if hexify then - handle(format("%s [0x%X]=%q,",depth,k,v)) - else - handle(format("%s [%s]=%q,",depth,k,v)) - end - elseif tk=="boolean" then - handle(format("%s [%s]=%q,",depth,k and "true" or "false",v)) - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - handle(format("%s %s=%q,",depth,k,v)) - else - handle(format("%s [%q]=%q,",depth,k,v)) - end - end - elseif tv=="table" then - if not next(v) then - if tk=="number" then - if hexify then - handle(format("%s [0x%X]={},",depth,k)) - else - 
handle(format("%s [%s]={},",depth,k)) - end - elseif tk=="boolean" then - handle(format("%s [%s]={},",depth,k and "true" or "false")) - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - handle(format("%s %s={},",depth,k)) - else - handle(format("%s [%q]={},",depth,k)) - end - elseif inline then - local st=simple_table(v) - if st then - if tk=="number" then - if hexify then - handle(format("%s [0x%X]={ %s },",depth,k,concat(st,", "))) - else - handle(format("%s [%s]={ %s },",depth,k,concat(st,", "))) - end - elseif tk=="boolean" then - handle(format("%s [%s]={ %s },",depth,k and "true" or "false",concat(st,", "))) - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - handle(format("%s %s={ %s },",depth,k,concat(st,", "))) - else - handle(format("%s [%q]={ %s },",depth,k,concat(st,", "))) - end - else - do_serialize(v,k,depth,level+1) - end - else - do_serialize(v,k,depth,level+1) - end - elseif tv=="boolean" then - if tk=="number" then - if hexify then - handle(format("%s [0x%X]=%s,",depth,k,v and "true" or "false")) - else - handle(format("%s [%s]=%s,",depth,k,v and "true" or "false")) - end - elseif tk=="boolean" then - handle(format("%s [%s]=%s,",depth,tostring(k),v and "true" or "false")) - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - handle(format("%s %s=%s,",depth,k,v and "true" or "false")) - else - handle(format("%s [%q]=%s,",depth,k,v and "true" or "false")) - end - elseif tv=="function" then - if functions then - local f=getinfo(v).what=="C" and dump(dummy) or dump(v) - if tk=="number" then - if hexify then - handle(format("%s [0x%X]=load(%q),",depth,k,f)) - else - handle(format("%s [%s]=load(%q),",depth,k,f)) - end - elseif tk=="boolean" then - handle(format("%s [%s]=load(%q),",depth,k and "true" or "false",f)) - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - handle(format("%s %s=load(%q),",depth,k,f)) - else - handle(format("%s [%q]=load(%q),",depth,k,f)) - end - end - else - if tk=="number" then - if hexify then - handle(format("%s [0x%X]=%q,",depth,k,tostring(v))) - else - handle(format("%s [%s]=%q,",depth,k,tostring(v))) - end - elseif tk=="boolean" then - handle(format("%s [%s]=%q,",depth,k and "true" or "false",tostring(v))) - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - handle(format("%s %s=%q,",depth,k,tostring(v))) - else - handle(format("%s [%q]=%q,",depth,k,tostring(v))) - end - end - end - end - if level>0 then - handle(format("%s},",depth)) - end -end -local function serialize(_handle,root,name,specification) - local tname=type(name) - if type(specification)=="table" then - noquotes=specification.noquotes - hexify=specification.hexify - handle=_handle or specification.handle or print - reduce=specification.reduce or false - functions=specification.functions - compact=specification.compact - inline=specification.inline and compact - if functions==nil then - functions=true - end - if compact==nil then - compact=true - end - if inline==nil then - inline=compact - end - else - noquotes=false - hexify=false - handle=_handle or print - reduce=false - compact=true - inline=true - functions=true - end - if tname=="string" then - if name=="return" then - handle("return {") - else - handle(name.."={") - end - elseif tname=="number" then - if hexify then - handle(format("[0x%X]={",name)) - else - handle("["..name.."]={") - end - elseif tname=="boolean" then - if name then - handle("return {") - else - handle("{") - end - else - handle("t={") - end - if root then - 
if getmetatable(root) then - local dummy=root._w_h_a_t_e_v_e_r_ - root._w_h_a_t_e_v_e_r_=nil - end - if next(root) then - do_serialize(root,name,"",0) - end - end - handle("}") -end -function table.serialize(root,name,specification) - local t,n={},0 - local function flush(s) - n=n+1 - t[n]=s - end - serialize(flush,root,name,specification) - return concat(t,"\n") -end -table.tohandle=serialize -local maxtab=2*1024 -function table.tofile(filename,root,name,specification) - local f=io.open(filename,'w') - if f then - if maxtab>1 then - local t,n={},0 - local function flush(s) - n=n+1 - t[n]=s - if n>maxtab then - f:write(concat(t,"\n"),"\n") - t,n={},0 - end - end - serialize(flush,root,name,specification) - f:write(concat(t,"\n"),"\n") - else - local function flush(s) - f:write(s,"\n") - end - serialize(flush,root,name,specification) - end - f:close() - io.flush() - end -end -local function flattened(t,f,depth) - if f==nil then - f={} - depth=0xFFFF - elseif tonumber(f) then - depth=f - f={} - elseif not depth then - depth=0xFFFF - end - for k,v in next,t do - if type(k)~="number" then - if depth>0 and type(v)=="table" then - flattened(v,f,depth-1) - else - f[#f+1]=v - end - end - end - for k=1,#t do - local v=t[k] - if depth>0 and type(v)=="table" then - flattened(v,f,depth-1) - else - f[#f+1]=v - end - end - return f -end -table.flattened=flattened -local function unnest(t,f) - if not f then - f={} - end - for i=1,#t do - local v=t[i] - if type(v)=="table" then - if type(v[1])=="table" then - unnest(v,f) - else - f[#f+1]=v - end - else - f[#f+1]=v - end - end - return f -end -function table.unnest(t) - return unnest(t) -end -local function are_equal(a,b,n,m) - if a and b and #a==#b then - n=n or 1 - m=m or #a - for i=n,m do - local ai,bi=a[i],b[i] - if ai==bi then - elseif type(ai)=="table" and type(bi)=="table" then - if not are_equal(ai,bi) then - return false - end - else - return false - end - end - return true - else - return false - end -end -local function identical(a,b) - for ka,va in next,a do - local vb=b[ka] - if va==vb then - elseif type(va)=="table" and type(vb)=="table" then - if not identical(va,vb) then - return false - end - else - return false - end - end - return true -end -table.identical=identical -table.are_equal=are_equal -local function sparse(old,nest,keeptables) - local new={} - for k,v in next,old do - if not (v=="" or v==false) then - if nest and type(v)=="table" then - v=sparse(v,nest) - if keeptables or next(v) then - new[k]=v - end - else - new[k]=v - end - end - end - return new -end -table.sparse=sparse -function table.compact(t) - return sparse(t,true,true) -end -function table.contains(t,v) - if t then - for i=1,#t do - if t[i]==v then - return i - end - end - end - return false -end -function table.count(t) - local n=0 - for k,v in next,t do - n=n+1 - end - return n -end -function table.swapped(t,s) - local n={} - if s then - for k,v in next,s do - n[k]=v - end - end - for k,v in next,t do - n[v]=k - end - return n -end -function table.mirrored(t) - local n={} - for k,v in next,t do - n[v]=k - n[k]=v - end - return n -end -function table.reversed(t) - if t then - local tt,tn={},#t - if tn>0 then - local ttn=0 - for i=tn,1,-1 do - ttn=ttn+1 - tt[ttn]=t[i] - end - end - return tt - end -end -function table.reverse(t) - if t then - local n=#t - for i=1,floor(n/2) do - local j=n-i+1 - t[i],t[j]=t[j],t[i] - end - return t - end -end -function table.sequenced(t,sep,simple) - if not t then - return "" - end - local n=#t - local s={} - if n>0 then - for i=1,n 
do - s[i]=tostring(t[i]) - end - else - n=0 - for k,v in sortedhash(t) do - if simple then - if v==true then - n=n+1 - s[n]=k - elseif v and v~="" then - n=n+1 - s[n]=k.."="..tostring(v) - end - else - n=n+1 - s[n]=k.."="..tostring(v) - end - end - end - return concat(s,sep or " | ") -end -function table.print(t,...) - if type(t)~="table" then - print(tostring(t)) - else - serialize(print,t,...) - end -end -if setinspector then - setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end) -end -function table.sub(t,i,j) - return { unpack(t,i,j) } -end -function table.is_empty(t) - return not t or not next(t) -end -function table.has_one_entry(t) - return t and not next(t,next(t)) -end -function table.loweredkeys(t) - local l={} - for k,v in next,t do - l[lower(k)]=v - end - return l -end -function table.unique(old) - local hash={} - local new={} - local n=0 - for i=1,#old do - local oi=old[i] - if not hash[oi] then - n=n+1 - new[n]=oi - hash[oi]=true - end - end - return new -end -function table.sorted(t,...) - sort(t,...) - return t -end -function table.values(t,s) - if t then - local values,keys,v={},{},0 - for key,value in next,t do - if not keys[value] then - v=v+1 - values[v]=value - keys[k]=key - end - end - if s then - sort(values) - end - return values - else - return {} - end -end -function table.filtered(t,pattern,sort,cmp) - if t and type(pattern)=="string" then - if sort then - local s - if cmp then - s=sortedhashkeys(t,function(a,b) return cmp(t,a,b) end) - else - s=sortedkeys(t) - end - local n=0 - local m=#s - local function kv(s) - while n16*1024*1024 then - step=16*1024*1024 - else - step=floor(size/(1024*1024))*1024*1024/8 - end - local data={} - while true do - local r=f:read(step) - if not r then - return concat(data) - else - data[#data+1]=r - end - end - end -end -io.readall=readall -function io.loaddata(filename,textmode) - local f=io.open(filename,(textmode and 'r') or 'rb') - if f then - local data=readall(f) - f:close() - if #data>0 then - return data - end - end -end -function io.savedata(filename,data,joiner) - local f=io.open(filename,"wb") - if f then - if type(data)=="table" then - f:write(concat(data,joiner or "")) - elseif type(data)=="function" then - data(f) - else - f:write(data or "") - end - f:close() - io.flush() - return true - else - return false - end -end -function io.loadlines(filename,n) - local f=io.open(filename,'r') - if not f then - elseif n then - local lines={} - for i=1,n do - local line=f:read("*lines") - if line then - lines[#lines+1]=line - else - break - end - end - f:close() - lines=concat(lines,"\n") - if #lines>0 then - return lines - end - else - local line=f:read("*line") or "" - f:close() - if #line>0 then - return line - end - end -end -function io.loadchunk(filename,n) - local f=io.open(filename,'rb') - if f then - local data=f:read(n or 1024) - f:close() - if #data>0 then - return data - end - end -end -function io.exists(filename) - local f=io.open(filename) - if f==nil then - return false - else - f:close() - return true - end -end -function io.size(filename) - local f=io.open(filename) - if f==nil then - return 0 - else - local s=f:seek("end") - f:close() - return s - end -end -function io.noflines(f) - if type(f)=="string" then - local f=io.open(filename) - if f then - local n=f and io.noflines(f) or 0 - f:close() - return n - else - return 0 - end - else - local n=0 - for _ in f:lines() do - n=n+1 - end - f:seek('set',0) - return n - end -end -local nextchar={ - [ 4]=function(f) - 
return f:read(1,1,1,1) - end, - [ 2]=function(f) - return f:read(1,1) - end, - [ 1]=function(f) - return f:read(1) - end, - [-2]=function(f) - local a,b=f:read(1,1) - return b,a - end, - [-4]=function(f) - local a,b,c,d=f:read(1,1,1,1) - return d,c,b,a - end -} -function io.characters(f,n) - if f then - return nextchar[n or 1],f - end -end -local nextbyte={ - [4]=function(f) - local a,b,c,d=f:read(1,1,1,1) - if d then - return byte(a),byte(b),byte(c),byte(d) - end - end, - [3]=function(f) - local a,b,c=f:read(1,1,1) - if b then - return byte(a),byte(b),byte(c) - end - end, - [2]=function(f) - local a,b=f:read(1,1) - if b then - return byte(a),byte(b) - end - end, - [1]=function (f) - local a=f:read(1) - if a then - return byte(a) - end - end, - [-2]=function (f) - local a,b=f:read(1,1) - if b then - return byte(b),byte(a) - end - end, - [-3]=function(f) - local a,b,c=f:read(1,1,1) - if b then - return byte(c),byte(b),byte(a) - end - end, - [-4]=function(f) - local a,b,c,d=f:read(1,1,1,1) - if d then - return byte(d),byte(c),byte(b),byte(a) - end - end -} -function io.bytes(f,n) - if f then - return nextbyte[n or 1],f - else - return nil,nil - end -end -function io.ask(question,default,options) - while true do - io.write(question) - if options then - io.write(format(" [%s]",concat(options,"|"))) - end - if default then - io.write(format(" [%s]",default)) - end - io.write(format(" ")) - io.flush() - local answer=io.read() - answer=gsub(answer,"^%s*(.*)%s*$","%1") - if answer=="" and default then - return default - elseif not options then - return answer - else - for k=1,#options do - if options[k]==answer then - return answer - end - end - local pattern="^"..answer - for k=1,#options do - local v=options[k] - if find(v,pattern) then - return v - end - end - end - end -end -local function readnumber(f,n,m) - if m then - f:seek("set",n) - n=m - end - if n==1 then - return byte(f:read(1)) - elseif n==2 then - local a,b=byte(f:read(2),1,2) - return 256*a+b - elseif n==3 then - local a,b,c=byte(f:read(3),1,3) - return 256*256*a+256*b+c - elseif n==4 then - local a,b,c,d=byte(f:read(4),1,4) - return 256*256*256*a+256*256*b+256*c+d - elseif n==8 then - local a,b=readnumber(f,4),readnumber(f,4) - return 256*a+b - elseif n==12 then - local a,b,c=readnumber(f,4),readnumber(f,4),readnumber(f,4) - return 256*256*a+256*b+c - elseif n==-2 then - local b,a=byte(f:read(2),1,2) - return 256*a+b - elseif n==-3 then - local c,b,a=byte(f:read(3),1,3) - return 256*256*a+256*b+c - elseif n==-4 then - local d,c,b,a=byte(f:read(4),1,4) - return 256*256*256*a+256*256*b+256*c+d - elseif n==-8 then - local h,g,f,e,d,c,b,a=byte(f:read(8),1,8) - return 256*256*256*256*256*256*256*a+256*256*256*256*256*256*b+256*256*256*256*256*c+256*256*256*256*d+256*256*256*e+256*256*f+256*g+h - else - return 0 - end -end -io.readnumber=readnumber -function io.readstring(f,n,m) - if m then - f:seek("set",n) - n=m - end - local str=gsub(f:read(n),"\000","") - return str -end -if not io.i_limiter then function io.i_limiter() end end -if not io.o_limiter then function io.o_limiter() end end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['l-file']={ - version=1.001, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -file=file or {} -local file=file -if not lfs then - lfs=optionalrequire("lfs") -end -if not lfs then - lfs={ 
- getcurrentdir=function() - return "." - end, - attributes=function() - return nil - end, - isfile=function(name) - local f=io.open(name,'rb') - if f then - f:close() - return true - end - end, - isdir=function(name) - print("you need to load lfs") - return false - end - } -elseif not lfs.isfile then - local attributes=lfs.attributes - function lfs.isdir(name) - return attributes(name,"mode")=="directory" - end - function lfs.isfile(name) - return attributes(name,"mode")=="file" - end -end -local insert,concat=table.insert,table.concat -local match,find,gmatch=string.match,string.find,string.gmatch -local lpegmatch=lpeg.match -local getcurrentdir,attributes=lfs.currentdir,lfs.attributes -local checkedsplit=string.checkedsplit -local P,R,S,C,Cs,Cp,Cc,Ct=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Cs,lpeg.Cp,lpeg.Cc,lpeg.Ct -local colon=P(":") -local period=P(".") -local periods=P("..") -local fwslash=P("/") -local bwslash=P("\\") -local slashes=S("\\/") -local noperiod=1-period -local noslashes=1-slashes -local name=noperiod^1 -local suffix=period/""*(1-period-slashes)^1*-1 -local pattern=C((1-(slashes^1*noslashes^1*-1))^1)*P(1) -local function pathpart(name,default) - return name and lpegmatch(pattern,name) or default or "" -end -local pattern=(noslashes^0*slashes)^1*C(noslashes^1)*-1 -local function basename(name) - return name and lpegmatch(pattern,name) or name -end -local pattern=(noslashes^0*slashes^1)^0*Cs((1-suffix)^1)*suffix^0 -local function nameonly(name) - return name and lpegmatch(pattern,name) or name -end -local pattern=(noslashes^0*slashes)^0*(noperiod^1*period)^1*C(noperiod^1)*-1 -local function suffixonly(name) - return name and lpegmatch(pattern,name) or "" -end -local pattern=(noslashes^0*slashes)^0*noperiod^1*((period*C(noperiod^1))^1)*-1+Cc("") -local function suffixesonly(name) - if name then - return lpegmatch(pattern,name) - else - return "" - end -end -file.pathpart=pathpart -file.basename=basename -file.nameonly=nameonly -file.suffixonly=suffixonly -file.suffix=suffixonly -file.suffixesonly=suffixesonly -file.suffixes=suffixesonly -file.dirname=pathpart -file.extname=suffixonly -local drive=C(R("az","AZ"))*colon -local path=C((noslashes^0*slashes)^0) -local suffix=period*C(P(1-period)^0*P(-1)) -local base=C((1-suffix)^0) -local rest=C(P(1)^0) -drive=drive+Cc("") -path=path+Cc("") -base=base+Cc("") -suffix=suffix+Cc("") -local pattern_a=drive*path*base*suffix -local pattern_b=path*base*suffix -local pattern_c=C(drive*path)*C(base*suffix) -local pattern_d=path*rest -function file.splitname(str,splitdrive) - if not str then - elseif splitdrive then - return lpegmatch(pattern_a,str) - else - return lpegmatch(pattern_b,str) - end -end -function file.splitbase(str) - if str then - return lpegmatch(pattern_d,str) - else - return "",str - end -end -function file.nametotable(str,splitdrive) - if str then - local path,drive,subpath,name,base,suffix=lpegmatch(pattern_c,str) - if splitdrive then - return { - path=path, - drive=drive, - subpath=subpath, - name=name, - base=base, - suffix=suffix, - } - else - return { - path=path, - name=name, - base=base, - suffix=suffix, - } - end - end -end -local pattern=Cs(((period*(1-period-slashes)^1*-1)/""+1)^1) -function file.removesuffix(name) - return name and lpegmatch(pattern,name) -end -local suffix=period/""*(1-period-slashes)^1*-1 -local pattern=Cs((noslashes^0*slashes^1)^0*((1-suffix)^1))*Cs(suffix) -function file.addsuffix(filename,suffix,criterium) - if not filename or not suffix or suffix=="" then - return filename - elseif 
criterium==true then - return filename.."."..suffix - elseif not criterium then - local n,s=lpegmatch(pattern,filename) - if not s or s=="" then - return filename.."."..suffix - else - return filename - end - else - local n,s=lpegmatch(pattern,filename) - if s and s~="" then - local t=type(criterium) - if t=="table" then - for i=1,#criterium do - if s==criterium[i] then - return filename - end - end - elseif t=="string" then - if s==criterium then - return filename - end - end - end - return (n or filename).."."..suffix - end -end -local suffix=period*(1-period-slashes)^1*-1 -local pattern=Cs((1-suffix)^0) -function file.replacesuffix(name,suffix) - if name and suffix and suffix~="" then - return lpegmatch(pattern,name).."."..suffix - else - return name - end -end -local reslasher=lpeg.replacer(P("\\"),"/") -function file.reslash(str) - return str and lpegmatch(reslasher,str) -end -function file.is_writable(name) - if not name then - elseif lfs.isdir(name) then - name=name.."/m_t_x_t_e_s_t.tmp" - local f=io.open(name,"wb") - if f then - f:close() - os.remove(name) - return true - end - elseif lfs.isfile(name) then - local f=io.open(name,"ab") - if f then - f:close() - return true - end - else - local f=io.open(name,"ab") - if f then - f:close() - os.remove(name) - return true - end - end - return false -end -local readable=P("r")*Cc(true) -function file.is_readable(name) - if name then - local a=attributes(name) - return a and lpegmatch(readable,a.permissions) or false - else - return false - end -end -file.isreadable=file.is_readable -file.iswritable=file.is_writable -function file.size(name) - if name then - local a=attributes(name) - return a and a.size or 0 - else - return 0 - end -end -function file.splitpath(str,separator) - return str and checkedsplit(lpegmatch(reslasher,str),separator or io.pathseparator) -end -function file.joinpath(tab,separator) - return tab and concat(tab,separator or io.pathseparator) -end -local someslash=S("\\/") -local stripper=Cs(P(fwslash)^0/""*reslasher) -local isnetwork=someslash*someslash*(1-someslash)+(1-fwslash-colon)^1*colon -local isroot=fwslash^1*-1 -local hasroot=fwslash^1 -local reslasher=lpeg.replacer(S("\\/"),"/") -local deslasher=lpeg.replacer(S("\\/")^1,"/") -function file.join(one,two,three,...) - if not two then - return one=="" and one or lpegmatch(stripper,one) - end - if one=="" then - return lpegmatch(stripper,three and concat({ two,three,... },"/") or two) - end - if lpegmatch(isnetwork,one) then - local one=lpegmatch(reslasher,one) - local two=lpegmatch(deslasher,three and concat({ two,three,... },"/") or two) - if lpegmatch(hasroot,two) then - return one..two - else - return one.."/"..two - end - elseif lpegmatch(isroot,one) then - local two=lpegmatch(deslasher,three and concat({ two,three,... },"/") or two) - if lpegmatch(hasroot,two) then - return two - else - return "/"..two - end - else - return lpegmatch(deslasher,concat({ one,two,three,... },"/")) - end -end -local drivespec=R("az","AZ")^1*colon -local anchors=fwslash+drivespec -local untouched=periods+(1-period)^1*P(-1) -local mswindrive=Cs(drivespec*(bwslash/"/"+fwslash)^0) -local mswinuncpath=(bwslash+fwslash)*(bwslash+fwslash)*Cc("//") -local splitstarter=(mswindrive+mswinuncpath+Cc(false))*Ct(lpeg.splitat(S("/\\")^1)) -local absolute=fwslash -function file.collapsepath(str,anchor) - if not str then - return - end - if anchor==true and not lpegmatch(anchors,str) then - str=getcurrentdir().."/"..str - end - if str=="" or str=="." then - return "." 
- elseif lpegmatch(untouched,str) then - return lpegmatch(reslasher,str) - end - local starter,oldelements=lpegmatch(splitstarter,str) - local newelements={} - local i=#oldelements - while i>0 do - local element=oldelements[i] - if element=='.' then - elseif element=='..' then - local n=i-1 - while n>0 do - local element=oldelements[n] - if element~='..' and element~='.' then - oldelements[n]='.' - break - else - n=n-1 - end - end - if n<1 then - insert(newelements,1,'..') - end - elseif element~="" then - insert(newelements,1,element) - end - i=i-1 - end - if #newelements==0 then - return starter or "." - elseif starter then - return starter..concat(newelements,'/') - elseif lpegmatch(absolute,str) then - return "/"..concat(newelements,'/') - else - newelements=concat(newelements,'/') - if anchor=="." and find(str,"^%./") then - return "./"..newelements - else - return newelements - end - end -end -local tricky=S("/\\")*P(-1) -local attributes=lfs.attributes -function lfs.isdir(name) - if lpegmatch(tricky,name) then - return attributes(name,"mode")=="directory" - else - return attributes(name.."/.","mode")=="directory" - end -end -function lfs.isfile(name) - return attributes(name,"mode")=="file" -end -local validchars=R("az","09","AZ","--","..") -local pattern_a=lpeg.replacer(1-validchars) -local pattern_a=Cs((validchars+P(1)/"-")^1) -local whatever=P("-")^0/"" -local pattern_b=Cs(whatever*(1-whatever*-1)^1) -function file.robustname(str,strict) - if str then - str=lpegmatch(pattern_a,str) or str - if strict then - return lpegmatch(pattern_b,str) or str - else - return str - end - end -end -file.readdata=io.loaddata -file.savedata=io.savedata -function file.copy(oldname,newname) - if oldname and newname then - local data=io.loaddata(oldname) - if data and data~="" then - file.savedata(newname,data) - end - end -end -local letter=R("az","AZ")+S("_-+") -local separator=P("://") -local qualified=period^0*fwslash+letter*colon+letter^1*separator+letter^1*fwslash -local rootbased=fwslash+letter*colon -lpeg.patterns.qualified=qualified -lpeg.patterns.rootbased=rootbased -function file.is_qualified_path(filename) - return filename and lpegmatch(qualified,filename)~=nil -end -function file.is_rootbased_path(filename) - return filename and lpegmatch(rootbased,filename)~=nil -end -function file.strip(name,dir) - if name then - local b,a=match(name,"^(.-)"..dir.."(.*)$") - return a~="" and a or name - end -end -function lfs.mkdirs(path) - local full="" - for sub in gmatch(path,"(/*[^\\/]+)") do - full=full..sub - lfs.mkdir(full) - end -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['l-boolean']={ - version=1.001, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local type,tonumber=type,tonumber -boolean=boolean or {} -local boolean=boolean -function boolean.tonumber(b) - if b then return 1 else return 0 end -end -function toboolean(str,tolerant) - if str==nil then - return false - elseif str==false then - return false - elseif str==true then - return true - elseif str=="true" then - return true - elseif str=="false" then - return false - elseif not tolerant then - return false - elseif str==0 then - return false - elseif (tonumber(str) or 0)>0 then - return true - else - return str=="yes" or str=="on" or str=="t" - end -end -string.toboolean=toboolean -function 
string.booleanstring(str) - if str=="0" then - return false - elseif str=="1" then - return true - elseif str=="" then - return false - elseif str=="false" then - return false - elseif str=="true" then - return true - elseif (tonumber(str) or 0)>0 then - return true - else - return str=="yes" or str=="on" or str=="t" - end -end -function string.is_boolean(str,default,strict) - if type(str)=="string" then - if str=="true" or str=="yes" or str=="on" or str=="t" or (not strict and str=="1") then - return true - elseif str=="false" or str=="no" or str=="off" or str=="f" or (not strict and str=="0") then - return false - end - end - return default -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['l-math']={ - version=1.001, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local floor,sin,cos,tan=math.floor,math.sin,math.cos,math.tan -if not math.round then - function math.round(x) return floor(x+0.5) end -end -if not math.div then - function math.div(n,m) return floor(n/m) end -end -if not math.mod then - function math.mod(n,m) return n%m end -end -local pipi=2*math.pi/360 -if not math.sind then - function math.sind(d) return sin(d*pipi) end - function math.cosd(d) return cos(d*pipi) end - function math.tand(d) return tan(d*pipi) end -end -if not math.odd then - function math.odd (n) return n%2~=0 end - function math.even(n) return n%2==0 end -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['util-str']={ - version=1.001, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -utilities=utilities or {} -utilities.strings=utilities.strings or {} -local strings=utilities.strings -local format,gsub,rep,sub=string.format,string.gsub,string.rep,string.sub -local load,dump=load,string.dump -local tonumber,type,tostring=tonumber,type,tostring -local unpack,concat=table.unpack,table.concat -local P,V,C,S,R,Ct,Cs,Cp,Carg,Cc=lpeg.P,lpeg.V,lpeg.C,lpeg.S,lpeg.R,lpeg.Ct,lpeg.Cs,lpeg.Cp,lpeg.Carg,lpeg.Cc -local patterns,lpegmatch=lpeg.patterns,lpeg.match -local utfchar,utfbyte=utf.char,utf.byte -local loadstripped=nil -if _LUAVERSION<5.2 then - loadstripped=function(str,shortcuts) - return load(str) - end -else - loadstripped=function(str,shortcuts) - if shortcuts then - return load(dump(load(str),true),nil,nil,shortcuts) - else - return load(dump(load(str),true)) - end - end -end -if not number then number={} end -local stripper=patterns.stripzeros -local function points(n) - n=tonumber(n) - return (not n or n==0) and "0pt" or lpegmatch(stripper,format("%.5fpt",n/65536)) -end -local function basepoints(n) - n=tonumber(n) - return (not n or n==0) and "0bp" or lpegmatch(stripper,format("%.5fbp",n*(7200/7227)/65536)) -end -number.points=points -number.basepoints=basepoints -local rubish=patterns.spaceortab^0*patterns.newline -local anyrubish=patterns.spaceortab+patterns.newline -local anything=patterns.anything -local stripped=(patterns.spaceortab^1/"")*patterns.newline -local leading=rubish^0/"" -local trailing=(anyrubish^1*patterns.endofstring)/"" -local redundant=rubish^3/"\n" -local pattern=Cs(leading*(trailing+redundant+stripped+anything)^0) -function 
strings.collapsecrlf(str) - return lpegmatch(pattern,str) -end -local repeaters={} -function strings.newrepeater(str,offset) - offset=offset or 0 - local s=repeaters[str] - if not s then - s={} - repeaters[str]=s - end - local t=s[offset] - if t then - return t - end - t={} - setmetatable(t,{ __index=function(t,k) - if not k then - return "" - end - local n=k+offset - local s=n>0 and rep(str,n) or "" - t[k]=s - return s - end }) - s[offset]=t - return t -end -local extra,tab,start=0,0,4,0 -local nspaces=strings.newrepeater(" ") -string.nspaces=nspaces -local pattern=Carg(1)/function(t) - extra,tab,start=0,t or 7,1 - end*Cs(( - Cp()*patterns.tab/function(position) - local current=(position-start+1)+extra - local spaces=tab-(current-1)%tab - if spaces>0 then - extra=extra+spaces-1 - return nspaces[spaces] - else - return "" - end - end+patterns.newline*Cp()/function(position) - extra,start=0,position - end+patterns.anything - )^1) -function strings.tabtospace(str,tab) - return lpegmatch(pattern,str,1,tab or 7) -end -local newline=patterns.newline -local endofstring=patterns.endofstring -local whitespace=patterns.whitespace -local spacer=patterns.spacer -local space=spacer^0 -local nospace=space/"" -local endofline=nospace*newline -local stripend=(whitespace^1*endofstring)/"" -local normalline=(nospace*((1-space*(newline+endofstring))^1)*nospace) -local stripempty=endofline^1/"" -local normalempty=endofline^1 -local singleempty=endofline*(endofline^0/"") -local doubleempty=endofline*endofline^-1*(endofline^0/"") -local stripstart=stripempty^0 -local p_prune_normal=Cs (stripstart*(stripend+normalline+normalempty )^0 ) -local p_prune_collapse=Cs (stripstart*(stripend+normalline+doubleempty )^0 ) -local p_prune_noempty=Cs (stripstart*(stripend+normalline+singleempty )^0 ) -local p_retain_normal=Cs ((normalline+normalempty )^0 ) -local p_retain_collapse=Cs ((normalline+doubleempty )^0 ) -local p_retain_noempty=Cs ((normalline+singleempty )^0 ) -local striplinepatterns={ - ["prune"]=p_prune_normal, - ["prune and collapse"]=p_prune_collapse, - ["prune and no empty"]=p_prune_noempty, - ["retain"]=p_retain_normal, - ["retain and collapse"]=p_retain_collapse, - ["retain and no empty"]=p_retain_noempty, - ["collapse"]=patterns.collapser, -} -strings.striplinepatterns=striplinepatterns -function strings.striplines(str,how) - return str and lpegmatch(how and striplinepatterns[how] or p_prune_collapse,str) or str -end -strings.striplong=strings.striplines -function strings.nice(str) - str=gsub(str,"[:%-+_]+"," ") - return str -end -local n=0 -local sequenced=table.sequenced -function string.autodouble(s,sep) - if s==nil then - return '""' - end - local t=type(s) - if t=="number" then - return tostring(s) - end - if t=="table" then - return ('"'..sequenced(s,sep or ",")..'"') - end - return ('"'..tostring(s)..'"') -end -function string.autosingle(s,sep) - if s==nil then - return "''" - end - local t=type(s) - if t=="number" then - return tostring(s) - end - if t=="table" then - return ("'"..sequenced(s,sep or ",").."'") - end - return ("'"..tostring(s).."'") -end -local tracedchars={} -string.tracedchars=tracedchars -strings.tracers=tracedchars -function string.tracedchar(b) - if type(b)=="number" then - return tracedchars[b] or (utfchar(b).." (U+"..format('%05X',b)..")") - else - local c=utfbyte(b) - return tracedchars[c] or (b.." 
(U+"..format('%05X',c)..")") - end -end -function number.signed(i) - if i>0 then - return "+",i - else - return "-",-i - end -end -local zero=P("0")^1/"" -local plus=P("+")/"" -local minus=P("-") -local separator=S(".") -local digit=R("09") -local trailing=zero^1*#S("eE") -local exponent=(S("eE")*(plus+Cs((minus*zero^0*P(-1))/"")+minus)*zero^0*(P(-1)*Cc("0")+P(1)^1)) -local pattern_a=Cs(minus^0*digit^1*(separator/""*trailing+separator*(trailing+digit)^0)*exponent) -local pattern_b=Cs((exponent+P(1))^0) -function number.sparseexponent(f,n) - if not n then - n=f - f="%e" - end - local tn=type(n) - if tn=="string" then - local m=tonumber(n) - if m then - return lpegmatch((f=="%e" or f=="%E") and pattern_a or pattern_b,format(f,m)) - end - elseif tn=="number" then - return lpegmatch((f=="%e" or f=="%E") and pattern_a or pattern_b,format(f,n)) - end - return tostring(n) -end -local template=[[ -%s -%s -return function(%s) return %s end -]] -local preamble,environment="",{} -if _LUAVERSION<5.2 then - preamble=[[ -local lpeg=lpeg -local type=type -local tostring=tostring -local tonumber=tonumber -local format=string.format -local concat=table.concat -local signed=number.signed -local points=number.points -local basepoints= number.basepoints -local utfchar=utf.char -local utfbyte=utf.byte -local lpegmatch=lpeg.match -local nspaces=string.nspaces -local tracedchar=string.tracedchar -local autosingle=string.autosingle -local autodouble=string.autodouble -local sequenced=table.sequenced -local formattednumber=number.formatted -local sparseexponent=number.sparseexponent - ]] -else - environment={ - global=global or _G, - lpeg=lpeg, - type=type, - tostring=tostring, - tonumber=tonumber, - format=string.format, - concat=table.concat, - signed=number.signed, - points=number.points, - basepoints=number.basepoints, - utfchar=utf.char, - utfbyte=utf.byte, - lpegmatch=lpeg.match, - nspaces=string.nspaces, - tracedchar=string.tracedchar, - autosingle=string.autosingle, - autodouble=string.autodouble, - sequenced=table.sequenced, - formattednumber=number.formatted, - sparseexponent=number.sparseexponent, - } -end -local arguments={ "a1" } -setmetatable(arguments,{ __index=function(t,k) - local v=t[k-1]..",a"..k - t[k]=v - return v - end -}) -local prefix_any=C((S("+- .")+R("09"))^0) -local prefix_tab=P("{")*C((1-P("}"))^0)*P("}")+C((1-R("az","AZ","09","%%"))^0) -local format_s=function(f) - n=n+1 - if f and f~="" then - return format("format('%%%ss',a%s)",f,n) - else - return format("(a%s or '')",n) - end -end -local format_S=function(f) - n=n+1 - if f and f~="" then - return format("format('%%%ss',tostring(a%s))",f,n) - else - return format("tostring(a%s)",n) - end -end -local format_q=function() - n=n+1 - return format("(a%s and format('%%q',a%s) or '')",n,n) -end -local format_Q=function() - n=n+1 - return format("format('%%q',tostring(a%s))",n) -end -local format_i=function(f) - n=n+1 - if f and f~="" then - return format("format('%%%si',a%s)",f,n) - else - return format("format('%%i',a%s)",n) - end -end -local format_d=format_i -local format_I=function(f) - n=n+1 - return format("format('%%s%%%si',signed(a%s))",f,n) -end -local format_f=function(f) - n=n+1 - return format("format('%%%sf',a%s)",f,n) -end -local format_F=function(f) - n=n+1 - if not f or f=="" then - return format("(((a%s > -0.0000000005 and a%s < 0.0000000005) and '0') or format((a%s %% 1 == 0) and '%%i' or '%%.9f',a%s))",n,n,n,n) - else - return format("format((a%s %% 1 == 0) and '%%i' or '%%%sf',a%s)",n,f,n) - end -end -local 
format_g=function(f) - n=n+1 - return format("format('%%%sg',a%s)",f,n) -end -local format_G=function(f) - n=n+1 - return format("format('%%%sG',a%s)",f,n) -end -local format_e=function(f) - n=n+1 - return format("format('%%%se',a%s)",f,n) -end -local format_E=function(f) - n=n+1 - return format("format('%%%sE',a%s)",f,n) -end -local format_j=function(f) - n=n+1 - return format("sparseexponent('%%%se',a%s)",f,n) -end -local format_J=function(f) - n=n+1 - return format("sparseexponent('%%%sE',a%s)",f,n) -end -local format_x=function(f) - n=n+1 - return format("format('%%%sx',a%s)",f,n) -end -local format_X=function(f) - n=n+1 - return format("format('%%%sX',a%s)",f,n) -end -local format_o=function(f) - n=n+1 - return format("format('%%%so',a%s)",f,n) -end -local format_c=function() - n=n+1 - return format("utfchar(a%s)",n) -end -local format_C=function() - n=n+1 - return format("tracedchar(a%s)",n) -end -local format_r=function(f) - n=n+1 - return format("format('%%%s.0f',a%s)",f,n) -end -local format_h=function(f) - n=n+1 - if f=="-" then - f=sub(f,2) - return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) - else - return format("format('0x%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) - end -end -local format_H=function(f) - n=n+1 - if f=="-" then - f=sub(f,2) - return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) - else - return format("format('0x%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) - end -end -local format_u=function(f) - n=n+1 - if f=="-" then - f=sub(f,2) - return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) - else - return format("format('u+%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) - end -end -local format_U=function(f) - n=n+1 - if f=="-" then - f=sub(f,2) - return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) - else - return format("format('U+%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) - end -end -local format_p=function() - n=n+1 - return format("points(a%s)",n) -end -local format_b=function() - n=n+1 - return format("basepoints(a%s)",n) -end -local format_t=function(f) - n=n+1 - if f and f~="" then - return format("concat(a%s,%q)",n,f) - else - return format("concat(a%s)",n) - end -end -local format_T=function(f) - n=n+1 - if f and f~="" then - return format("sequenced(a%s,%q)",n,f) - else - return format("sequenced(a%s)",n) - end -end -local format_l=function() - n=n+1 - return format("(a%s and 'true' or 'false')",n) -end -local format_L=function() - n=n+1 - return format("(a%s and 'TRUE' or 'FALSE')",n) -end -local format_N=function() - n=n+1 - return format("tostring(tonumber(a%s) or a%s)",n,n) -end -local format_a=function(f) - n=n+1 - if f and f~="" then - return format("autosingle(a%s,%q)",n,f) - else - return format("autosingle(a%s)",n) - end -end -local format_A=function(f) - n=n+1 - if f and f~="" then - return format("autodouble(a%s,%q)",n,f) - else - return format("autodouble(a%s)",n) - end -end -local format_w=function(f) - n=n+1 - f=tonumber(f) - if f then - return format("nspaces[%s+a%s]",f,n) - else - return format("nspaces[a%s]",n) - end -end -local format_W=function(f) - return format("nspaces[%s]",tonumber(f) or 0) -end -local digit=patterns.digit -local period=patterns.period -local 
three=digit*digit*digit -local splitter=Cs ( - (((1-(three^1*period))^1+C(three))*(Carg(1)*three)^1+C((1-period)^1))*(P(1)/""*Carg(2))*C(2) -) -patterns.formattednumber=splitter -function number.formatted(n,sep1,sep2) - local s=type(s)=="string" and n or format("%0.2f",n) - if sep1==true then - return lpegmatch(splitter,s,1,".",",") - elseif sep1=="." then - return lpegmatch(splitter,s,1,sep1,sep2 or ",") - elseif sep1=="," then - return lpegmatch(splitter,s,1,sep1,sep2 or ".") - else - return lpegmatch(splitter,s,1,sep1 or ",",sep2 or ".") - end -end -local format_m=function(f) - n=n+1 - if not f or f=="" then - f="," - end - return format([[formattednumber(a%s,%q,".")]],n,f) -end -local format_M=function(f) - n=n+1 - if not f or f=="" then - f="." - end - return format([[formattednumber(a%s,%q,",")]],n,f) -end -local format_z=function(f) - n=n+(tonumber(f) or 1) - return "''" -end -local format_rest=function(s) - return format("%q",s) -end -local format_extension=function(extensions,f,name) - local extension=extensions[name] or "tostring(%s)" - local f=tonumber(f) or 1 - if f==0 then - return extension - elseif f==1 then - n=n+1 - local a="a"..n - return format(extension,a,a) - elseif f<0 then - local a="a"..(n+f+1) - return format(extension,a,a) - else - local t={} - for i=1,f do - n=n+1 - t[#t+1]="a"..n - end - return format(extension,unpack(t)) - end -end -local builder=Cs { "start", - start=( - ( - P("%")/""*( - V("!") -+V("s")+V("q")+V("i")+V("d")+V("f")+V("F")+V("g")+V("G")+V("e")+V("E")+V("x")+V("X")+V("o") -+V("c")+V("C")+V("S") -+V("Q") -+V("N") -+V("r")+V("h")+V("H")+V("u")+V("U")+V("p")+V("b")+V("t")+V("T")+V("l")+V("L")+V("I")+V("w") -+V("W") -+V("a") -+V("A") -+V("j")+V("J") -+V("m")+V("M") -+V("z") - )+V("*") - )*(P(-1)+Carg(1)) - )^0, - ["s"]=(prefix_any*P("s"))/format_s, - ["q"]=(prefix_any*P("q"))/format_q, - ["i"]=(prefix_any*P("i"))/format_i, - ["d"]=(prefix_any*P("d"))/format_d, - ["f"]=(prefix_any*P("f"))/format_f, - ["F"]=(prefix_any*P("F"))/format_F, - ["g"]=(prefix_any*P("g"))/format_g, - ["G"]=(prefix_any*P("G"))/format_G, - ["e"]=(prefix_any*P("e"))/format_e, - ["E"]=(prefix_any*P("E"))/format_E, - ["x"]=(prefix_any*P("x"))/format_x, - ["X"]=(prefix_any*P("X"))/format_X, - ["o"]=(prefix_any*P("o"))/format_o, - ["S"]=(prefix_any*P("S"))/format_S, - ["Q"]=(prefix_any*P("Q"))/format_S, - ["N"]=(prefix_any*P("N"))/format_N, - ["c"]=(prefix_any*P("c"))/format_c, - ["C"]=(prefix_any*P("C"))/format_C, - ["r"]=(prefix_any*P("r"))/format_r, - ["h"]=(prefix_any*P("h"))/format_h, - ["H"]=(prefix_any*P("H"))/format_H, - ["u"]=(prefix_any*P("u"))/format_u, - ["U"]=(prefix_any*P("U"))/format_U, - ["p"]=(prefix_any*P("p"))/format_p, - ["b"]=(prefix_any*P("b"))/format_b, - ["t"]=(prefix_tab*P("t"))/format_t, - ["T"]=(prefix_tab*P("T"))/format_T, - ["l"]=(prefix_any*P("l"))/format_l, - ["L"]=(prefix_any*P("L"))/format_L, - ["I"]=(prefix_any*P("I"))/format_I, - ["w"]=(prefix_any*P("w"))/format_w, - ["W"]=(prefix_any*P("W"))/format_W, - ["j"]=(prefix_any*P("j"))/format_j, - ["J"]=(prefix_any*P("J"))/format_J, - ["m"]=(prefix_tab*P("m"))/format_m, - ["M"]=(prefix_tab*P("M"))/format_M, - ["z"]=(prefix_any*P("z"))/format_z, - ["a"]=(prefix_any*P("a"))/format_a, - ["A"]=(prefix_any*P("A"))/format_A, - ["*"]=Cs(((1-P("%"))^1+P("%%")/"%%")^1)/format_rest, - ["?"]=Cs(((1-P("%"))^1 )^1)/format_rest, - ["!"]=Carg(2)*prefix_any*P("!")*C((1-P("!"))^1)*P("!")/format_extension, -} -local direct=Cs ( - P("%")*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*P(-1)/[[local format = string.format return 
function(str) return format("%0",str) end]] -) -local function make(t,str) - local f - local p - local p=lpegmatch(direct,str) - if p then - f=loadstripped(p)() - else - n=0 - p=lpegmatch(builder,str,1,t._connector_,t._extensions_) - if n>0 then - p=format(template,preamble,t._preamble_,arguments[n],p) - f=loadstripped(p,t._environment_)() - else - f=function() return str end - end - end - t[str]=f - return f -end -local function use(t,fmt,...) - return t[fmt](...) -end -strings.formatters={} -if _LUAVERSION<5.2 then - function strings.formatters.new(noconcat) - local t={ _type_="formatter",_connector_=noconcat and "," or "..",_extensions_={},_preamble_=preamble,_environment_={} } - setmetatable(t,{ __index=make,__call=use }) - return t - end -else - function strings.formatters.new(noconcat) - local e={} - for k,v in next,environment do - e[k]=v - end - local t={ _type_="formatter",_connector_=noconcat and "," or "..",_extensions_={},_preamble_="",_environment_=e } - setmetatable(t,{ __index=make,__call=use }) - return t - end -end -local formatters=strings.formatters.new() -string.formatters=formatters -string.formatter=function(str,...) return formatters[str](...) end -local function add(t,name,template,preamble) - if type(t)=="table" and t._type_=="formatter" then - t._extensions_[name]=template or "%s" - if type(preamble)=="string" then - t._preamble_=preamble.."\n"..t._preamble_ - elseif type(preamble)=="table" then - for k,v in next,preamble do - t._environment_[k]=v - end - end - end -end -strings.formatters.add=add -patterns.xmlescape=Cs((P("<")/"<"+P(">")/">"+P("&")/"&"+P('"')/"""+P(1))^0) -patterns.texescape=Cs((C(S("#$%\\{}"))/"\\%1"+P(1))^0) -patterns.luaescape=Cs(((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0) -patterns.luaquoted=Cs(Cc('"')*((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0*Cc('"')) -if _LUAVERSION<5.2 then - add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],"local xmlescape = lpeg.patterns.xmlescape") - add(formatters,"tex",[[lpegmatch(texescape,%s)]],"local texescape = lpeg.patterns.texescape") - add(formatters,"lua",[[lpegmatch(luaescape,%s)]],"local luaescape = lpeg.patterns.luaescape") -else - add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],{ xmlescape=lpeg.patterns.xmlescape }) - add(formatters,"tex",[[lpegmatch(texescape,%s)]],{ texescape=lpeg.patterns.texescape }) - add(formatters,"lua",[[lpegmatch(luaescape,%s)]],{ luaescape=lpeg.patterns.luaescape }) -end -local dquote=patterns.dquote -local equote=patterns.escaped+dquote/'\\"'+1 -local space=patterns.space -local cquote=Cc('"') -local pattern=Cs(dquote*(equote-P(-2))^0*dquote) -+Cs(cquote*(equote-space)^0*space*equote^0*cquote) -function string.optionalquoted(str) - return lpegmatch(pattern,str) or str -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['luat-basics-gen']={ - version=1.100, - comment="companion to luatex-*.tex", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end -local dummyfunction=function() -end -local dummyreporter=function(c) - return function(...) - (texio.reporter or texio.write_nl)(c.." 
: "..string.formatters(...)) - end -end -statistics={ - register=dummyfunction, - starttiming=dummyfunction, - stoptiming=dummyfunction, - elapsedtime=nil, -} -directives={ - register=dummyfunction, - enable=dummyfunction, - disable=dummyfunction, -} -trackers={ - register=dummyfunction, - enable=dummyfunction, - disable=dummyfunction, -} -experiments={ - register=dummyfunction, - enable=dummyfunction, - disable=dummyfunction, -} -storage={ - register=dummyfunction, - shared={}, -} -logs={ - new=dummyreporter, - reporter=dummyreporter, - messenger=dummyreporter, - report=dummyfunction, -} -callbacks={ - register=function(n,f) return callback.register(n,f) end, -} -utilities={ - storage={ - allocate=function(t) return t or {} end, - mark=function(t) return t or {} end, - }, -} -characters=characters or { - data={} -} -texconfig.kpse_init=true -resolvers=resolvers or {} -local remapper={ - otf="opentype fonts", - ttf="truetype fonts", - ttc="truetype fonts", - dfont="truetype fonts", - cid="cid maps", - cidmap="cid maps", - fea="font feature files", - pfa="type1 fonts", - pfb="type1 fonts", - afm="afm", -} -function resolvers.findfile(name,fileformat) - name=string.gsub(name,"\\","/") - if not fileformat or fileformat=="" then - fileformat=file.suffix(name) - if fileformat=="" then - fileformat="tex" - end - end - fileformat=string.lower(fileformat) - fileformat=remapper[fileformat] or fileformat - local found=kpse.find_file(name,fileformat) - if not found or found=="" then - found=kpse.find_file(name,"other text files") - end - return found -end -resolvers.findbinfile=resolvers.findfile -function resolvers.loadbinfile(filename,filetype) - local data=io.loaddata(filename) - return true,data,#data -end -function resolvers.resolve(s) - return s -end -function resolvers.unresolve(s) - return s -end -caches={} -local writable=nil -local readables={} -local usingjit=jit -if not caches.namespace or caches.namespace=="" or caches.namespace=="context" then - caches.namespace='generic' -end -do - local cachepaths=kpse.expand_var('$TEXMFCACHE') or "" - if cachepaths=="" or cachepaths=="$TEXMFCACHE" then - cachepaths=kpse.expand_var('$TEXMFVAR') or "" - end - if cachepaths=="" or cachepaths=="$TEXMFVAR" then - cachepaths=kpse.expand_var('$VARTEXMF') or "" - end - if cachepaths=="" then - local fallbacks={ "TMPDIR","TEMPDIR","TMP","TEMP","HOME","HOMEPATH" } - for i=1,#fallbacks do - cachepaths=os.getenv(fallbacks[i]) or "" - if cachepath~="" and lfs.isdir(cachepath) then - break - end - end - end - if cachepaths=="" then - cachepaths="." 
- end - cachepaths=string.split(cachepaths,os.type=="windows" and ";" or ":") - for i=1,#cachepaths do - local cachepath=cachepaths[i] - if not lfs.isdir(cachepath) then - lfs.mkdirs(cachepath) - if lfs.isdir(cachepath) then - texio.write(string.format("(created cache path: %s)",cachepath)) - end - end - if file.is_writable(cachepath) then - writable=file.join(cachepath,"luatex-cache") - lfs.mkdir(writable) - writable=file.join(writable,caches.namespace) - lfs.mkdir(writable) - break - end - end - for i=1,#cachepaths do - if file.is_readable(cachepaths[i]) then - readables[#readables+1]=file.join(cachepaths[i],"luatex-cache",caches.namespace) - end - end - if not writable then - texio.write_nl("quiting: fix your writable cache path") - os.exit() - elseif #readables==0 then - texio.write_nl("quiting: fix your readable cache path") - os.exit() - elseif #readables==1 and readables[1]==writable then - texio.write(string.format("(using cache: %s)",writable)) - else - texio.write(string.format("(using write cache: %s)",writable)) - texio.write(string.format("(using read cache: %s)",table.concat(readables," "))) - end -end -function caches.getwritablepath(category,subcategory) - local path=file.join(writable,category) - lfs.mkdir(path) - path=file.join(path,subcategory) - lfs.mkdir(path) - return path -end -function caches.getreadablepaths(category,subcategory) - local t={} - for i=1,#readables do - t[i]=file.join(readables[i],category,subcategory) - end - return t -end -local function makefullname(path,name) - if path and path~="" then - return file.addsuffix(file.join(path,name),"lua"),file.addsuffix(file.join(path,name),usingjit and "lub" or "luc") - end -end -function caches.is_writable(path,name) - local fullname=makefullname(path,name) - return fullname and file.is_writable(fullname) -end -function caches.loaddata(paths,name) - for i=1,#paths do - local data=false - local luaname,lucname=makefullname(paths[i],name) - if lucname and not lfs.isfile(lucname) and type(caches.compile)=="function" then - texio.write(string.format("(compiling luc: %s)",lucname)) - data=loadfile(luaname) - if data then - data=data() - end - if data then - caches.compile(data,luaname,lucname) - return data - end - end - if lucname and lfs.isfile(lucname) then - texio.write(string.format("(load luc: %s)",lucname)) - data=loadfile(lucname) - if data then - data=data() - end - if data then - return data - else - texio.write(string.format("(loading failed: %s)",lucname)) - end - end - if luaname and lfs.isfile(luaname) then - texio.write(string.format("(load lua: %s)",luaname)) - data=loadfile(luaname) - if data then - data=data() - end - if data then - return data - end - end - end -end -function caches.savedata(path,name,data) - local luaname,lucname=makefullname(path,name) - if luaname then - texio.write(string.format("(save: %s)",luaname)) - table.tofile(luaname,data,true) - if lucname and type(caches.compile)=="function" then - os.remove(lucname) - texio.write(string.format("(save: %s)",lucname)) - caches.compile(data,luaname,lucname) - end - end -end -function caches.compile(data,luaname,lucname) - local d=io.loaddata(luaname) - if not d or d=="" then - d=table.serialize(data,true) - end - if d and d~="" then - local f=io.open(lucname,'wb') - if f then - local s=loadstring(d) - if s then - f:write(string.dump(s,true)) - end - f:close() - end - end -end -function table.setmetatableindex(t,f) - if type(t)~="table" then - f=f or t - t={} - end - setmetatable(t,{ __index=f }) - return t -end -arguments={} -if arg then 
- for i=1,#arg do - local k,v=string.match(arg[i],"^%-%-([^=]+)=?(.-)$") - if k and v then - arguments[k]=v - end - end -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['data-con']={ - version=1.100, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local format,lower,gsub=string.format,string.lower,string.gsub -local trace_cache=false trackers.register("resolvers.cache",function(v) trace_cache=v end) -local trace_containers=false trackers.register("resolvers.containers",function(v) trace_containers=v end) -local trace_storage=false trackers.register("resolvers.storage",function(v) trace_storage=v end) -containers=containers or {} -local containers=containers -containers.usecache=true -local report_containers=logs.reporter("resolvers","containers") -local allocated={} -local mt={ - __index=function(t,k) - if k=="writable" then - local writable=caches.getwritablepath(t.category,t.subcategory) or { "." } - t.writable=writable - return writable - elseif k=="readables" then - local readables=caches.getreadablepaths(t.category,t.subcategory) or { "." } - t.readables=readables - return readables - end - end, - __storage__=true -} -function containers.define(category,subcategory,version,enabled) - if category and subcategory then - local c=allocated[category] - if not c then - c={} - allocated[category]=c - end - local s=c[subcategory] - if not s then - s={ - category=category, - subcategory=subcategory, - storage={}, - enabled=enabled, - version=version or math.pi, - trace=false, - } - setmetatable(s,mt) - c[subcategory]=s - end - return s - end -end -function containers.is_usable(container,name) - return container.enabled and caches and caches.is_writable(container.writable,name) -end -function containers.is_valid(container,name) - if name and name~="" then - local storage=container.storage[name] - return storage and storage.cache_version==container.version - else - return false - end -end -function containers.read(container,name) - local storage=container.storage - local stored=storage[name] - if not stored and container.enabled and caches and containers.usecache then - stored=caches.loaddata(container.readables,name) - if stored and stored.cache_version==container.version then - if trace_cache or trace_containers then - report_containers("action %a, category %a, name %a","load",container.subcategory,name) - end - else - stored=nil - end - storage[name]=stored - elseif stored then - if trace_cache or trace_containers then - report_containers("action %a, category %a, name %a","reuse",container.subcategory,name) - end - end - return stored -end -function containers.write(container,name,data) - if data then - data.cache_version=container.version - if container.enabled and caches then - local unique,shared=data.unique,data.shared - data.unique,data.shared=nil,nil - caches.savedata(container.writable,name,data) - if trace_cache or trace_containers then - report_containers("action %a, category %a, name %a","save",container.subcategory,name) - end - data.unique,data.shared=unique,shared - end - if trace_cache or trace_containers then - report_containers("action %a, category %a, name %a","store",container.subcategory,name) - end - container.storage[name]=data - end - return data -end -function containers.content(container,name) - return container.storage[name] -end -function 
containers.cleanname(name) - return (gsub(lower(name),"[^%w\128-\255]+","-")) -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['luatex-fonts-nod']={ - version=1.001, - comment="companion to luatex-fonts.lua", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end -if tex.attribute[0]~=0 then - texio.write_nl("log","!") - texio.write_nl("log","! Attribute 0 is reserved for ConTeXt's font feature management and has to be") - texio.write_nl("log","! set to zero. Also, some attributes in the range 1-255 are used for special") - texio.write_nl("log","! purposes so setting them at the TeX end might break the font handler.") - texio.write_nl("log","!") - tex.attribute[0]=0 -end -attributes=attributes or {} -attributes.unsetvalue=-0x7FFFFFFF -local numbers,last={},127 -attributes.private=attributes.private or function(name) - local number=numbers[name] - if not number then - if last<255 then - last=last+1 - end - number=last - numbers[name]=number - end - return number -end -nodes={} -nodes.pool={} -nodes.handlers={} -local nodecodes={} for k,v in next,node.types () do nodecodes[string.gsub(v,"_","")]=k end -local whatcodes={} for k,v in next,node.whatsits() do whatcodes[string.gsub(v,"_","")]=k end -local glyphcodes={ [0]="character","glyph","ligature","ghost","left","right" } -local disccodes={ [0]="discretionary","explicit","automatic","regular","first","second" } -nodes.nodecodes=nodecodes -nodes.whatcodes=whatcodes -nodes.whatsitcodes=whatcodes -nodes.glyphcodes=glyphcodes -nodes.disccodes=disccodes -local free_node=node.free -local remove_node=node.remove -local new_node=node.new -local traverse_id=node.traverse_id -nodes.handlers.protectglyphs=node.protect_glyphs -nodes.handlers.unprotectglyphs=node.unprotect_glyphs -local math_code=nodecodes.math -local end_of_math=node.end_of_math -function node.end_of_math(n) - if n.id==math_code and n.subtype==1 then - return n - else - return end_of_math(n) - end -end -function nodes.remove(head,current,free_too) - local t=current - head,current=remove_node(head,current) - if t then - if free_too then - free_node(t) - t=nil - else - t.next,t.prev=nil,nil - end - end - return head,current,t -end -function nodes.delete(head,current) - return nodes.remove(head,current,true) -end -function nodes.pool.kern(k) - local n=new_node("kern",1) - n.kern=k - return n -end -local getfield=node.getfield -local setfield=node.setfield -nodes.getfield=getfield -nodes.setfield=setfield -nodes.getattr=getfield -nodes.setattr=setfield -nodes.tostring=node.tostring or tostring -nodes.copy=node.copy -nodes.copy_list=node.copy_list -nodes.delete=node.delete -nodes.dimensions=node.dimensions -nodes.end_of_math=node.end_of_math -nodes.flush_list=node.flush_list -nodes.flush_node=node.flush_node -nodes.free=node.free -nodes.insert_after=node.insert_after -nodes.insert_before=node.insert_before -nodes.hpack=node.hpack -nodes.new=node.new -nodes.tail=node.tail -nodes.traverse=node.traverse -nodes.traverse_id=node.traverse_id -nodes.slide=node.slide -nodes.vpack=node.vpack -nodes.first_glyph=node.first_glyph -nodes.first_character=node.first_character -nodes.has_glyph=node.has_glyph or node.first_glyph -nodes.current_attr=node.current_attr -nodes.do_ligature_n=node.do_ligature_n -nodes.has_field=node.has_field 
-nodes.last_node=node.last_node -nodes.usedlist=node.usedlist -nodes.protrusion_skippable=node.protrusion_skippable -nodes.write=node.write -nodes.has_attribute=node.has_attribute -nodes.set_attribute=node.set_attribute -nodes.unset_attribute=node.unset_attribute -nodes.protect_glyphs=node.protect_glyphs -nodes.unprotect_glyphs=node.unprotect_glyphs -nodes.kerning=node.kerning -nodes.ligaturing=node.ligaturing -nodes.mlist_to_hlist=node.mlist_to_hlist -local direct=node.direct -local nuts={} -nodes.nuts=nuts -local tonode=direct.tonode -local tonut=direct.todirect -nodes.tonode=tonode -nodes.tonut=tonut -nuts.tonode=tonode -nuts.tonut=tonut -local getfield=direct.getfield -local setfield=direct.setfield -nuts.getfield=getfield -nuts.setfield=setfield -nuts.getnext=direct.getnext -nuts.getprev=direct.getprev -nuts.getid=direct.getid -nuts.getattr=getfield -nuts.setattr=setfield -nuts.getfont=direct.getfont -nuts.getsubtype=direct.getsubtype -nuts.getchar=direct.getchar -nuts.insert_before=direct.insert_before -nuts.insert_after=direct.insert_after -nuts.delete=direct.delete -nuts.copy=direct.copy -nuts.tail=direct.tail -nuts.flush_list=direct.flush_list -nuts.end_of_math=direct.end_of_math -nuts.traverse=direct.traverse -nuts.traverse_id=direct.traverse_id -nuts.getprop=nuts.getattr -nuts.setprop=nuts.setattr -local new_nut=direct.new -nuts.new=new_nut -nuts.pool={} -function nuts.pool.kern(k) - local n=new_nut("kern",1) - setfield(n,"kern",k) - return n -end -local propertydata=direct.get_properties_table() -nodes.properties={ data=propertydata } -direct.set_properties_mode(true,true) -function direct.set_properties_mode() end -nuts.getprop=function(n,k) - local p=propertydata[n] - if p then - return p[k] - end -end -nuts.setprop=function(n,k,v) - if v then - local p=propertydata[n] - if p then - p[k]=v - else - propertydata[n]={ [k]=v } - end - end -end -nodes.setprop=nodes.setproperty -nodes.getprop=nodes.getproperty - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-ini']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local allocate=utilities.storage.allocate -local report_defining=logs.reporter("fonts","defining") -fonts=fonts or {} -local fonts=fonts -fonts.hashes={ identifiers=allocate() } -fonts.tables=fonts.tables or {} -fonts.helpers=fonts.helpers or {} -fonts.tracers=fonts.tracers or {} -fonts.specifiers=fonts.specifiers or {} -fonts.analyzers={} -fonts.readers={} -fonts.definers={ methods={} } -fonts.loggers={ register=function() end } -fontloader.totable=fontloader.to_table - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-con']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local next,tostring,rawget=next,tostring,rawget -local format,match,lower,gsub=string.format,string.match,string.lower,string.gsub -local utfbyte=utf.byte -local sort,insert,concat,sortedkeys,serialize,fastcopy=table.sort,table.insert,table.concat,table.sortedkeys,table.serialize,table.fastcopy -local derivetable=table.derive -local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end) 
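The nuts.getprop/nuts.setprop pair above keeps per-glyph properties in a separate table keyed by the node itself and only creates the inner table on first assignment. A runnable sketch of that lazy property store, with plain Lua tables standing in for direct nodes:

local propertydata = {}

local function setprop(n, k, v)
  if v then
    local p = propertydata[n]
    if p then
      p[k] = v
    else
      propertydata[n] = { [k] = v }   -- created only when something is actually stored
    end
  end
end

local function getprop(n, k)
  local p = propertydata[n]
  if p then
    return p[k]
  end
end

local glyph = {}                      -- stand-in for a node
setprop(glyph, "injection", "kern")
print(getprop(glyph, "injection"))    --> kern
print(getprop({}, "injection"))       --> nil (no properties attached yet)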
-local trace_scaling=false trackers.register("fonts.scaling",function(v) trace_scaling=v end) -local report_defining=logs.reporter("fonts","defining") -local fonts=fonts -local constructors=fonts.constructors or {} -fonts.constructors=constructors -local handlers=fonts.handlers or {} -fonts.handlers=handlers -local allocate=utilities.storage.allocate -local setmetatableindex=table.setmetatableindex -constructors.dontembed=allocate() -constructors.autocleanup=true -constructors.namemode="fullpath" -constructors.version=1.01 -constructors.cache=containers.define("fonts","constructors",constructors.version,false) -constructors.privateoffset=0xF0000 -constructors.cacheintex=true -constructors.keys={ - properties={ - encodingbytes="number", - embedding="number", - cidinfo={}, - format="string", - fontname="string", - fullname="string", - filename="filename", - psname="string", - name="string", - virtualized="boolean", - hasitalics="boolean", - autoitalicamount="basepoints", - nostackmath="boolean", - noglyphnames="boolean", - mode="string", - hasmath="boolean", - mathitalics="boolean", - textitalics="boolean", - finalized="boolean", - }, - parameters={ - mathsize="number", - scriptpercentage="float", - scriptscriptpercentage="float", - units="cardinal", - designsize="scaledpoints", - expansion={ - stretch="integerscale", - shrink="integerscale", - step="integerscale", - auto="boolean", - }, - protrusion={ - auto="boolean", - }, - slantfactor="float", - extendfactor="float", - factor="float", - hfactor="float", - vfactor="float", - size="scaledpoints", - units="scaledpoints", - scaledpoints="scaledpoints", - slantperpoint="scaledpoints", - spacing={ - width="scaledpoints", - stretch="scaledpoints", - shrink="scaledpoints", - extra="scaledpoints", - }, - xheight="scaledpoints", - quad="scaledpoints", - ascender="scaledpoints", - descender="scaledpoints", - synonyms={ - space="spacing.width", - spacestretch="spacing.stretch", - spaceshrink="spacing.shrink", - extraspace="spacing.extra", - x_height="xheight", - space_stretch="spacing.stretch", - space_shrink="spacing.shrink", - extra_space="spacing.extra", - em="quad", - ex="xheight", - slant="slantperpoint", - }, - }, - description={ - width="basepoints", - height="basepoints", - depth="basepoints", - boundingbox={}, - }, - character={ - width="scaledpoints", - height="scaledpoints", - depth="scaledpoints", - italic="scaledpoints", - }, -} -local designsizes=allocate() -constructors.designsizes=designsizes -local loadedfonts=allocate() -constructors.loadedfonts=loadedfonts -local factors={ - pt=65536.0, - bp=65781.8, -} -function constructors.setfactor(f) - constructors.factor=factors[f or 'pt'] or factors.pt -end -constructors.setfactor() -function constructors.scaled(scaledpoints,designsize) - if scaledpoints<0 then - if designsize then - local factor=constructors.factor - if designsize>factor then - return (- scaledpoints/1000)*designsize - else - return (- scaledpoints/1000)*designsize*factor - end - else - return (- scaledpoints/1000)*10*factor - end - else - return scaledpoints - end -end -function constructors.cleanuptable(tfmdata) - if constructors.autocleanup and tfmdata.properties.virtualized then - for k,v in next,tfmdata.characters do - if v.commands then v.commands=nil end - end - end -end -function constructors.calculatescale(tfmdata,scaledpoints) - local parameters=tfmdata.parameters - if scaledpoints<0 then - scaledpoints=(- scaledpoints/1000)*(tfmdata.designsize or parameters.designsize) - end - return 
scaledpoints,scaledpoints/(parameters.units or 1000) -end -local unscaled={ - ScriptPercentScaleDown=true, - ScriptScriptPercentScaleDown=true, - RadicalDegreeBottomRaisePercent=true -} -function constructors.assignmathparameters(target,original) - local mathparameters=original.mathparameters - if mathparameters and next(mathparameters) then - local targetparameters=target.parameters - local targetproperties=target.properties - local targetmathparameters={} - local factor=targetproperties.math_is_scaled and 1 or targetparameters.factor - for name,value in next,mathparameters do - if unscaled[name] then - targetmathparameters[name]=value - else - targetmathparameters[name]=value*factor - end - end - if not targetmathparameters.FractionDelimiterSize then - targetmathparameters.FractionDelimiterSize=1.01*targetparameters.size - end - if not mathparameters.FractionDelimiterDisplayStyleSize then - targetmathparameters.FractionDelimiterDisplayStyleSize=2.40*targetparameters.size - end - target.mathparameters=targetmathparameters - end -end -function constructors.beforecopyingcharacters(target,original) -end -function constructors.aftercopyingcharacters(target,original) -end -constructors.sharefonts=false -constructors.nofsharedfonts=0 -local sharednames={} -function constructors.trytosharefont(target,tfmdata) - if constructors.sharefonts then - local characters=target.characters - local n=1 - local t={ target.psname } - local u=sortedkeys(characters) - for i=1,#u do - local k=u[i] - n=n+1;t[n]=k - n=n+1;t[n]=characters[k].index or k - end - local h=md5.HEX(concat(t," ")) - local s=sharednames[h] - if s then - if trace_defining then - report_defining("font %a uses backend resources of font %a",target.fullname,s) - end - target.fullname=s - constructors.nofsharedfonts=constructors.nofsharedfonts+1 - target.properties.sharedwith=s - else - sharednames[h]=target.fullname - end - end -end -function constructors.enhanceparameters(parameters) - local xheight=parameters.x_height - local quad=parameters.quad - local space=parameters.space - local stretch=parameters.space_stretch - local shrink=parameters.space_shrink - local extra=parameters.extra_space - local slant=parameters.slant - parameters.xheight=xheight - parameters.spacestretch=stretch - parameters.spaceshrink=shrink - parameters.extraspace=extra - parameters.em=quad - parameters.ex=xheight - parameters.slantperpoint=slant - parameters.spacing={ - width=space, - stretch=stretch, - shrink=shrink, - extra=extra, - } -end -function constructors.scale(tfmdata,specification) - local target={} - if tonumber(specification) then - specification={ size=specification } - end - target.specification=specification - local scaledpoints=specification.size - local relativeid=specification.relativeid - local properties=tfmdata.properties or {} - local goodies=tfmdata.goodies or {} - local resources=tfmdata.resources or {} - local descriptions=tfmdata.descriptions or {} - local characters=tfmdata.characters or {} - local changed=tfmdata.changed or {} - local shared=tfmdata.shared or {} - local parameters=tfmdata.parameters or {} - local mathparameters=tfmdata.mathparameters or {} - local targetcharacters={} - local targetdescriptions=derivetable(descriptions) - local targetparameters=derivetable(parameters) - local targetproperties=derivetable(properties) - local targetgoodies=goodies - target.characters=targetcharacters - target.descriptions=targetdescriptions - target.parameters=targetparameters - target.properties=targetproperties - 
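The calculatescale step completed at the top of this chunk resolves a negative size request as "per mille of the design size" and returns, next to the resolved size, the factor by which all font-unit dimensions are later multiplied. A small sketch of that convention; the 10pt design size (655360 scaled points) and 1000 units per em are illustrative:

local function calculatescale(tfmdata, scaledpoints)
  local parameters = tfmdata.parameters
  if scaledpoints < 0 then
    -- negative means: this many thousandths of the design size
    scaledpoints = (-scaledpoints / 1000) * (tfmdata.designsize or parameters.designsize)
  end
  return scaledpoints, scaledpoints / (parameters.units or 1000)
end

local tfmdata = { parameters = { designsize = 655360, units = 1000 } }
print(calculatescale(tfmdata, 786432))   --> 786432   786.432  (12pt asked for directly)
print(calculatescale(tfmdata, -1200))    --> 786432   786.432  (1.2 times the design size)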
target.goodies=targetgoodies - target.shared=shared - target.resources=resources - target.unscaled=tfmdata - local mathsize=tonumber(specification.mathsize) or 0 - local textsize=tonumber(specification.textsize) or scaledpoints - local forcedsize=tonumber(parameters.mathsize ) or 0 - local extrafactor=tonumber(specification.factor ) or 1 - if (mathsize==2 or forcedsize==2) and parameters.scriptpercentage then - scaledpoints=parameters.scriptpercentage*textsize/100 - elseif (mathsize==3 or forcedsize==3) and parameters.scriptscriptpercentage then - scaledpoints=parameters.scriptscriptpercentage*textsize/100 - elseif forcedsize>1000 then - scaledpoints=forcedsize - end - targetparameters.mathsize=mathsize - targetparameters.textsize=textsize - targetparameters.forcedsize=forcedsize - targetparameters.extrafactor=extrafactor - local tounicode=fonts.mappings.tounicode - local defaultwidth=resources.defaultwidth or 0 - local defaultheight=resources.defaultheight or 0 - local defaultdepth=resources.defaultdepth or 0 - local units=parameters.units or 1000 - if target.fonts then - target.fonts=fastcopy(target.fonts) - end - targetproperties.language=properties.language or "dflt" - targetproperties.script=properties.script or "dflt" - targetproperties.mode=properties.mode or "base" - local askedscaledpoints=scaledpoints - local scaledpoints,delta=constructors.calculatescale(tfmdata,scaledpoints,nil,specification) - local hdelta=delta - local vdelta=delta - target.designsize=parameters.designsize - target.units_per_em=units - local direction=properties.direction or tfmdata.direction or 0 - target.direction=direction - properties.direction=direction - target.size=scaledpoints - target.encodingbytes=properties.encodingbytes or 1 - target.embedding=properties.embedding or "subset" - target.tounicode=1 - target.cidinfo=properties.cidinfo - target.format=properties.format - target.cache=constructors.cacheintex and "yes" or "renew" - local fontname=properties.fontname or tfmdata.fontname - local fullname=properties.fullname or tfmdata.fullname - local filename=properties.filename or tfmdata.filename - local psname=properties.psname or tfmdata.psname - local name=properties.name or tfmdata.name - if not psname or psname=="" then - psname=fontname or (fullname and fonts.names.cleanname(fullname)) - end - target.fontname=fontname - target.fullname=fullname - target.filename=filename - target.psname=psname - target.name=name - properties.fontname=fontname - properties.fullname=fullname - properties.filename=filename - properties.psname=psname - properties.name=name - local expansion=parameters.expansion - if expansion then - target.stretch=expansion.stretch - target.shrink=expansion.shrink - target.step=expansion.step - target.auto_expand=expansion.auto - end - local protrusion=parameters.protrusion - if protrusion then - target.auto_protrude=protrusion.auto - end - local extendfactor=parameters.extendfactor or 0 - if extendfactor~=0 and extendfactor~=1 then - hdelta=hdelta*extendfactor - target.extend=extendfactor*1000 - else - target.extend=1000 - end - local slantfactor=parameters.slantfactor or 0 - if slantfactor~=0 then - target.slant=slantfactor*1000 - else - target.slant=0 - end - targetparameters.factor=delta - targetparameters.hfactor=hdelta - targetparameters.vfactor=vdelta - targetparameters.size=scaledpoints - targetparameters.units=units - targetparameters.scaledpoints=askedscaledpoints - local isvirtual=properties.virtualized or tfmdata.type=="virtual" - local hasquality=target.auto_expand or 
target.auto_protrude - local hasitalics=properties.hasitalics - local autoitalicamount=properties.autoitalicamount - local stackmath=not properties.nostackmath - local nonames=properties.noglyphnames - local haskerns=properties.haskerns or properties.mode=="base" - local hasligatures=properties.hasligatures or properties.mode=="base" - if changed and not next(changed) then - changed=false - end - target.type=isvirtual and "virtual" or "real" - target.postprocessors=tfmdata.postprocessors - local targetslant=(parameters.slant or parameters[1] or 0)*factors.pt - local targetspace=(parameters.space or parameters[2] or 0)*hdelta - local targetspace_stretch=(parameters.space_stretch or parameters[3] or 0)*hdelta - local targetspace_shrink=(parameters.space_shrink or parameters[4] or 0)*hdelta - local targetx_height=(parameters.x_height or parameters[5] or 0)*vdelta - local targetquad=(parameters.quad or parameters[6] or 0)*hdelta - local targetextra_space=(parameters.extra_space or parameters[7] or 0)*hdelta - targetparameters.slant=targetslant - targetparameters.space=targetspace - targetparameters.space_stretch=targetspace_stretch - targetparameters.space_shrink=targetspace_shrink - targetparameters.x_height=targetx_height - targetparameters.quad=targetquad - targetparameters.extra_space=targetextra_space - local ascender=parameters.ascender - if ascender then - targetparameters.ascender=delta*ascender - end - local descender=parameters.descender - if descender then - targetparameters.descender=delta*descender - end - constructors.enhanceparameters(targetparameters) - local protrusionfactor=(targetquad~=0 and 1000/targetquad) or 0 - local scaledwidth=defaultwidth*hdelta - local scaledheight=defaultheight*vdelta - local scaleddepth=defaultdepth*vdelta - local hasmath=(properties.hasmath or next(mathparameters)) and true - if hasmath then - constructors.assignmathparameters(target,tfmdata) - properties.hasmath=true - target.nomath=false - target.MathConstants=target.mathparameters - else - properties.hasmath=false - target.nomath=true - target.mathparameters=nil - end - local italickey="italic" - local useitalics=true - if hasmath then - autoitalicamount=false - elseif properties.textitalics then - italickey="italic_correction" - useitalics=false - if properties.delaytextitalics then - autoitalicamount=false - end - end - if trace_defining then - report_defining("defining tfm, name %a, fullname %a, filename %a, hscale %a, vscale %a, math %a, italics %a", - name,fullname,filename,hdelta,vdelta, - hasmath and "enabled" or "disabled",useitalics and "enabled" or "disabled") - end - constructors.beforecopyingcharacters(target,tfmdata) - local sharedkerns={} - for unicode,character in next,characters do - local chr,description,index - if changed then - local c=changed[unicode] - if c then - description=descriptions[c] or descriptions[unicode] or character - character=characters[c] or character - index=description.index or c - else - description=descriptions[unicode] or character - index=description.index or unicode - end - else - description=descriptions[unicode] or character - index=description.index or unicode - end - local width=description.width - local height=description.height - local depth=description.depth - if width then width=hdelta*width else width=scaledwidth end - if height then height=vdelta*height else height=scaledheight end - if depth and depth~=0 then - depth=delta*depth - if nonames then - chr={ - index=index, - height=height, - depth=depth, - width=width, - } - else - chr={ - 
name=description.name, - index=index, - height=height, - depth=depth, - width=width, - } - end - else - if nonames then - chr={ - index=index, - height=height, - width=width, - } - else - chr={ - name=description.name, - index=index, - height=height, - width=width, - } - end - end - local isunicode=description.unicode - if isunicode then - chr.unicode=isunicode - chr.tounicode=tounicode(isunicode) - end - if hasquality then - local ve=character.expansion_factor - if ve then - chr.expansion_factor=ve*1000 - end - local vl=character.left_protruding - if vl then - chr.left_protruding=protrusionfactor*width*vl - end - local vr=character.right_protruding - if vr then - chr.right_protruding=protrusionfactor*width*vr - end - end - if autoitalicamount then - local vi=description.italic - if not vi then - local vi=description.boundingbox[3]-description.width+autoitalicamount - if vi>0 then - chr[italickey]=vi*hdelta - end - elseif vi~=0 then - chr[italickey]=vi*hdelta - end - elseif hasitalics then - local vi=description.italic - if vi and vi~=0 then - chr[italickey]=vi*hdelta - end - end - if hasmath then - local vn=character.next - if vn then - chr.next=vn - else - local vv=character.vert_variants - if vv then - local t={} - for i=1,#vv do - local vvi=vv[i] - t[i]={ - ["start"]=(vvi["start"] or 0)*vdelta, - ["end"]=(vvi["end"] or 0)*vdelta, - ["advance"]=(vvi["advance"] or 0)*vdelta, - ["extender"]=vvi["extender"], - ["glyph"]=vvi["glyph"], - } - end - chr.vert_variants=t - else - local hv=character.horiz_variants - if hv then - local t={} - for i=1,#hv do - local hvi=hv[i] - t[i]={ - ["start"]=(hvi["start"] or 0)*hdelta, - ["end"]=(hvi["end"] or 0)*hdelta, - ["advance"]=(hvi["advance"] or 0)*hdelta, - ["extender"]=hvi["extender"], - ["glyph"]=hvi["glyph"], - } - end - chr.horiz_variants=t - end - end - end - local va=character.top_accent - if va then - chr.top_accent=vdelta*va - end - if stackmath then - local mk=character.mathkerns - if mk then - local kerns={} - local v=mk.top_right if v then local k={} for i=1,#v do local vi=v[i] - k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern } - end kerns.top_right=k end - local v=mk.top_left if v then local k={} for i=1,#v do local vi=v[i] - k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern } - end kerns.top_left=k end - local v=mk.bottom_left if v then local k={} for i=1,#v do local vi=v[i] - k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern } - end kerns.bottom_left=k end - local v=mk.bottom_right if v then local k={} for i=1,#v do local vi=v[i] - k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern } - end kerns.bottom_right=k end - chr.mathkern=kerns - end - end - end - if haskerns then - local vk=character.kerns - if vk then - local s=sharedkerns[vk] - if not s then - s={} - for k,v in next,vk do s[k]=v*hdelta end - sharedkerns[vk]=s - end - chr.kerns=s - end - end - if hasligatures then - local vl=character.ligatures - if vl then - if true then - chr.ligatures=vl - else - local tt={} - for i,l in next,vl do - tt[i]=l - end - chr.ligatures=tt - end - end - end - if isvirtual then - local vc=character.commands - if vc then - local ok=false - for i=1,#vc do - local key=vc[i][1] - if key=="right" or key=="down" then - ok=true - break - end - end - if ok then - local tt={} - for i=1,#vc do - local ivc=vc[i] - local key=ivc[1] - if key=="right" then - tt[i]={ key,ivc[2]*hdelta } - elseif key=="down" then - tt[i]={ key,ivc[2]*vdelta } - elseif key=="rule" then - tt[i]={ key,ivc[2]*vdelta,ivc[3]*hdelta } - else - tt[i]=ivc - end - end - chr.commands=tt 
- else - chr.commands=vc - end - chr.index=nil - end - end - targetcharacters[unicode]=chr - end - constructors.aftercopyingcharacters(target,tfmdata) - constructors.trytosharefont(target,tfmdata) - return target -end -function constructors.finalize(tfmdata) - if tfmdata.properties and tfmdata.properties.finalized then - return - end - if not tfmdata.characters then - return nil - end - if not tfmdata.goodies then - tfmdata.goodies={} - end - local parameters=tfmdata.parameters - if not parameters then - return nil - end - if not parameters.expansion then - parameters.expansion={ - stretch=tfmdata.stretch or 0, - shrink=tfmdata.shrink or 0, - step=tfmdata.step or 0, - auto=tfmdata.auto_expand or false, - } - end - if not parameters.protrusion then - parameters.protrusion={ - auto=auto_protrude - } - end - if not parameters.size then - parameters.size=tfmdata.size - end - if not parameters.extendfactor then - parameters.extendfactor=tfmdata.extend or 0 - end - if not parameters.slantfactor then - parameters.slantfactor=tfmdata.slant or 0 - end - if not parameters.designsize then - parameters.designsize=tfmdata.designsize or (factors.pt*10) - end - if not parameters.units then - parameters.units=tfmdata.units_per_em or 1000 - end - if not tfmdata.descriptions then - local descriptions={} - setmetatableindex(descriptions,function(t,k) local v={} t[k]=v return v end) - tfmdata.descriptions=descriptions - end - local properties=tfmdata.properties - if not properties then - properties={} - tfmdata.properties=properties - end - if not properties.virtualized then - properties.virtualized=tfmdata.type=="virtual" - end - if not tfmdata.properties then - tfmdata.properties={ - fontname=tfmdata.fontname, - filename=tfmdata.filename, - fullname=tfmdata.fullname, - name=tfmdata.name, - psname=tfmdata.psname, - encodingbytes=tfmdata.encodingbytes or 1, - embedding=tfmdata.embedding or "subset", - tounicode=tfmdata.tounicode or 1, - cidinfo=tfmdata.cidinfo or nil, - format=tfmdata.format or "type1", - direction=tfmdata.direction or 0, - } - end - if not tfmdata.resources then - tfmdata.resources={} - end - if not tfmdata.shared then - tfmdata.shared={} - end - if not properties.hasmath then - properties.hasmath=not tfmdata.nomath - end - tfmdata.MathConstants=nil - tfmdata.postprocessors=nil - tfmdata.fontname=nil - tfmdata.filename=nil - tfmdata.fullname=nil - tfmdata.name=nil - tfmdata.psname=nil - tfmdata.encodingbytes=nil - tfmdata.embedding=nil - tfmdata.tounicode=nil - tfmdata.cidinfo=nil - tfmdata.format=nil - tfmdata.direction=nil - tfmdata.type=nil - tfmdata.nomath=nil - tfmdata.designsize=nil - tfmdata.size=nil - tfmdata.stretch=nil - tfmdata.shrink=nil - tfmdata.step=nil - tfmdata.auto_expand=nil - tfmdata.auto_protrude=nil - tfmdata.extend=nil - tfmdata.slant=nil - tfmdata.units_per_em=nil - tfmdata.cache=nil - properties.finalized=true - return tfmdata -end -local hashmethods={} -constructors.hashmethods=hashmethods -function constructors.hashfeatures(specification) - local features=specification.features - if features then - local t,tn={},0 - for category,list in next,features do - if next(list) then - local hasher=hashmethods[category] - if hasher then - local hash=hasher(list) - if hash then - tn=tn+1 - t[tn]=category..":"..hash - end - end - end - end - if tn>0 then - return concat(t," & ") - end - end - return "unknown" -end -hashmethods.normal=function(list) - local s={} - local n=0 - for k,v in next,list do - if not k then - elseif k=="number" or k=="features" then - else - n=n+1 - 
s[n]=k - end - end - if n>0 then - sort(s) - for i=1,n do - local k=s[i] - s[i]=k..'='..tostring(list[k]) - end - return concat(s,"+") - end -end -function constructors.hashinstance(specification,force) - local hash,size,fallbacks=specification.hash,specification.size,specification.fallbacks - if force or not hash then - hash=constructors.hashfeatures(specification) - specification.hash=hash - end - if size<1000 and designsizes[hash] then - size=math.round(constructors.scaled(size,designsizes[hash])) - specification.size=size - end - if fallbacks then - return hash..' @ '..tostring(size)..' @ '..fallbacks - else - return hash..' @ '..tostring(size) - end -end -function constructors.setname(tfmdata,specification) - if constructors.namemode=="specification" then - local specname=specification.specification - if specname then - tfmdata.properties.name=specname - if trace_defining then - report_otf("overloaded fontname %a",specname) - end - end - end -end -function constructors.checkedfilename(data) - local foundfilename=data.foundfilename - if not foundfilename then - local askedfilename=data.filename or "" - if askedfilename~="" then - askedfilename=resolvers.resolve(askedfilename) - foundfilename=resolvers.findbinfile(askedfilename,"") or "" - if foundfilename=="" then - report_defining("source file %a is not found",askedfilename) - foundfilename=resolvers.findbinfile(file.basename(askedfilename),"") or "" - if foundfilename~="" then - report_defining("using source file %a due to cache mismatch",foundfilename) - end - end - end - data.foundfilename=foundfilename - end - return foundfilename -end -local formats=allocate() -fonts.formats=formats -setmetatableindex(formats,function(t,k) - local l=lower(k) - if rawget(t,k) then - t[k]=l - return l - end - return rawget(t,file.suffix(l)) -end) -local locations={} -local function setindeed(mode,target,group,name,action,position) - local t=target[mode] - if not t then - report_defining("fatal error in setting feature %a, group %a, mode %a",name,group,mode) - os.exit() - elseif position then - insert(t,position,{ name=name,action=action }) - else - for i=1,#t do - local ti=t[i] - if ti.name==name then - ti.action=action - return - end - end - insert(t,{ name=name,action=action }) - end -end -local function set(group,name,target,source) - target=target[group] - if not target then - report_defining("fatal target error in setting feature %a, group %a",name,group) - os.exit() - end - local source=source[group] - if not source then - report_defining("fatal source error in setting feature %a, group %a",name,group) - os.exit() - end - local node=source.node - local base=source.base - local position=source.position - if node then - setindeed("node",target,group,name,node,position) - end - if base then - setindeed("base",target,group,name,base,position) - end -end -local function register(where,specification) - local name=specification.name - if name and name~="" then - local default=specification.default - local description=specification.description - local initializers=specification.initializers - local processors=specification.processors - local manipulators=specification.manipulators - local modechecker=specification.modechecker - if default then - where.defaults[name]=default - end - if description and description~="" then - where.descriptions[name]=description - end - if initializers then - set('initializers',name,where,specification) - end - if processors then - set('processors',name,where,specification) - end - if manipulators then - 
set('manipulators',name,where,specification) - end - if modechecker then - where.modechecker=modechecker - end - end -end -constructors.registerfeature=register -function constructors.getfeatureaction(what,where,mode,name) - what=handlers[what].features - if what then - where=what[where] - if where then - mode=where[mode] - if mode then - for i=1,#mode do - local m=mode[i] - if m.name==name then - return m.action - end - end - end - end - end -end -function constructors.newhandler(what) - local handler=handlers[what] - if not handler then - handler={} - handlers[what]=handler - end - return handler -end -function constructors.newfeatures(what) - local handler=handlers[what] - local features=handler.features - if not features then - local tables=handler.tables - local statistics=handler.statistics - features=allocate { - defaults={}, - descriptions=tables and tables.features or {}, - used=statistics and statistics.usedfeatures or {}, - initializers={ base={},node={} }, - processors={ base={},node={} }, - manipulators={ base={},node={} }, - } - features.register=function(specification) return register(features,specification) end - handler.features=features - end - return features -end -function constructors.checkedfeatures(what,features) - local defaults=handlers[what].features.defaults - if features and next(features) then - features=fastcopy(features) - for key,value in next,defaults do - if features[key]==nil then - features[key]=value - end - end - return features - else - return fastcopy(defaults) - end -end -function constructors.initializefeatures(what,tfmdata,features,trace,report) - if features and next(features) then - local properties=tfmdata.properties or {} - local whathandler=handlers[what] - local whatfeatures=whathandler.features - local whatinitializers=whatfeatures.initializers - local whatmodechecker=whatfeatures.modechecker - local mode=properties.mode or (whatmodechecker and whatmodechecker(tfmdata,features,features.mode)) or features.mode or "base" - properties.mode=mode - features.mode=mode - local done={} - while true do - local redo=false - local initializers=whatfeatures.initializers[mode] - if initializers then - for i=1,#initializers do - local step=initializers[i] - local feature=step.name - local value=features[feature] - if not value then - elseif done[feature] then - else - local action=step.action - if trace then - report("initializing feature %a to %a for mode %a for font %a",feature, - value,mode,tfmdata.properties.fullname) - end - action(tfmdata,value,features) - if mode~=properties.mode or mode~=features.mode then - if whatmodechecker then - properties.mode=whatmodechecker(tfmdata,features,properties.mode) - features.mode=properties.mode - end - if mode~=properties.mode then - mode=properties.mode - redo=true - end - end - done[feature]=true - end - if redo then - break - end - end - if not redo then - break - end - else - break - end - end - properties.mode=mode - return true - else - return false - end -end -function constructors.collectprocessors(what,tfmdata,features,trace,report) - local processes,nofprocesses={},0 - if features and next(features) then - local properties=tfmdata.properties - local whathandler=handlers[what] - local whatfeatures=whathandler.features - local whatprocessors=whatfeatures.processors - local mode=properties.mode - local processors=whatprocessors[mode] - if processors then - for i=1,#processors do - local step=processors[i] - local feature=step.name - if features[feature] then - local action=step.action - if trace then - 
report("installing feature processor %a for mode %a for font %a",feature,mode,tfmdata.properties.fullname) - end - if action then - nofprocesses=nofprocesses+1 - processes[nofprocesses]=action - end - end - end - elseif trace then - report("no feature processors for mode %a for font %a",mode,properties.fullname) - end - end - return processes -end -function constructors.applymanipulators(what,tfmdata,features,trace,report) - if features and next(features) then - local properties=tfmdata.properties - local whathandler=handlers[what] - local whatfeatures=whathandler.features - local whatmanipulators=whatfeatures.manipulators - local mode=properties.mode - local manipulators=whatmanipulators[mode] - if manipulators then - for i=1,#manipulators do - local step=manipulators[i] - local feature=step.name - local value=features[feature] - if value then - local action=step.action - if trace then - report("applying feature manipulator %a for mode %a for font %a",feature,mode,properties.fullname) - end - if action then - action(tfmdata,feature,value) - end - end - end - end - end -end -function constructors.addcoreunicodes(unicodes) - if not unicodes then - unicodes={} - end - unicodes.space=0x0020 - unicodes.hyphen=0x002D - unicodes.zwj=0x200D - unicodes.zwnj=0x200C - return unicodes -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['luatex-font-enc']={ - version=1.001, - comment="companion to luatex-*.tex", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end -local fonts=fonts -fonts.encodings={} -fonts.encodings.agl={} -fonts.encodings.known={} -setmetatable(fonts.encodings.agl,{ __index=function(t,k) - if k=="unicodes" then - texio.write(" ") - local unicodes=dofile(resolvers.findfile("font-age.lua")) - fonts.encodings.agl={ unicodes=unicodes } - return unicodes - else - return nil - end -end }) - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-cid']={ - version=1.001, - comment="companion to font-otf.lua (cidmaps)", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local format,match,lower=string.format,string.match,string.lower -local tonumber=tonumber -local P,S,R,C,V,lpegmatch=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.V,lpeg.match -local fonts,logs,trackers=fonts,logs,trackers -local trace_loading=false trackers.register("otf.loading",function(v) trace_loading=v end) -local report_otf=logs.reporter("fonts","otf loading") -local cid={} -fonts.cid=cid -local cidmap={} -local cidmax=10 -local number=C(R("09","af","AF")^1) -local space=S(" \n\r\t") -local spaces=space^0 -local period=P(".") -local periods=period*period -local name=P("/")*C((1-space)^1) -local unicodes,names={},{} -local function do_one(a,b) - unicodes[tonumber(a)]=tonumber(b,16) -end -local function do_range(a,b,c) - c=tonumber(c,16) - for i=tonumber(a),tonumber(b) do - unicodes[i]=c - c=c+1 - end -end -local function do_name(a,b) - names[tonumber(a)]=b -end -local grammar=P { "start", - start=number*spaces*number*V("series"), - series=(spaces*(V("one")+V("range")+V("named")))^1, - one=(number*spaces*number)/do_one, - range=(number*periods*number*spaces*number)/do_range, - 
named=(number*spaces*name)/do_name -} -local function loadcidfile(filename) - local data=io.loaddata(filename) - if data then - unicodes,names={},{} - lpegmatch(grammar,data) - local supplement,registry,ordering=match(filename,"^(.-)%-(.-)%-()%.(.-)$") - return { - supplement=supplement, - registry=registry, - ordering=ordering, - filename=filename, - unicodes=unicodes, - names=names, - } - end -end -cid.loadfile=loadcidfile -local template="%s-%s-%s.cidmap" -local function locate(registry,ordering,supplement) - local filename=format(template,registry,ordering,supplement) - local hashname=lower(filename) - local found=cidmap[hashname] - if not found then - if trace_loading then - report_otf("checking cidmap, registry %a, ordering %a, supplement %a, filename %a",registry,ordering,supplement,filename) - end - local fullname=resolvers.findfile(filename,'cid') or "" - if fullname~="" then - found=loadcidfile(fullname) - if found then - if trace_loading then - report_otf("using cidmap file %a",filename) - end - cidmap[hashname]=found - found.usedname=file.basename(filename) - end - end - end - return found -end -function cid.getmap(specification) - if not specification then - report_otf("invalid cidinfo specification, table expected") - return - end - local registry=specification.registry - local ordering=specification.ordering - local supplement=specification.supplement - local filename=format(registry,ordering,supplement) - local lowername=lower(filename) - local found=cidmap[lowername] - if found then - return found - end - if ordering=="Identity" then - local found={ - supplement=supplement, - registry=registry, - ordering=ordering, - filename=filename, - unicodes={}, - names={}, - } - cidmap[lowername]=found - return found - end - if trace_loading then - report_otf("cidmap needed, registry %a, ordering %a, supplement %a",registry,ordering,supplement) - end - found=locate(registry,ordering,supplement) - if not found then - local supnum=tonumber(supplement) - local cidnum=nil - if supnum0 then - for s=supnum-1,0,-1 do - local c=locate(registry,ordering,s) - if c then - found,cidnum=c,s - break - end - end - end - registry=lower(registry) - ordering=lower(ordering) - if found and cidnum>0 then - for s=0,cidnum-1 do - local filename=format(template,registry,ordering,s) - if not cidmap[filename] then - cidmap[filename]=found - end - end - end - end - return found -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-map']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local tonumber,next,type=tonumber,next,type -local match,format,find,concat,gsub,lower=string.match,string.format,string.find,table.concat,string.gsub,string.lower -local P,R,S,C,Ct,Cc,lpegmatch=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cc,lpeg.match -local utfbyte=utf.byte -local floor=math.floor -local formatters=string.formatters -local trace_loading=false trackers.register("fonts.loading",function(v) trace_loading=v end) -local trace_mapping=false trackers.register("fonts.mapping",function(v) trace_unimapping=v end) -local report_fonts=logs.reporter("fonts","loading") -local fonts=fonts or {} -local mappings=fonts.mappings or {} -fonts.mappings=mappings -local function loadlumtable(filename) - local lumname=file.replacesuffix(file.basename(filename),"lum") - local 
lumfile=resolvers.findfile(lumname,"map") or "" - if lumfile~="" and lfs.isfile(lumfile) then - if trace_loading or trace_mapping then - report_fonts("loading map table %a",lumfile) - end - lumunic=dofile(lumfile) - return lumunic,lumfile - end -end -local hex=R("AF","09") -local hexfour=(hex*hex*hex*hex)/function(s) return tonumber(s,16) end -local hexsix=(hex*hex*hex*hex*hex*hex)/function(s) return tonumber(s,16) end -local dec=(R("09")^1)/tonumber -local period=P(".") -local unicode=P("uni")*(hexfour*(period+P(-1))*Cc(false)+Ct(hexfour^1)*Cc(true)) -local ucode=P("u")*(hexsix*(period+P(-1))*Cc(false)+Ct(hexsix^1)*Cc(true)) -local index=P("index")*dec*Cc(false) -local parser=unicode+ucode+index -local parsers={} -local function makenameparser(str) - if not str or str=="" then - return parser - else - local p=parsers[str] - if not p then - p=P(str)*period*dec*Cc(false) - parsers[str]=p - end - return p - end -end -local f_single=formatters["%04X"] -local f_double=formatters["%04X%04X"] -local function tounicode16(unicode,name) - if unicode<0x10000 then - return f_single(unicode) - elseif unicode<0x1FFFFFFFFF then - return f_double(floor(unicode/1024),unicode%1024+0xDC00) - else - report_fonts("can't convert %a in %a into tounicode",unicode,name) - end -end -local function tounicode16sequence(unicodes,name) - local t={} - for l=1,#unicodes do - local u=unicodes[l] - if u<0x10000 then - t[l]=f_single(u) - elseif unicode<0x1FFFFFFFFF then - t[l]=f_double(floor(u/1024),u%1024+0xDC00) - else - report_fonts ("can't convert %a in %a into tounicode",u,name) - return - end - end - return concat(t) -end -local function tounicode(unicode,name) - if type(unicode)=="table" then - local t={} - for l=1,#unicode do - local u=unicode[l] - if u<0x10000 then - t[l]=f_single(u) - elseif u<0x1FFFFFFFFF then - t[l]=f_double(floor(u/1024),u%1024+0xDC00) - else - report_fonts ("can't convert %a in %a into tounicode",u,name) - return - end - end - return concat(t) - else - if unicode<0x10000 then - return f_single(unicode) - elseif unicode<0x1FFFFFFFFF then - return f_double(floor(unicode/1024),unicode%1024+0xDC00) - else - report_fonts("can't convert %a in %a into tounicode",unicode,name) - end - end -end -local function fromunicode16(str) - if #str==4 then - return tonumber(str,16) - else - local l,r=match(str,"(....)(....)") - return (tonumber(l,16))*0x400+tonumber(r,16)-0xDC00 - end -end -mappings.loadlumtable=loadlumtable -mappings.makenameparser=makenameparser -mappings.tounicode=tounicode -mappings.tounicode16=tounicode16 -mappings.tounicode16sequence=tounicode16sequence -mappings.fromunicode16=fromunicode16 -local ligseparator=P("_") -local varseparator=P(".") -local namesplitter=Ct(C((1-ligseparator-varseparator)^1)*(ligseparator*C((1-ligseparator-varseparator)^1))^0) -local overloads={ - IJ={ name="I_J",unicode={ 0x49,0x4A },mess=0x0132 }, - ij={ name="i_j",unicode={ 0x69,0x6A },mess=0x0133 }, - ff={ name="f_f",unicode={ 0x66,0x66 },mess=0xFB00 }, - fi={ name="f_i",unicode={ 0x66,0x69 },mess=0xFB01 }, - fl={ name="f_l",unicode={ 0x66,0x6C },mess=0xFB02 }, - ffi={ name="f_f_i",unicode={ 0x66,0x66,0x69 },mess=0xFB03 }, - ffl={ name="f_f_l",unicode={ 0x66,0x66,0x6C },mess=0xFB04 }, - fj={ name="f_j",unicode={ 0x66,0x6A } }, - fk={ name="f_k",unicode={ 0x66,0x6B } }, -} -for k,v in next,overloads do - local name=v.name - local mess=v.mess - if name then - overloads[name]=v - end - if mess then - overloads[mess]=v - end -end -mappings.overloads=overloads -function mappings.addtounicode(data,filename) - local 
resources=data.resources - local properties=data.properties - local descriptions=data.descriptions - local unicodes=resources.unicodes - local lookuptypes=resources.lookuptypes - if not unicodes then - return - end - unicodes['space']=unicodes['space'] or 32 - unicodes['hyphen']=unicodes['hyphen'] or 45 - unicodes['zwj']=unicodes['zwj'] or 0x200D - unicodes['zwnj']=unicodes['zwnj'] or 0x200C - local private=fonts.constructors.privateoffset - local unicodevector=fonts.encodings.agl.unicodes - local missing={} - local lumunic,uparser,oparser - local cidinfo,cidnames,cidcodes,usedmap - cidinfo=properties.cidinfo - usedmap=cidinfo and fonts.cid.getmap(cidinfo) - if usedmap then - oparser=usedmap and makenameparser(cidinfo.ordering) - cidnames=usedmap.names - cidcodes=usedmap.unicodes - end - uparser=makenameparser() - local ns,nl=0,0 - for unic,glyph in next,descriptions do - local index=glyph.index - local name=glyph.name - local r=overloads[name] - if r then - glyph.unicode=r.unicode - elseif unic==-1 or unic>=private or (unic>=0xE000 and unic<=0xF8FF) or unic==0xFFFE or unic==0xFFFF then - local unicode=lumunic and lumunic[name] or unicodevector[name] - if unicode then - glyph.unicode=unicode - ns=ns+1 - end - if (not unicode) and usedmap then - local foundindex=lpegmatch(oparser,name) - if foundindex then - unicode=cidcodes[foundindex] - if unicode then - glyph.unicode=unicode - ns=ns+1 - else - local reference=cidnames[foundindex] - if reference then - local foundindex=lpegmatch(oparser,reference) - if foundindex then - unicode=cidcodes[foundindex] - if unicode then - glyph.unicode=unicode - ns=ns+1 - end - end - if not unicode or unicode=="" then - local foundcodes,multiple=lpegmatch(uparser,reference) - if foundcodes then - glyph.unicode=foundcodes - if multiple then - nl=nl+1 - unicode=true - else - ns=ns+1 - unicode=foundcodes - end - end - end - end - end - end - end - if not unicode or unicode=="" then - local split=lpegmatch(namesplitter,name) - local nsplit=split and #split or 0 - local t,n={},0 - unicode=true - for l=1,nsplit do - local base=split[l] - local u=unicodes[base] or unicodevector[base] - if not u then - break - elseif type(u)=="table" then - if u[1]>=private then - unicode=false - break - end - n=n+1 - t[n]=u[1] - else - if u>=private then - unicode=false - break - end - n=n+1 - t[n]=u - end - end - if n==0 then - elseif n==1 then - glyph.unicode=t[1] - else - glyph.unicode=t - end - nl=nl+1 - end - if not unicode or unicode=="" then - local foundcodes,multiple=lpegmatch(uparser,name) - if foundcodes then - glyph.unicode=foundcodes - if multiple then - nl=nl+1 - unicode=true - else - ns=ns+1 - unicode=foundcodes - end - end - end - local r=overloads[unicode] - if r then - unicode=r.unicode - glyph.unicode=unicode - end - if not unicode then - missing[name]=true - end - end - end - if next(missing) then - local guess={} - local function check(gname,code,unicode) - local description=descriptions[code] - local variant=description.name - if variant==gname then - return - end - local unic=unicodes[variant] - if unic==-1 or unic>=private or (unic>=0xE000 and unic<=0xF8FF) or unic==0xFFFE or unic==0xFFFF then - else - return - end - if descriptions[code].unicode then - return - end - local g=guess[variant] - if g then - g[gname]=unicode - else - guess[variant]={ [gname]=unicode } - end - end - for unicode,description in next,descriptions do - local slookups=description.slookups - if slookups then - local gname=description.name - for tag,data in next,slookups do - local 
lookuptype=lookuptypes[tag] - if lookuptype=="alternate" then - for i=1,#data do - check(gname,data[i],unicode) - end - elseif lookuptype=="substitution" then - check(gname,data,unicode) - end - end - end - local mlookups=description.mlookups - if mlookups then - local gname=description.name - for tag,list in next,mlookups do - local lookuptype=lookuptypes[tag] - if lookuptype=="alternate" then - for i=1,#list do - local data=list[i] - for i=1,#data do - check(gname,data[i],unicode) - end - end - elseif lookuptype=="substitution" then - for i=1,#list do - check(gname,list[i],unicode) - end - end - end - end - end - local done=true - while done do - done=false - for k,v in next,guess do - if type(v)~="number" then - for kk,vv in next,v do - if vv==-1 or vv>=private or (vv>=0xE000 and vv<=0xF8FF) or vv==0xFFFE or vv==0xFFFF then - local uu=guess[kk] - if type(uu)=="number" then - guess[k]=uu - done=true - end - else - guess[k]=vv - done=true - end - end - end - end - end - local orphans=0 - local guessed=0 - for k,v in next,guess do - if type(v)=="number" then - descriptions[unicodes[k]].unicode=descriptions[v].unicode or v - guessed=guessed+1 - else - local t=nil - local l=lower(k) - local u=unicodes[l] - if not u then - orphans=orphans+1 - elseif u==-1 or u>=private or (u>=0xE000 and u<=0xF8FF) or u==0xFFFE or u==0xFFFF then - local unicode=descriptions[u].unicode - if unicode then - descriptions[unicodes[k]].unicode=unicode - guessed=guessed+1 - else - orphans=orphans+1 - end - else - orphans=orphans+1 - end - end - end - if trace_loading and orphans>0 or guessed>0 then - report_fonts("%s glyphs with no related unicode, %s guessed, %s orphans",guessed+orphans,guessed,orphans) - end - end - if trace_mapping then - for unic,glyph in table.sortedhash(descriptions) do - local name=glyph.name - local index=glyph.index - local unicode=glyph.unicode - if unicode then - if type(unicode)=="table" then - local unicodes={} - for i=1,#unicode do - unicodes[i]=formatters("%U",unicode[i]) - end - report_fonts("internal slot %U, name %a, unicode %U, tounicode % t",index,name,unic,unicodes) - else - report_fonts("internal slot %U, name %a, unicode %U, tounicode %U",index,name,unic,unicode) - end - else - report_fonts("internal slot %U, name %a, unicode %U",index,name,unic) - end - end - end - if trace_loading and (ns>0 or nl>0) then - report_fonts("%s tounicode entries added, ligatures %s",nl+ns,ns) - end -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['luatex-fonts-syn']={ - version=1.001, - comment="companion to luatex-*.tex", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end -local fonts=fonts -fonts.names=fonts.names or {} -fonts.names.version=1.001 -fonts.names.basename="luatex-fonts-names" -fonts.names.new_to_old={} -fonts.names.old_to_new={} -fonts.names.cache=containers.define("fonts","data",fonts.names.version,true) -local data,loaded=nil,false -local fileformats={ "lua","tex","other text files" } -function fonts.names.reportmissingbase() - texio.write("") - fonts.names.reportmissingbase=nil -end -function fonts.names.reportmissingname() - texio.write("") - fonts.names.reportmissingname=nil -end -function fonts.names.resolve(name,sub) - if not loaded then - local basename=fonts.names.basename - if basename and 
basename~="" then - data=containers.read(fonts.names.cache,basename) - if not data then - basename=file.addsuffix(basename,"lua") - for i=1,#fileformats do - local format=fileformats[i] - local foundname=resolvers.findfile(basename,format) or "" - if foundname~="" then - data=dofile(foundname) - texio.write("") - break - end - end - end - end - loaded=true - end - if type(data)=="table" and data.version==fonts.names.version then - local condensed=string.gsub(string.lower(name),"[^%a%d]","") - local found=data.mappings and data.mappings[condensed] - if found then - local fontname,filename,subfont=found[1],found[2],found[3] - if subfont then - return filename,fontname - else - return filename,false - end - elseif fonts.names.reportmissingname then - fonts.names.reportmissingname() - return name,false - end - elseif fonts.names.reportmissingbase then - fonts.names.reportmissingbase() - end -end -fonts.names.resolvespec=fonts.names.resolve -function fonts.names.getfilename(askedname,suffix) - return "" -end -function fonts.names.ignoredfile(filename) - return false -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-tfm']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local next=next -local match=string.match -local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end) -local trace_features=false trackers.register("tfm.features",function(v) trace_features=v end) -local report_defining=logs.reporter("fonts","defining") -local report_tfm=logs.reporter("fonts","tfm loading") -local findbinfile=resolvers.findbinfile -local fonts=fonts -local handlers=fonts.handlers -local readers=fonts.readers -local constructors=fonts.constructors -local encodings=fonts.encodings -local tfm=constructors.newhandler("tfm") -local tfmfeatures=constructors.newfeatures("tfm") -local registertfmfeature=tfmfeatures.register -constructors.resolvevirtualtoo=false -fonts.formats.tfm="type1" -function tfm.setfeatures(tfmdata,features) - local okay=constructors.initializefeatures("tfm",tfmdata,features,trace_features,report_tfm) - if okay then - return constructors.collectprocessors("tfm",tfmdata,features,trace_features,report_tfm) - else - return {} - end -end -local function read_from_tfm(specification) - local filename=specification.filename - local size=specification.size - if trace_defining then - report_defining("loading tfm file %a at size %s",filename,size) - end - local tfmdata=font.read_tfm(filename,size) - if tfmdata then - local features=specification.features and specification.features.normal or {} - local resources=tfmdata.resources or {} - local properties=tfmdata.properties or {} - local parameters=tfmdata.parameters or {} - local shared=tfmdata.shared or {} - properties.name=tfmdata.name - properties.fontname=tfmdata.fontname - properties.psname=tfmdata.psname - properties.filename=specification.filename - properties.format=fonts.formats.tfm - parameters.size=size - shared.rawdata={} - shared.features=features - shared.processes=next(features) and tfm.setfeatures(tfmdata,features) or nil - tfmdata.properties=properties - tfmdata.resources=resources - tfmdata.parameters=parameters - tfmdata.shared=shared - parameters.slant=parameters.slant or parameters[1] or 0 - parameters.space=parameters.space or parameters[2] or 0 - 
parameters.space_stretch=parameters.space_stretch or parameters[3] or 0 - parameters.space_shrink=parameters.space_shrink or parameters[4] or 0 - parameters.x_height=parameters.x_height or parameters[5] or 0 - parameters.quad=parameters.quad or parameters[6] or 0 - parameters.extra_space=parameters.extra_space or parameters[7] or 0 - constructors.enhanceparameters(parameters) - if constructors.resolvevirtualtoo then - fonts.loggers.register(tfmdata,file.suffix(filename),specification) - local vfname=findbinfile(specification.name,'ovf') - if vfname and vfname~="" then - local vfdata=font.read_vf(vfname,size) - if vfdata then - local chars=tfmdata.characters - for k,v in next,vfdata.characters do - chars[k].commands=v.commands - end - properties.virtualized=true - tfmdata.fonts=vfdata.fonts - end - end - end - local allfeatures=tfmdata.shared.features or specification.features.normal - constructors.applymanipulators("tfm",tfmdata,allfeatures.normal,trace_features,report_tfm) - if not features.encoding then - local encoding,filename=match(properties.filename,"^(.-)%-(.*)$") - if filename and encoding and encodings.known and encodings.known[encoding] then - features.encoding=encoding - end - end - properties.haskerns=true - properties.haslogatures=true - resources.unicodes={} - resources.lookuptags={} - return tfmdata - end -end -local function check_tfm(specification,fullname) - local foundname=findbinfile(fullname,'tfm') or "" - if foundname=="" then - foundname=findbinfile(fullname,'ofm') or "" - end - if foundname=="" then - foundname=fonts.names.getfilename(fullname,"tfm") or "" - end - if foundname~="" then - specification.filename=foundname - specification.format="ofm" - return read_from_tfm(specification) - elseif trace_defining then - report_defining("loading tfm with name %a fails",specification.name) - end -end -readers.check_tfm=check_tfm -function readers.tfm(specification) - local fullname=specification.filename or "" - if fullname=="" then - local forced=specification.forced or "" - if forced~="" then - fullname=specification.name.."."..forced - else - fullname=specification.name - end - end - return check_tfm(specification,fullname) -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-afm']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local fonts,logs,trackers,containers,resolvers=fonts,logs,trackers,containers,resolvers -local next,type,tonumber=next,type,tonumber -local format,match,gmatch,lower,gsub,strip=string.format,string.match,string.gmatch,string.lower,string.gsub,string.strip -local abs=math.abs -local P,S,C,R,lpegmatch,patterns=lpeg.P,lpeg.S,lpeg.C,lpeg.R,lpeg.match,lpeg.patterns -local derivetable=table.derive -local trace_features=false trackers.register("afm.features",function(v) trace_features=v end) -local trace_indexing=false trackers.register("afm.indexing",function(v) trace_indexing=v end) -local trace_loading=false trackers.register("afm.loading",function(v) trace_loading=v end) -local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end) -local report_afm=logs.reporter("fonts","afm loading") -local setmetatableindex=table.setmetatableindex -local findbinfile=resolvers.findbinfile -local definers=fonts.definers -local readers=fonts.readers -local 
constructors=fonts.constructors -local afm=constructors.newhandler("afm") -local pfb=constructors.newhandler("pfb") -local afmfeatures=constructors.newfeatures("afm") -local registerafmfeature=afmfeatures.register -afm.version=1.500 -afm.cache=containers.define("fonts","afm",afm.version,true) -afm.autoprefixed=true -afm.helpdata={} -afm.syncspace=true -afm.addligatures=true -afm.addtexligatures=true -afm.addkerns=true -local overloads=fonts.mappings.overloads -local applyruntimefixes=fonts.treatments and fonts.treatments.applyfixes -local function setmode(tfmdata,value) - if value then - tfmdata.properties.mode=lower(value) - end -end -registerafmfeature { - name="mode", - description="mode", - initializers={ - base=setmode, - node=setmode, - } -} -local comment=P("Comment") -local spacing=patterns.spacer -local lineend=patterns.newline -local words=C((1-lineend)^1) -local number=C((R("09")+S("."))^1)/tonumber*spacing^0 -local data=lpeg.Carg(1) -local pattern=( - comment*spacing*( - data*( - ("CODINGSCHEME"*spacing*words )/function(fd,a) end+("DESIGNSIZE"*spacing*number*words )/function(fd,a) fd[ 1]=a end+("CHECKSUM"*spacing*number*words )/function(fd,a) fd[ 2]=a end+("SPACE"*spacing*number*"plus"*number*"minus"*number)/function(fd,a,b,c) fd[ 3],fd[ 4],fd[ 5]=a,b,c end+("QUAD"*spacing*number )/function(fd,a) fd[ 6]=a end+("EXTRASPACE"*spacing*number )/function(fd,a) fd[ 7]=a end+("NUM"*spacing*number*number*number )/function(fd,a,b,c) fd[ 8],fd[ 9],fd[10]=a,b,c end+("DENOM"*spacing*number*number )/function(fd,a,b ) fd[11],fd[12]=a,b end+("SUP"*spacing*number*number*number )/function(fd,a,b,c) fd[13],fd[14],fd[15]=a,b,c end+("SUB"*spacing*number*number )/function(fd,a,b) fd[16],fd[17]=a,b end+("SUPDROP"*spacing*number )/function(fd,a) fd[18]=a end+("SUBDROP"*spacing*number )/function(fd,a) fd[19]=a end+("DELIM"*spacing*number*number )/function(fd,a,b) fd[20],fd[21]=a,b end+("AXISHEIGHT"*spacing*number )/function(fd,a) fd[22]=a end - )+(1-lineend)^0 - )+(1-comment)^1 -)^0 -local function scan_comment(str) - local fd={} - lpegmatch(pattern,str,1,fd) - return fd -end -local keys={} -function keys.FontName (data,line) data.metadata.fontname=strip (line) - data.metadata.fullname=strip (line) end -function keys.ItalicAngle (data,line) data.metadata.italicangle=tonumber (line) end -function keys.IsFixedPitch(data,line) data.metadata.isfixedpitch=toboolean(line,true) end -function keys.CharWidth (data,line) data.metadata.charwidth=tonumber (line) end -function keys.XHeight (data,line) data.metadata.xheight=tonumber (line) end -function keys.Descender (data,line) data.metadata.descender=tonumber (line) end -function keys.Ascender (data,line) data.metadata.ascender=tonumber (line) end -function keys.Comment (data,line) - line=lower(line) - local designsize=match(line,"designsize[^%d]*(%d+)") - if designsize then data.metadata.designsize=tonumber(designsize) end -end -local function get_charmetrics(data,charmetrics,vector) - local characters=data.characters - local chr,ind={},0 - for k,v in gmatch(charmetrics,"([%a]+) +(.-) *;") do - if k=='C' then - v=tonumber(v) - if v<0 then - ind=ind+1 - else - ind=v - end - chr={ - index=ind - } - elseif k=='WX' then - chr.width=tonumber(v) - elseif k=='N' then - characters[v]=chr - elseif k=='B' then - local llx,lly,urx,ury=match(v,"^ *(.-) +(.-) +(.-) +(.-)$") - chr.boundingbox={ tonumber(llx),tonumber(lly),tonumber(urx),tonumber(ury) } - elseif k=='L' then - local plus,becomes=match(v,"^(.-) +(.-)$") - local ligatures=chr.ligatures - if ligatures then - 
ligatures[plus]=becomes - else - chr.ligatures={ [plus]=becomes } - end - end - end -end -local function get_kernpairs(data,kernpairs) - local characters=data.characters - for one,two,value in gmatch(kernpairs,"KPX +(.-) +(.-) +(.-)\n") do - local chr=characters[one] - if chr then - local kerns=chr.kerns - if kerns then - kerns[two]=tonumber(value) - else - chr.kerns={ [two]=tonumber(value) } - end - end - end -end -local function get_variables(data,fontmetrics) - for key,rest in gmatch(fontmetrics,"(%a+) *(.-)[\n\r]") do - local keyhandler=keys[key] - if keyhandler then - keyhandler(data,rest) - end - end -end -local function get_indexes(data,pfbname) - data.resources.filename=resolvers.unresolve(pfbname) - local pfbblob=fontloader.open(pfbname) - if pfbblob then - local characters=data.characters - local pfbdata=fontloader.to_table(pfbblob) - if pfbdata then - local glyphs=pfbdata.glyphs - if glyphs then - if trace_loading then - report_afm("getting index data from %a",pfbname) - end - for index,glyph in next,glyphs do - local name=glyph.name - if name then - local char=characters[name] - if char then - if trace_indexing then - report_afm("glyph %a has index %a",name,index) - end - char.index=index - end - end - end - elseif trace_loading then - report_afm("no glyph data in pfb file %a",pfbname) - end - elseif trace_loading then - report_afm("no data in pfb file %a",pfbname) - end - fontloader.close(pfbblob) - elseif trace_loading then - report_afm("invalid pfb file %a",pfbname) - end -end -local function readafm(filename) - local ok,afmblob,size=resolvers.loadbinfile(filename) - if ok and afmblob then - local data={ - resources={ - filename=resolvers.unresolve(filename), - version=afm.version, - creator="context mkiv", - }, - properties={ - hasitalics=false, - }, - goodies={}, - metadata={ - filename=file.removesuffix(file.basename(filename)) - }, - characters={ - }, - descriptions={ - }, - } - afmblob=gsub(afmblob,"StartCharMetrics(.-)EndCharMetrics",function(charmetrics) - if trace_loading then - report_afm("loading char metrics") - end - get_charmetrics(data,charmetrics,vector) - return "" - end) - afmblob=gsub(afmblob,"StartKernPairs(.-)EndKernPairs",function(kernpairs) - if trace_loading then - report_afm("loading kern pairs") - end - get_kernpairs(data,kernpairs) - return "" - end) - afmblob=gsub(afmblob,"StartFontMetrics%s+([%d%.]+)(.-)EndFontMetrics",function(version,fontmetrics) - if trace_loading then - report_afm("loading variables") - end - data.afmversion=version - get_variables(data,fontmetrics) - data.fontdimens=scan_comment(fontmetrics) - return "" - end) - return data - else - if trace_loading then - report_afm("no valid afm file %a",filename) - end - return nil - end -end -local addkerns,addligatures,addtexligatures,unify,normalize,fixnames -function afm.load(filename) - filename=resolvers.findfile(filename,'afm') or "" - if filename~="" and not fonts.names.ignoredfile(filename) then - local name=file.removesuffix(file.basename(filename)) - local data=containers.read(afm.cache,name) - local attr=lfs.attributes(filename) - local size,time=attr.size or 0,attr.modification or 0 - local pfbfile=file.replacesuffix(name,"pfb") - local pfbname=resolvers.findfile(pfbfile,"pfb") or "" - if pfbname=="" then - pfbname=resolvers.findfile(file.basename(pfbfile),"pfb") or "" - end - local pfbsize,pfbtime=0,0 - if pfbname~="" then - local attr=lfs.attributes(pfbname) - pfbsize=attr.size or 0 - pfbtime=attr.modification or 0 - end - if not data or data.size~=size or data.time~=time or 
data.pfbsize~=pfbsize or data.pfbtime~=pfbtime then - report_afm("reading %a",filename) - data=readafm(filename) - if data then - if pfbname~="" then - get_indexes(data,pfbname) - elseif trace_loading then - report_afm("no pfb file for %a",filename) - end - report_afm("unifying %a",filename) - unify(data,filename) - if afm.addligatures then - report_afm("add ligatures") - addligatures(data) - end - if afm.addtexligatures then - report_afm("add tex ligatures") - addtexligatures(data) - end - if afm.addkerns then - report_afm("add extra kerns") - addkerns(data) - end - normalize(data) - fixnames(data) - report_afm("add tounicode data") - fonts.mappings.addtounicode(data,filename) - data.size=size - data.time=time - data.pfbsize=pfbsize - data.pfbtime=pfbtime - report_afm("saving %a in cache",name) - data.resources.unicodes=nil - data=containers.write(afm.cache,name,data) - data=containers.read(afm.cache,name) - end - if applyruntimefixes and data then - applyruntimefixes(filename,data) - end - end - return data - else - return nil - end -end -local uparser=fonts.mappings.makenameparser() -unify=function(data,filename) - local unicodevector=fonts.encodings.agl.unicodes - local unicodes,names={},{} - local private=constructors.privateoffset - local descriptions=data.descriptions - for name,blob in next,data.characters do - local code=unicodevector[name] - if not code then - code=lpegmatch(uparser,name) - if not code then - code=private - private=private+1 - report_afm("assigning private slot %U for unknown glyph name %a",code,name) - end - end - local index=blob.index - unicodes[name]=code - names[name]=index - blob.name=name - descriptions[code]={ - boundingbox=blob.boundingbox, - width=blob.width, - kerns=blob.kerns, - index=index, - name=name, - } - end - for unicode,description in next,descriptions do - local kerns=description.kerns - if kerns then - local krn={} - for name,kern in next,kerns do - local unicode=unicodes[name] - if unicode then - krn[unicode]=kern - else - end - end - description.kerns=krn - end - end - data.characters=nil - local resources=data.resources - local filename=resources.filename or file.removesuffix(file.basename(filename)) - resources.filename=resolvers.unresolve(filename) - resources.unicodes=unicodes - resources.marks={} - resources.private=private -end -normalize=function(data) -end -fixnames=function(data) - for k,v in next,data.descriptions do - local n=v.name - local r=overloads[n] - if r then - local name=r.name - if trace_indexing then - report_afm("renaming characters %a to %a",n,name) - end - v.name=name - v.unicode=r.unicode - end - end -end -local addthem=function(rawdata,ligatures) - if ligatures then - local descriptions=rawdata.descriptions - local resources=rawdata.resources - local unicodes=resources.unicodes - for ligname,ligdata in next,ligatures do - local one=descriptions[unicodes[ligname]] - if one then - for _,pair in next,ligdata do - local two,three=unicodes[pair[1]],unicodes[pair[2]] - if two and three then - local ol=one.ligatures - if ol then - if not ol[two] then - ol[two]=three - end - else - one.ligatures={ [two]=three } - end - end - end - end - end - end -end -addligatures=function(rawdata) addthem(rawdata,afm.helpdata.ligatures ) end -addtexligatures=function(rawdata) addthem(rawdata,afm.helpdata.texligatures) end -addkerns=function(rawdata) - local descriptions=rawdata.descriptions - local resources=rawdata.resources - local unicodes=resources.unicodes - local function do_it_left(what) - if what then - for unicode,description 
in next,descriptions do - local kerns=description.kerns - if kerns then - local extrakerns - for complex,simple in next,what do - complex=unicodes[complex] - simple=unicodes[simple] - if complex and simple then - local ks=kerns[simple] - if ks and not kerns[complex] then - if extrakerns then - extrakerns[complex]=ks - else - extrakerns={ [complex]=ks } - end - end - end - end - if extrakerns then - description.extrakerns=extrakerns - end - end - end - end - end - local function do_it_copy(what) - if what then - for complex,simple in next,what do - complex=unicodes[complex] - simple=unicodes[simple] - if complex and simple then - local complexdescription=descriptions[complex] - if complexdescription then - local simpledescription=descriptions[complex] - if simpledescription then - local extrakerns - local kerns=simpledescription.kerns - if kerns then - for unicode,kern in next,kerns do - if extrakerns then - extrakerns[unicode]=kern - else - extrakerns={ [unicode]=kern } - end - end - end - local extrakerns=simpledescription.extrakerns - if extrakerns then - for unicode,kern in next,extrakerns do - if extrakerns then - extrakerns[unicode]=kern - else - extrakerns={ [unicode]=kern } - end - end - end - if extrakerns then - complexdescription.extrakerns=extrakerns - end - end - end - end - end - end - end - do_it_left(afm.helpdata.leftkerned) - do_it_left(afm.helpdata.bothkerned) - do_it_copy(afm.helpdata.bothkerned) - do_it_copy(afm.helpdata.rightkerned) -end -local function adddimensions(data) - if data then - for unicode,description in next,data.descriptions do - local bb=description.boundingbox - if bb then - local ht,dp=bb[4],-bb[2] - if ht==0 or ht<0 then - else - description.height=ht - end - if dp==0 or dp<0 then - else - description.depth=dp - end - end - end - end -end -local function copytotfm(data) - if data and data.descriptions then - local metadata=data.metadata - local resources=data.resources - local properties=derivetable(data.properties) - local descriptions=derivetable(data.descriptions) - local goodies=derivetable(data.goodies) - local characters={} - local parameters={} - local unicodes=resources.unicodes - for unicode,description in next,data.descriptions do - characters[unicode]={} - end - local filename=constructors.checkedfilename(resources) - local fontname=metadata.fontname or metadata.fullname - local fullname=metadata.fullname or metadata.fontname - local endash=0x0020 - local emdash=0x2014 - local spacer="space" - local spaceunits=500 - local monospaced=metadata.isfixedpitch - local charwidth=metadata.charwidth - local italicangle=metadata.italicangle - local charxheight=metadata.xheight and metadata.xheight>0 and metadata.xheight - properties.monospaced=monospaced - parameters.italicangle=italicangle - parameters.charwidth=charwidth - parameters.charxheight=charxheight - if properties.monospaced then - if descriptions[endash] then - spaceunits,spacer=descriptions[endash].width,"space" - end - if not spaceunits and descriptions[emdash] then - spaceunits,spacer=descriptions[emdash].width,"emdash" - end - if not spaceunits and charwidth then - spaceunits,spacer=charwidth,"charwidth" - end - else - if descriptions[endash] then - spaceunits,spacer=descriptions[endash].width,"space" - end - if not spaceunits and charwidth then - spaceunits,spacer=charwidth,"charwidth" - end - end - spaceunits=tonumber(spaceunits) - if spaceunits<200 then - end - parameters.slant=0 - parameters.space=spaceunits - parameters.space_stretch=500 - parameters.space_shrink=333 - 
parameters.x_height=400 - parameters.quad=1000 - if italicangle and italicangle~=0 then - parameters.italicangle=italicangle - parameters.italicfactor=math.cos(math.rad(90+italicangle)) - parameters.slant=- math.tan(italicangle*math.pi/180) - end - if monospaced then - parameters.space_stretch=0 - parameters.space_shrink=0 - elseif afm.syncspace then - parameters.space_stretch=spaceunits/2 - parameters.space_shrink=spaceunits/3 - end - parameters.extra_space=parameters.space_shrink - if charxheight then - parameters.x_height=charxheight - else - local x=0x0078 - if x then - local x=descriptions[x] - if x then - parameters.x_height=x.height - end - end - end - local fd=data.fontdimens - if fd and fd[8] and fd[9] and fd[10] then - for k,v in next,fd do - parameters[k]=v - end - end - parameters.designsize=(metadata.designsize or 10)*65536 - parameters.ascender=abs(metadata.ascender or 0) - parameters.descender=abs(metadata.descender or 0) - parameters.units=1000 - properties.spacer=spacer - properties.encodingbytes=2 - properties.format=fonts.formats[filename] or "type1" - properties.filename=filename - properties.fontname=fontname - properties.fullname=fullname - properties.psname=fullname - properties.name=filename or fullname or fontname - if next(characters) then - return { - characters=characters, - descriptions=descriptions, - parameters=parameters, - resources=resources, - properties=properties, - goodies=goodies, - } - end - end - return nil -end -function afm.setfeatures(tfmdata,features) - local okay=constructors.initializefeatures("afm",tfmdata,features,trace_features,report_afm) - if okay then - return constructors.collectprocessors("afm",tfmdata,features,trace_features,report_afm) - else - return {} - end -end -local function addtables(data) - local resources=data.resources - local lookuptags=resources.lookuptags - local unicodes=resources.unicodes - if not lookuptags then - lookuptags={} - resources.lookuptags=lookuptags - end - setmetatableindex(lookuptags,function(t,k) - local v=type(k)=="number" and ("lookup "..k) or k - t[k]=v - return v - end) - if not unicodes then - unicodes={} - resources.unicodes=unicodes - setmetatableindex(unicodes,function(t,k) - setmetatableindex(unicodes,nil) - for u,d in next,data.descriptions do - local n=d.name - if n then - t[n]=u - end - end - return rawget(t,k) - end) - end - constructors.addcoreunicodes(unicodes) -end -local function afmtotfm(specification) - local afmname=specification.filename or specification.name - if specification.forced=="afm" or specification.format=="afm" then - if trace_loading then - report_afm("forcing afm format for %a",afmname) - end - else - local tfmname=findbinfile(afmname,"ofm") or "" - if tfmname~="" then - if trace_loading then - report_afm("fallback from afm to tfm for %a",afmname) - end - return - end - end - if afmname~="" then - local features=constructors.checkedfeatures("afm",specification.features.normal) - specification.features.normal=features - constructors.hashinstance(specification,true) - specification=definers.resolve(specification) - local cache_id=specification.hash - local tfmdata=containers.read(constructors.cache,cache_id) - if not tfmdata then - local rawdata=afm.load(afmname) - if rawdata and next(rawdata) then - addtables(rawdata) - adddimensions(rawdata) - tfmdata=copytotfm(rawdata) - if tfmdata and next(tfmdata) then - local shared=tfmdata.shared - if not shared then - shared={} - tfmdata.shared=shared - end - shared.rawdata=rawdata - shared.features=features - 
shared.processes=afm.setfeatures(tfmdata,features) - end - elseif trace_loading then - report_afm("no (valid) afm file found with name %a",afmname) - end - tfmdata=containers.write(constructors.cache,cache_id,tfmdata) - end - return tfmdata - end -end -local function read_from_afm(specification) - local tfmdata=afmtotfm(specification) - if tfmdata then - tfmdata.properties.name=specification.name - tfmdata=constructors.scale(tfmdata,specification) - local allfeatures=tfmdata.shared.features or specification.features.normal - constructors.applymanipulators("afm",tfmdata,allfeatures,trace_features,report_afm) - fonts.loggers.register(tfmdata,'afm',specification) - end - return tfmdata -end -local function prepareligatures(tfmdata,ligatures,value) - if value then - local descriptions=tfmdata.descriptions - local hasligatures=false - for unicode,character in next,tfmdata.characters do - local description=descriptions[unicode] - local dligatures=description.ligatures - if dligatures then - local cligatures=character.ligatures - if not cligatures then - cligatures={} - character.ligatures=cligatures - end - for unicode,ligature in next,dligatures do - cligatures[unicode]={ - char=ligature, - type=0 - } - end - hasligatures=true - end - end - tfmdata.properties.hasligatures=hasligatures - end -end -local function preparekerns(tfmdata,kerns,value) - if value then - local rawdata=tfmdata.shared.rawdata - local resources=rawdata.resources - local unicodes=resources.unicodes - local descriptions=tfmdata.descriptions - local haskerns=false - for u,chr in next,tfmdata.characters do - local d=descriptions[u] - local newkerns=d[kerns] - if newkerns then - local kerns=chr.kerns - if not kerns then - kerns={} - chr.kerns=kerns - end - for k,v in next,newkerns do - local uk=unicodes[k] - if uk then - kerns[uk]=v - end - end - haskerns=true - end - end - tfmdata.properties.haskerns=haskerns - end -end -local list={ - [0x0027]=0x2019, -} -local function texreplacements(tfmdata,value) - local descriptions=tfmdata.descriptions - local characters=tfmdata.characters - for k,v in next,list do - characters [k]=characters [v] - descriptions[k]=descriptions[v] - end -end -local function ligatures (tfmdata,value) prepareligatures(tfmdata,'ligatures',value) end -local function texligatures(tfmdata,value) prepareligatures(tfmdata,'texligatures',value) end -local function kerns (tfmdata,value) preparekerns (tfmdata,'kerns',value) end -local function extrakerns (tfmdata,value) preparekerns (tfmdata,'extrakerns',value) end -registerafmfeature { - name="liga", - description="traditional ligatures", - initializers={ - base=ligatures, - node=ligatures, - } -} -registerafmfeature { - name="kern", - description="intercharacter kerning", - initializers={ - base=kerns, - node=kerns, - } -} -registerafmfeature { - name="extrakerns", - description="additional intercharacter kerning", - initializers={ - base=extrakerns, - node=extrakerns, - } -} -registerafmfeature { - name='tlig', - description='tex ligatures', - initializers={ - base=texligatures, - node=texligatures, - } -} -registerafmfeature { - name='trep', - description='tex replacements', - initializers={ - base=texreplacements, - node=texreplacements, - } -} -local check_tfm=readers.check_tfm -fonts.formats.afm="type1" -fonts.formats.pfb="type1" -local function check_afm(specification,fullname) - local foundname=findbinfile(fullname,'afm') or "" - if foundname=="" then - foundname=fonts.names.getfilename(fullname,"afm") or "" - end - if foundname=="" and afm.autoprefixed 
then - local encoding,shortname=match(fullname,"^(.-)%-(.*)$") - if encoding and shortname and fonts.encodings.known[encoding] then - shortname=findbinfile(shortname,'afm') or "" - if shortname~="" then - foundname=shortname - if trace_defining then - report_afm("stripping encoding prefix from filename %a",afmname) - end - end - end - end - if foundname~="" then - specification.filename=foundname - specification.format="afm" - return read_from_afm(specification) - end -end -function readers.afm(specification,method) - local fullname,tfmdata=specification.filename or "",nil - if fullname=="" then - local forced=specification.forced or "" - if forced~="" then - tfmdata=check_afm(specification,specification.name.."."..forced) - end - if not tfmdata then - method=method or definers.method or "afm or tfm" - if method=="tfm" then - tfmdata=check_tfm(specification,specification.name) - elseif method=="afm" then - tfmdata=check_afm(specification,specification.name) - elseif method=="tfm or afm" then - tfmdata=check_tfm(specification,specification.name) or check_afm(specification,specification.name) - else - tfmdata=check_afm(specification,specification.name) or check_tfm(specification,specification.name) - end - end - else - tfmdata=check_afm(specification,fullname) - end - return tfmdata -end -function readers.pfb(specification,method) - local original=specification.specification - if trace_defining then - report_afm("using afm reader for %a",original) - end - specification.specification=gsub(original,"%.pfb",".afm") - specification.forced="afm" - return readers.afm(specification,method) -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-afk']={ - version=1.001, - comment="companion to font-afm.lua", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files", - dataonly=true, -} -local allocate=utilities.storage.allocate -fonts.handlers.afm.helpdata={ - ligatures=allocate { - ['f']={ - { 'f','ff' }, - { 'i','fi' }, - { 'l','fl' }, - }, - ['ff']={ - { 'i','ffi' } - }, - ['fi']={ - { 'i','fii' } - }, - ['fl']={ - { 'i','fli' } - }, - ['s']={ - { 't','st' } - }, - ['i']={ - { 'j','ij' } - }, - }, - texligatures=allocate { - ['quoteleft']={ - { 'quoteleft','quotedblleft' } - }, - ['quoteright']={ - { 'quoteright','quotedblright' } - }, - ['hyphen']={ - { 'hyphen','endash' } - }, - ['endash']={ - { 'hyphen','emdash' } - } - }, - leftkerned=allocate { - AEligature="A",aeligature="a", - OEligature="O",oeligature="o", - IJligature="I",ijligature="i", - AE="A",ae="a", - OE="O",oe="o", - IJ="I",ij="i", - Ssharp="S",ssharp="s", - }, - rightkerned=allocate { - AEligature="E",aeligature="e", - OEligature="E",oeligature="e", - IJligature="J",ijligature="j", - AE="E",ae="e", - OE="E",oe="e", - IJ="J",ij="j", - Ssharp="S",ssharp="s", - }, - bothkerned=allocate { - Acircumflex="A",acircumflex="a", - Ccircumflex="C",ccircumflex="c", - Ecircumflex="E",ecircumflex="e", - Gcircumflex="G",gcircumflex="g", - Hcircumflex="H",hcircumflex="h", - Icircumflex="I",icircumflex="i", - Jcircumflex="J",jcircumflex="j", - Ocircumflex="O",ocircumflex="o", - Scircumflex="S",scircumflex="s", - Ucircumflex="U",ucircumflex="u", - Wcircumflex="W",wcircumflex="w", - Ycircumflex="Y",ycircumflex="y", - Agrave="A",agrave="a", - Egrave="E",egrave="e", - Igrave="I",igrave="i", - Ograve="O",ograve="o", - Ugrave="U",ugrave="u", - Ygrave="Y",ygrave="y", - 
Atilde="A",atilde="a", - Itilde="I",itilde="i", - Otilde="O",otilde="o", - Utilde="U",utilde="u", - Ntilde="N",ntilde="n", - Adiaeresis="A",adiaeresis="a",Adieresis="A",adieresis="a", - Ediaeresis="E",ediaeresis="e",Edieresis="E",edieresis="e", - Idiaeresis="I",idiaeresis="i",Idieresis="I",idieresis="i", - Odiaeresis="O",odiaeresis="o",Odieresis="O",odieresis="o", - Udiaeresis="U",udiaeresis="u",Udieresis="U",udieresis="u", - Ydiaeresis="Y",ydiaeresis="y",Ydieresis="Y",ydieresis="y", - Aacute="A",aacute="a", - Cacute="C",cacute="c", - Eacute="E",eacute="e", - Iacute="I",iacute="i", - Lacute="L",lacute="l", - Nacute="N",nacute="n", - Oacute="O",oacute="o", - Racute="R",racute="r", - Sacute="S",sacute="s", - Uacute="U",uacute="u", - Yacute="Y",yacute="y", - Zacute="Z",zacute="z", - Dstroke="D",dstroke="d", - Hstroke="H",hstroke="h", - Tstroke="T",tstroke="t", - Cdotaccent="C",cdotaccent="c", - Edotaccent="E",edotaccent="e", - Gdotaccent="G",gdotaccent="g", - Idotaccent="I",idotaccent="i", - Zdotaccent="Z",zdotaccent="z", - Amacron="A",amacron="a", - Emacron="E",emacron="e", - Imacron="I",imacron="i", - Omacron="O",omacron="o", - Umacron="U",umacron="u", - Ccedilla="C",ccedilla="c", - Kcedilla="K",kcedilla="k", - Lcedilla="L",lcedilla="l", - Ncedilla="N",ncedilla="n", - Rcedilla="R",rcedilla="r", - Scedilla="S",scedilla="s", - Tcedilla="T",tcedilla="t", - Ohungarumlaut="O",ohungarumlaut="o", - Uhungarumlaut="U",uhungarumlaut="u", - Aogonek="A",aogonek="a", - Eogonek="E",eogonek="e", - Iogonek="I",iogonek="i", - Uogonek="U",uogonek="u", - Aring="A",aring="a", - Uring="U",uring="u", - Abreve="A",abreve="a", - Ebreve="E",ebreve="e", - Gbreve="G",gbreve="g", - Ibreve="I",ibreve="i", - Obreve="O",obreve="o", - Ubreve="U",ubreve="u", - Ccaron="C",ccaron="c", - Dcaron="D",dcaron="d", - Ecaron="E",ecaron="e", - Lcaron="L",lcaron="l", - Ncaron="N",ncaron="n", - Rcaron="R",rcaron="r", - Scaron="S",scaron="s", - Tcaron="T",tcaron="t", - Zcaron="Z",zcaron="z", - dotlessI="I",dotlessi="i", - dotlessJ="J",dotlessj="j", - AEligature="AE",aeligature="ae",AE="AE",ae="ae", - OEligature="OE",oeligature="oe",OE="OE",oe="oe", - IJligature="IJ",ijligature="ij",IJ="IJ",ij="ij", - Lstroke="L",lstroke="l",Lslash="L",lslash="l", - Ostroke="O",ostroke="o",Oslash="O",oslash="o", - Ssharp="SS",ssharp="ss", - Aumlaut="A",aumlaut="a", - Eumlaut="E",eumlaut="e", - Iumlaut="I",iumlaut="i", - Oumlaut="O",oumlaut="o", - Uumlaut="U",uumlaut="u", - } -} - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['luatex-fonts-tfm']={ - version=1.001, - comment="companion to luatex-*.tex", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end -local fonts=fonts -local tfm={} -fonts.handlers.tfm=tfm -fonts.formats.tfm="type1" -function fonts.readers.tfm(specification) - local fullname=specification.filename or "" - if fullname=="" then - local forced=specification.forced or "" - if forced~="" then - fullname=specification.name.."."..forced - else - fullname=specification.name - end - end - local foundname=resolvers.findbinfile(fullname,'tfm') or "" - if foundname=="" then - foundname=resolvers.findbinfile(fullname,'ofm') or "" - end - if foundname~="" then - specification.filename=foundname - specification.format="ofm" - return 
font.read_tfm(specification.filename,specification.size) - end -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-oti']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local lower=string.lower -local fonts=fonts -local constructors=fonts.constructors -local otf=constructors.newhandler("otf") -local otffeatures=constructors.newfeatures("otf") -local otftables=otf.tables -local registerotffeature=otffeatures.register -local allocate=utilities.storage.allocate -registerotffeature { - name="features", - description="initialization of feature handler", - default=true, -} -local function setmode(tfmdata,value) - if value then - tfmdata.properties.mode=lower(value) - end -end -local function setlanguage(tfmdata,value) - if value then - local cleanvalue=lower(value) - local languages=otftables and otftables.languages - local properties=tfmdata.properties - if not languages then - properties.language=cleanvalue - elseif languages[value] then - properties.language=cleanvalue - else - properties.language="dflt" - end - end -end -local function setscript(tfmdata,value) - if value then - local cleanvalue=lower(value) - local scripts=otftables and otftables.scripts - local properties=tfmdata.properties - if not scripts then - properties.script=cleanvalue - elseif scripts[value] then - properties.script=cleanvalue - else - properties.script="dflt" - end - end -end -registerotffeature { - name="mode", - description="mode", - initializers={ - base=setmode, - node=setmode, - } -} -registerotffeature { - name="language", - description="language", - initializers={ - base=setlanguage, - node=setlanguage, - } -} -registerotffeature { - name="script", - description="script", - initializers={ - base=setscript, - node=setscript, - } -} - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-otf']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local utfbyte=utf.byte -local format,gmatch,gsub,find,match,lower,strip=string.format,string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip -local type,next,tonumber,tostring=type,next,tonumber,tostring -local abs=math.abs -local insert=table.insert -local lpegmatch=lpeg.match -local reversed,concat,remove,sortedkeys=table.reversed,table.concat,table.remove,table.sortedkeys -local ioflush=io.flush -local fastcopy,tohash,derivetable=table.fastcopy,table.tohash,table.derive -local formatters=string.formatters -local P,R,S,C,Ct,lpegmatch=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Ct,lpeg.match -local setmetatableindex=table.setmetatableindex -local allocate=utilities.storage.allocate -local registertracker=trackers.register -local registerdirective=directives.register -local starttiming=statistics.starttiming -local stoptiming=statistics.stoptiming -local elapsedtime=statistics.elapsedtime -local findbinfile=resolvers.findbinfile -local trace_private=false registertracker("otf.private",function(v) trace_private=v end) -local trace_loading=false registertracker("otf.loading",function(v) trace_loading=v end) -local trace_features=false registertracker("otf.features",function(v) 
trace_features=v end) -local trace_dynamics=false registertracker("otf.dynamics",function(v) trace_dynamics=v end) -local trace_sequences=false registertracker("otf.sequences",function(v) trace_sequences=v end) -local trace_markwidth=false registertracker("otf.markwidth",function(v) trace_markwidth=v end) -local trace_defining=false registertracker("fonts.defining",function(v) trace_defining=v end) -local compact_lookups=true registertracker("otf.compactlookups",function(v) compact_lookups=v end) -local purge_names=true registertracker("otf.purgenames",function(v) purge_names=v end) -local report_otf=logs.reporter("fonts","otf loading") -local fonts=fonts -local otf=fonts.handlers.otf -otf.glists={ "gsub","gpos" } -otf.version=2.802 -otf.cache=containers.define("fonts","otf",otf.version,true) -local fontdata=fonts.hashes.identifiers -local chardata=characters and characters.data -local definers=fonts.definers -local readers=fonts.readers -local constructors=fonts.constructors -local otffeatures=constructors.newfeatures("otf") -local registerotffeature=otffeatures.register -local enhancers=allocate() -otf.enhancers=enhancers -local patches={} -enhancers.patches=patches -local forceload=false -local cleanup=0 -local packdata=true -local syncspace=true -local forcenotdef=false -local includesubfonts=false -local overloadkerns=false -local applyruntimefixes=fonts.treatments and fonts.treatments.applyfixes -local wildcard="*" -local default="dflt" -local fontloaderfields=fontloader.fields -local mainfields=nil -local glyphfields=nil -local formats=fonts.formats -formats.otf="opentype" -formats.ttf="truetype" -formats.ttc="truetype" -formats.dfont="truetype" -registerdirective("fonts.otf.loader.cleanup",function(v) cleanup=tonumber(v) or (v and 1) or 0 end) -registerdirective("fonts.otf.loader.force",function(v) forceload=v end) -registerdirective("fonts.otf.loader.pack",function(v) packdata=v end) -registerdirective("fonts.otf.loader.syncspace",function(v) syncspace=v end) -registerdirective("fonts.otf.loader.forcenotdef",function(v) forcenotdef=v end) -registerdirective("fonts.otf.loader.overloadkerns",function(v) overloadkerns=v end) -function otf.fileformat(filename) - local leader=lower(io.loadchunk(filename,4)) - local suffix=lower(file.suffix(filename)) - if leader=="otto" then - return formats.otf,suffix=="otf" - elseif leader=="ttcf" then - return formats.ttc,suffix=="ttc" - elseif suffix=="ttc" then - return formats.ttc,true - elseif suffix=="dfont" then - return formats.dfont,true - else - return formats.ttf,suffix=="ttf" - end -end -local function otf_format(filename) - local format,okay=otf.fileformat(filename) - if not okay then - report_otf("font %a is actually an %a file",filename,format) - end - return format -end -local function load_featurefile(raw,featurefile) - if featurefile and featurefile~="" then - if trace_loading then - report_otf("using featurefile %a",featurefile) - end - fontloader.apply_featurefile(raw,featurefile) - end -end -local function showfeatureorder(rawdata,filename) - local sequences=rawdata.resources.sequences - if sequences and #sequences>0 then - if trace_loading then - report_otf("font %a has %s sequences",filename,#sequences) - report_otf(" ") - end - for nos=1,#sequences do - local sequence=sequences[nos] - local typ=sequence.type or "no-type" - local name=sequence.name or "no-name" - local subtables=sequence.subtables or { "no-subtables" } - local features=sequence.features - if trace_loading then - report_otf("%3i %-15s %-20s [% 
t]",nos,name,typ,subtables) - end - if features then - for feature,scripts in next,features do - local tt={} - if type(scripts)=="table" then - for script,languages in next,scripts do - local ttt={} - for language,_ in next,languages do - ttt[#ttt+1]=language - end - tt[#tt+1]=formatters["[%s: % t]"](script,ttt) - end - if trace_loading then - report_otf(" %s: % t",feature,tt) - end - else - if trace_loading then - report_otf(" %s: %S",feature,scripts) - end - end - end - end - end - if trace_loading then - report_otf("\n") - end - elseif trace_loading then - report_otf("font %a has no sequences",filename) - end -end -local valid_fields=table.tohash { - "ascent", - "cidinfo", - "copyright", - "descent", - "design_range_bottom", - "design_range_top", - "design_size", - "encodingchanged", - "extrema_bound", - "familyname", - "fontname", - "fontstyle_id", - "fontstyle_name", - "fullname", - "hasvmetrics", - "horiz_base", - "issans", - "isserif", - "italicangle", - "macstyle", - "onlybitmaps", - "origname", - "os2_version", - "pfminfo", - "serifcheck", - "sfd_version", - "strokedfont", - "strokewidth", - "table_version", - "ttf_tables", - "uni_interp", - "uniqueid", - "units_per_em", - "upos", - "use_typo_metrics", - "uwidth", - "validation_state", - "version", - "vert_base", - "weight", - "weight_width_slope_only", -} -local ordered_enhancers={ - "prepare tables", - "prepare glyphs", - "prepare lookups", - "analyze glyphs", - "analyze math", - "reorganize lookups", - "reorganize mark classes", - "reorganize anchor classes", - "reorganize glyph kerns", - "reorganize glyph lookups", - "reorganize glyph anchors", - "merge kern classes", - "reorganize features", - "reorganize subtables", - "check glyphs", - "check metadata", - "check extra features", - "prepare tounicode", - "check encoding", - "add duplicates", - "cleanup tables", - "compact lookups", - "purge names", -} -local actions=allocate() -local before=allocate() -local after=allocate() -patches.before=before -patches.after=after -local function enhance(name,data,filename,raw) - local enhancer=actions[name] - if enhancer then - if trace_loading then - report_otf("apply enhancement %a to file %a",name,filename) - ioflush() - end - enhancer(data,filename,raw) - else - end -end -function enhancers.apply(data,filename,raw) - local basename=file.basename(lower(filename)) - if trace_loading then - report_otf("%s enhancing file %a","start",filename) - end - ioflush() - for e=1,#ordered_enhancers do - local enhancer=ordered_enhancers[e] - local b=before[enhancer] - if b then - for pattern,action in next,b do - if find(basename,pattern) then - action(data,filename,raw) - end - end - end - enhance(enhancer,data,filename,raw) - local a=after[enhancer] - if a then - for pattern,action in next,a do - if find(basename,pattern) then - action(data,filename,raw) - end - end - end - ioflush() - end - if trace_loading then - report_otf("%s enhancing file %a","stop",filename) - end - ioflush() -end -function patches.register(what,where,pattern,action) - local pw=patches[what] - if pw then - local ww=pw[where] - if ww then - ww[pattern]=action - else - pw[where]={ [pattern]=action} - end - end -end -function patches.report(fmt,...) 
- if trace_loading then - report_otf("patching: %s",formatters[fmt](...)) - end -end -function enhancers.register(what,action) - actions[what]=action -end -function otf.load(filename,sub,featurefile) - local base=file.basename(file.removesuffix(filename)) - local name=file.removesuffix(base) - local attr=lfs.attributes(filename) - local size=attr and attr.size or 0 - local time=attr and attr.modification or 0 - if featurefile then - name=name.."@"..file.removesuffix(file.basename(featurefile)) - end - if sub=="" then - sub=false - end - local hash=name - if sub then - hash=hash.."-"..sub - end - hash=containers.cleanname(hash) - local featurefiles - if featurefile then - featurefiles={} - for s in gmatch(featurefile,"[^,]+") do - local name=resolvers.findfile(file.addsuffix(s,'fea'),'fea') or "" - if name=="" then - report_otf("loading error, no featurefile %a",s) - else - local attr=lfs.attributes(name) - featurefiles[#featurefiles+1]={ - name=name, - size=attr and attr.size or 0, - time=attr and attr.modification or 0, - } - end - end - if #featurefiles==0 then - featurefiles=nil - end - end - local data=containers.read(otf.cache,hash) - local reload=not data or data.size~=size or data.time~=time - if forceload then - report_otf("forced reload of %a due to hard coded flag",filename) - reload=true - end - if not reload then - local featuredata=data.featuredata - if featurefiles then - if not featuredata or #featuredata~=#featurefiles then - reload=true - else - for i=1,#featurefiles do - local fi,fd=featurefiles[i],featuredata[i] - if fi.name~=fd.name or fi.size~=fd.size or fi.time~=fd.time then - reload=true - break - end - end - end - elseif featuredata then - reload=true - end - if reload then - report_otf("loading: forced reload due to changed featurefile specification %a",featurefile) - end - end - if reload then - report_otf("loading %a, hash %a",filename,hash) - local fontdata,messages - if sub then - fontdata,messages=fontloader.open(filename,sub) - else - fontdata,messages=fontloader.open(filename) - end - if fontdata then - mainfields=mainfields or (fontloaderfields and fontloaderfields(fontdata)) - end - if trace_loading and messages and #messages>0 then - if type(messages)=="string" then - report_otf("warning: %s",messages) - else - for m=1,#messages do - report_otf("warning: %S",messages[m]) - end - end - else - report_otf("loading done") - end - if fontdata then - if featurefiles then - for i=1,#featurefiles do - load_featurefile(fontdata,featurefiles[i].name) - end - end - local unicodes={ - } - local splitter=lpeg.splitter(" ",unicodes) - data={ - size=size, - time=time, - format=otf_format(filename), - featuredata=featurefiles, - resources={ - filename=resolvers.unresolve(filename), - version=otf.version, - creator="context mkiv", - unicodes=unicodes, - indices={ - }, - duplicates={ - }, - variants={ - }, - lookuptypes={}, - }, - warnings={}, - metadata={ - }, - properties={ - }, - descriptions={}, - goodies={}, - helpers={ - tounicodelist=splitter, - tounicodetable=Ct(splitter), - }, - } - starttiming(data) - report_otf("file size: %s",size) - enhancers.apply(data,filename,fontdata) - local packtime={} - if packdata then - if cleanup>0 then - collectgarbage("collect") - end - starttiming(packtime) - enhance("pack",data,filename,nil) - stoptiming(packtime) - end - report_otf("saving %a in cache",filename) - data=containers.write(otf.cache,hash,data) - if cleanup>1 then - collectgarbage("collect") - end - stoptiming(data) - if elapsedtime then - report_otf("preprocessing 
and caching time %s, packtime %s", - elapsedtime(data),packdata and elapsedtime(packtime) or 0) - end - fontloader.close(fontdata) - if cleanup>3 then - collectgarbage("collect") - end - data=containers.read(otf.cache,hash) - if cleanup>2 then - collectgarbage("collect") - end - else - data=nil - report_otf("loading failed due to read error") - end - end - if data then - if trace_defining then - report_otf("loading from cache using hash %a",hash) - end - enhance("unpack",data,filename,nil,false) - local resources=data.resources - local lookuptags=resources.lookuptags - local unicodes=resources.unicodes - if not lookuptags then - lookuptags={} - resources.lookuptags=lookuptags - end - setmetatableindex(lookuptags,function(t,k) - local v=type(k)=="number" and ("lookup "..k) or k - t[k]=v - return v - end) - if not unicodes then - unicodes={} - resources.unicodes=unicodes - setmetatableindex(unicodes,function(t,k) - setmetatableindex(unicodes,nil) - for u,d in next,data.descriptions do - local n=d.name - if n then - t[n]=u - else - end - end - return rawget(t,k) - end) - end - constructors.addcoreunicodes(unicodes) - if applyruntimefixes then - applyruntimefixes(filename,data) - end - enhance("add dimensions",data,filename,nil,false) - if trace_sequences then - showfeatureorder(data,filename) - end - end - return data -end -local mt={ - __index=function(t,k) - if k=="height" then - local ht=t.boundingbox[4] - return ht<0 and 0 or ht - elseif k=="depth" then - local dp=-t.boundingbox[2] - return dp<0 and 0 or dp - elseif k=="width" then - return 0 - elseif k=="name" then - return forcenotdef and ".notdef" - end - end -} -actions["prepare tables"]=function(data,filename,raw) - data.properties.hasitalics=false -end -actions["add dimensions"]=function(data,filename) - if data then - local descriptions=data.descriptions - local resources=data.resources - local defaultwidth=resources.defaultwidth or 0 - local defaultheight=resources.defaultheight or 0 - local defaultdepth=resources.defaultdepth or 0 - local basename=trace_markwidth and file.basename(filename) - for _,d in next,descriptions do - local bb,wd=d.boundingbox,d.width - if not wd then - d.width=defaultwidth - elseif trace_markwidth and wd~=0 and d.class=="mark" then - report_otf("mark %a with width %b found in %a",d.name or "",wd,basename) - end - if bb then - local ht,dp=bb[4],-bb[2] - if ht==0 or ht<0 then - else - d.height=ht - end - if dp==0 or dp<0 then - else - d.depth=dp - end - end - end - end -end -local function somecopy(old) - if old then - local new={} - if type(old)=="table" then - for k,v in next,old do - if k=="glyphs" then - elseif type(v)=="table" then - new[k]=somecopy(v) - else - new[k]=v - end - end - else - for i=1,#mainfields do - local k=mainfields[i] - local v=old[k] - if k=="glyphs" then - elseif type(v)=="table" then - new[k]=somecopy(v) - else - new[k]=v - end - end - end - return new - else - return {} - end -end -actions["prepare glyphs"]=function(data,filename,raw) - local rawglyphs=raw.glyphs - local rawsubfonts=raw.subfonts - local rawcidinfo=raw.cidinfo - local criterium=constructors.privateoffset - local private=criterium - local resources=data.resources - local metadata=data.metadata - local properties=data.properties - local descriptions=data.descriptions - local unicodes=resources.unicodes - local indices=resources.indices - local duplicates=resources.duplicates - local variants=resources.variants - if rawsubfonts then - metadata.subfonts=includesubfonts and {} - properties.cidinfo=rawcidinfo - if 
rawcidinfo.registry then - local cidmap=fonts.cid.getmap(rawcidinfo) - if cidmap then - rawcidinfo.usedname=cidmap.usedname - local nofnames,nofunicodes=0,0 - local cidunicodes,cidnames=cidmap.unicodes,cidmap.names - for cidindex=1,#rawsubfonts do - local subfont=rawsubfonts[cidindex] - local cidglyphs=subfont.glyphs - if includesubfonts then - metadata.subfonts[cidindex]=somecopy(subfont) - end - for index=0,subfont.glyphcnt-1 do - local glyph=cidglyphs[index] - if glyph then - local unicode=glyph.unicode - if unicode>=0x00E000 and unicode<=0x00F8FF then - unicode=-1 - elseif unicode>=0x0F0000 and unicode<=0x0FFFFD then - unicode=-1 - elseif unicode>=0x100000 and unicode<=0x10FFFD then - unicode=-1 - end - local name=glyph.name or cidnames[index] - if not unicode or unicode==-1 then - unicode=cidunicodes[index] - end - if unicode and descriptions[unicode] then - if trace_private then - report_otf("preventing glyph %a at index %H to overload unicode %U",name or "noname",index,unicode) - end - unicode=-1 - end - if not unicode or unicode==-1 then - if not name then - name=format("u%06X.ctx",private) - end - unicode=private - unicodes[name]=private - if trace_private then - report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private) - end - private=private+1 - nofnames=nofnames+1 - else - if not name then - name=format("u%06X.ctx",unicode) - end - unicodes[name]=unicode - nofunicodes=nofunicodes+1 - end - indices[index]=unicode - local description={ - boundingbox=glyph.boundingbox, - name=glyph.name or name or "unknown", - cidindex=cidindex, - index=index, - glyph=glyph, - } - descriptions[unicode]=description - else - end - end - end - if trace_loading then - report_otf("cid font remapped, %s unicode points, %s symbolic names, %s glyphs",nofunicodes,nofnames,nofunicodes+nofnames) - end - elseif trace_loading then - report_otf("unable to remap cid font, missing cid file for %a",filename) - end - elseif trace_loading then - report_otf("font %a has no glyphs",filename) - end - else - for index=0,raw.glyphcnt-1 do - local glyph=rawglyphs[index] - if glyph then - local unicode=glyph.unicode - local name=glyph.name - if not unicode or unicode==-1 then - unicode=private - unicodes[name]=private - if trace_private then - report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private) - end - private=private+1 - else - if unicode>criterium then - local taken=descriptions[unicode] - if taken then - if unicode>=private then - private=unicode+1 - else - private=private+1 - end - descriptions[private]=taken - unicodes[taken.name]=private - indices[taken.index]=private - if trace_private then - report_otf("slot %U is moved to %U due to private in font",unicode) - end - else - if unicode>=private then - private=unicode+1 - end - end - end - unicodes[name]=unicode - end - indices[index]=unicode - descriptions[unicode]={ - boundingbox=glyph.boundingbox, - name=name, - index=index, - glyph=glyph, - } - local altuni=glyph.altuni - if altuni then - for i=1,#altuni do - local a=altuni[i] - local u=a.unicode - local v=a.variant - if v then - local vv=variants[v] - if vv then - vv[u]=unicode - else - vv={ [u]=unicode } - variants[v]=vv - end - end - end - end - else - report_otf("potential problem: glyph %U is used but empty",index) - end - end - end - resources.private=private -end -actions["check encoding"]=function(data,filename,raw) - local descriptions=data.descriptions - local resources=data.resources - local properties=data.properties - local 
unicodes=resources.unicodes - local indices=resources.indices - local duplicates=resources.duplicates - local mapdata=raw.map or {} - local unicodetoindex=mapdata and mapdata.map or {} - local indextounicode=mapdata and mapdata.backmap or {} - local encname=lower(data.enc_name or mapdata.enc_name or "") - local criterium=0xFFFF - local privateoffset=constructors.privateoffset - if find(encname,"unicode") then - if trace_loading then - report_otf("checking embedded unicode map %a",encname) - end - local reported={} - for maybeunicode,index in next,unicodetoindex do - if descriptions[maybeunicode] then - else - local unicode=indices[index] - if not unicode then - elseif maybeunicode==unicode then - elseif unicode>privateoffset then - else - local d=descriptions[unicode] - if d then - local c=d.copies - if c then - c[maybeunicode]=true - else - d.copies={ [maybeunicode]=true } - end - elseif index and not reported[index] then - report_otf("missing index %i",index) - reported[index]=true - end - end - end - end - for unicode,data in next,descriptions do - local d=data.copies - if d then - duplicates[unicode]=sortedkeys(d) - data.copies=nil - end - end - elseif properties.cidinfo then - report_otf("warning: no unicode map, used cidmap %a",properties.cidinfo.usedname) - else - report_otf("warning: non unicode map %a, only using glyph unicode data",encname or "whatever") - end - if mapdata then - mapdata.map={} - mapdata.backmap={} - end -end -actions["add duplicates"]=function(data,filename,raw) - local descriptions=data.descriptions - local resources=data.resources - local properties=data.properties - local unicodes=resources.unicodes - local indices=resources.indices - local duplicates=resources.duplicates - for unicode,d in next,duplicates do - local nofduplicates=#d - if nofduplicates>4 then - if trace_loading then - report_otf("ignoring excessive duplicates of %U (n=%s)",unicode,nofduplicates) - end - else - for i=1,nofduplicates do - local u=d[i] - if not descriptions[u] then - local description=descriptions[unicode] - local n=0 - for _,description in next,descriptions do - local kerns=description.kerns - if kerns then - for _,k in next,kerns do - local ku=k[unicode] - if ku then - k[u]=ku - n=n+1 - end - end - end - end - if u>0 then - local duplicate=table.copy(description) - duplicate.comment=format("copy of U+%05X",unicode) - descriptions[u]=duplicate - if trace_loading then - report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n) - end - end - end - end - end - end -end -actions["analyze glyphs"]=function(data,filename,raw) - local descriptions=data.descriptions - local resources=data.resources - local metadata=data.metadata - local properties=data.properties - local hasitalics=false - local widths={} - local marks={} - for unicode,description in next,descriptions do - local glyph=description.glyph - local italic=glyph.italic_correction - if not italic then - elseif italic==0 then - else - description.italic=italic - hasitalics=true - end - local width=glyph.width - widths[width]=(widths[width] or 0)+1 - local class=glyph.class - if class then - if class=="mark" then - marks[unicode]=true - end - description.class=class - end - end - properties.hasitalics=hasitalics - resources.marks=marks - local wd,most=0,1 - for k,v in next,widths do - if v>most then - wd,most=k,v - end - end - if most>1000 then - if trace_loading then - report_otf("most common width: %s (%s times), sharing (cjk font)",wd,most) - end - for unicode,description in next,descriptions 
do - if description.width==wd then - else - description.width=description.glyph.width - end - end - resources.defaultwidth=wd - else - for unicode,description in next,descriptions do - description.width=description.glyph.width - end - end -end -actions["reorganize mark classes"]=function(data,filename,raw) - local mark_classes=raw.mark_classes - if mark_classes then - local resources=data.resources - local unicodes=resources.unicodes - local markclasses={} - resources.markclasses=markclasses - for name,class in next,mark_classes do - local t={} - for s in gmatch(class,"[^ ]+") do - t[unicodes[s]]=true - end - markclasses[name]=t - end - end -end -actions["reorganize features"]=function(data,filename,raw) - local features={} - data.resources.features=features - for k,what in next,otf.glists do - local dw=raw[what] - if dw then - local f={} - features[what]=f - for i=1,#dw do - local d=dw[i] - local dfeatures=d.features - if dfeatures then - for i=1,#dfeatures do - local df=dfeatures[i] - local tag=strip(lower(df.tag)) - local ft=f[tag] - if not ft then - ft={} - f[tag]=ft - end - local dscripts=df.scripts - for i=1,#dscripts do - local d=dscripts[i] - local languages=d.langs - local script=strip(lower(d.script)) - local fts=ft[script] if not fts then fts={} ft[script]=fts end - for i=1,#languages do - fts[strip(lower(languages[i]))]=true - end - end - end - end - end - end - end -end -actions["reorganize anchor classes"]=function(data,filename,raw) - local resources=data.resources - local anchor_to_lookup={} - local lookup_to_anchor={} - resources.anchor_to_lookup=anchor_to_lookup - resources.lookup_to_anchor=lookup_to_anchor - local classes=raw.anchor_classes - if classes then - for c=1,#classes do - local class=classes[c] - local anchor=class.name - local lookups=class.lookup - if type(lookups)~="table" then - lookups={ lookups } - end - local a=anchor_to_lookup[anchor] - if not a then - a={} - anchor_to_lookup[anchor]=a - end - for l=1,#lookups do - local lookup=lookups[l] - local l=lookup_to_anchor[lookup] - if l then - l[anchor]=true - else - l={ [anchor]=true } - lookup_to_anchor[lookup]=l - end - a[lookup]=true - end - end - end -end -actions["prepare tounicode"]=function(data,filename,raw) - fonts.mappings.addtounicode(data,filename) -end -local g_directions={ - gsub_contextchain=1, - gpos_contextchain=1, - gsub_reversecontextchain=-1, - gpos_reversecontextchain=-1, -} -actions["reorganize subtables"]=function(data,filename,raw) - local resources=data.resources - local sequences={} - local lookups={} - local chainedfeatures={} - resources.sequences=sequences - resources.lookups=lookups - for _,what in next,otf.glists do - local dw=raw[what] - if dw then - for k=1,#dw do - local gk=dw[k] - local features=gk.features - local typ=gk.type - local chain=g_directions[typ] or 0 - local subtables=gk.subtables - if subtables then - local t={} - for s=1,#subtables do - t[s]=subtables[s].name - end - subtables=t - end - local flags,markclass=gk.flags,nil - if flags then - local t={ - (flags.ignorecombiningmarks and "mark") or false, - (flags.ignoreligatures and "ligature") or false, - (flags.ignorebaseglyphs and "base") or false, - flags.r2l or false, - } - markclass=flags.mark_class - if markclass then - markclass=resources.markclasses[markclass] - end - flags=t - end - local name=gk.name - if not name then - report_otf("skipping weird lookup number %s",k) - elseif features then - local f={} - local o={} - for i=1,#features do - local df=features[i] - local tag=strip(lower(df.tag)) - local 
ft=f[tag] - if not ft then - ft={} - f[tag]=ft - o[#o+1]=tag - end - local dscripts=df.scripts - for i=1,#dscripts do - local d=dscripts[i] - local languages=d.langs - local script=strip(lower(d.script)) - local fts=ft[script] if not fts then fts={} ft[script]=fts end - for i=1,#languages do - fts[strip(lower(languages[i]))]=true - end - end - end - sequences[#sequences+1]={ - type=typ, - chain=chain, - flags=flags, - name=name, - subtables=subtables, - markclass=markclass, - features=f, - order=o, - } - else - lookups[name]={ - type=typ, - chain=chain, - flags=flags, - subtables=subtables, - markclass=markclass, - } - end - end - end - end -end -actions["prepare lookups"]=function(data,filename,raw) - local lookups=raw.lookups - if lookups then - data.lookups=lookups - end -end -local function t_uncover(splitter,cache,covers) - local result={} - for n=1,#covers do - local cover=covers[n] - local uncovered=cache[cover] - if not uncovered then - uncovered=lpegmatch(splitter,cover) - cache[cover]=uncovered - end - result[n]=uncovered - end - return result -end -local function s_uncover(splitter,cache,cover) - if cover=="" then - return nil - else - local uncovered=cache[cover] - if not uncovered then - uncovered=lpegmatch(splitter,cover) - cache[cover]=uncovered - end - return { uncovered } - end -end -local function t_hashed(t,cache) - if t then - local ht={} - for i=1,#t do - local ti=t[i] - local tih=cache[ti] - if not tih then - local tn=#ti - if tn==1 then - tih={ [ti[1]]=true } - else - tih={} - for i=1,tn do - tih[ti[i]]=true - end - end - cache[ti]=tih - end - ht[i]=tih - end - return ht - else - return nil - end -end -local function s_hashed(t,cache) - if t then - local tf=t[1] - local nf=#tf - if nf==1 then - return { [tf[1]]=true } - else - local ht={} - for i=1,nf do - ht[i]={ [tf[i]]=true } - end - return ht - end - else - return nil - end -end -local function r_uncover(splitter,cache,cover,replacements) - if cover=="" then - return nil - else - local uncovered=cover[1] - local replaced=cache[replacements] - if not replaced then - replaced=lpegmatch(splitter,replacements) - cache[replacements]=replaced - end - local nu,nr=#uncovered,#replaced - local r={} - if nu==nr then - for i=1,nu do - r[uncovered[i]]=replaced[i] - end - end - return r - end -end -actions["reorganize lookups"]=function(data,filename,raw) - if data.lookups then - local splitter=data.helpers.tounicodetable - local t_u_cache={} - local s_u_cache=t_u_cache - local t_h_cache={} - local s_h_cache=t_h_cache - local r_u_cache={} - for _,lookup in next,data.lookups do - local rules=lookup.rules - if rules then - local format=lookup.format - if format=="class" then - local before_class=lookup.before_class - if before_class then - before_class=t_uncover(splitter,t_u_cache,reversed(before_class)) - end - local current_class=lookup.current_class - if current_class then - current_class=t_uncover(splitter,t_u_cache,current_class) - end - local after_class=lookup.after_class - if after_class then - after_class=t_uncover(splitter,t_u_cache,after_class) - end - for i=1,#rules do - local rule=rules[i] - local class=rule.class - local before=class.before - if before then - for i=1,#before do - before[i]=before_class[before[i]] or {} - end - rule.before=t_hashed(before,t_h_cache) - end - local current=class.current - local lookups=rule.lookups - if current then - for i=1,#current do - current[i]=current_class[current[i]] or {} - if lookups and not lookups[i] then - lookups[i]="" - end - end - 
rule.current=t_hashed(current,t_h_cache) - end - local after=class.after - if after then - for i=1,#after do - after[i]=after_class[after[i]] or {} - end - rule.after=t_hashed(after,t_h_cache) - end - rule.class=nil - end - lookup.before_class=nil - lookup.current_class=nil - lookup.after_class=nil - lookup.format="coverage" - elseif format=="coverage" then - for i=1,#rules do - local rule=rules[i] - local coverage=rule.coverage - if coverage then - local before=coverage.before - if before then - before=t_uncover(splitter,t_u_cache,reversed(before)) - rule.before=t_hashed(before,t_h_cache) - end - local current=coverage.current - if current then - current=t_uncover(splitter,t_u_cache,current) - local lookups=rule.lookups - if lookups then - for i=1,#current do - if not lookups[i] then - lookups[i]="" - end - end - end - rule.current=t_hashed(current,t_h_cache) - end - local after=coverage.after - if after then - after=t_uncover(splitter,t_u_cache,after) - rule.after=t_hashed(after,t_h_cache) - end - rule.coverage=nil - end - end - elseif format=="reversecoverage" then - for i=1,#rules do - local rule=rules[i] - local reversecoverage=rule.reversecoverage - if reversecoverage then - local before=reversecoverage.before - if before then - before=t_uncover(splitter,t_u_cache,reversed(before)) - rule.before=t_hashed(before,t_h_cache) - end - local current=reversecoverage.current - if current then - current=t_uncover(splitter,t_u_cache,current) - rule.current=t_hashed(current,t_h_cache) - end - local after=reversecoverage.after - if after then - after=t_uncover(splitter,t_u_cache,after) - rule.after=t_hashed(after,t_h_cache) - end - local replacements=reversecoverage.replacements - if replacements then - rule.replacements=r_uncover(splitter,r_u_cache,current,replacements) - end - rule.reversecoverage=nil - end - end - elseif format=="glyphs" then - for i=1,#rules do - local rule=rules[i] - local glyphs=rule.glyphs - if glyphs then - local fore=glyphs.fore - if fore and fore~="" then - fore=s_uncover(splitter,s_u_cache,fore) - rule.after=s_hashed(fore,s_h_cache) - end - local back=glyphs.back - if back then - back=s_uncover(splitter,s_u_cache,back) - rule.before=s_hashed(back,s_h_cache) - end - local names=glyphs.names - if names then - names=s_uncover(splitter,s_u_cache,names) - rule.current=s_hashed(names,s_h_cache) - end - rule.glyphs=nil - local lookups=rule.lookups - if lookups then - for i=1,#names do - if not lookups[i] then - lookups[i]="" - end - end - end - end - end - end - end - end - end -end -local function check_variants(unicode,the_variants,splitter,unicodes) - local variants=the_variants.variants - if variants then - local glyphs=lpegmatch(splitter,variants) - local done={ [unicode]=true } - local n=0 - for i=1,#glyphs do - local g=glyphs[i] - if done[g] then - if i>1 then - report_otf("skipping cyclic reference %U in math variant %U",g,unicode) - end - else - if n==0 then - n=1 - variants={ g } - else - n=n+1 - variants[n]=g - end - done[g]=true - end - end - if n==0 then - variants=nil - end - end - local parts=the_variants.parts - if parts then - local p=#parts - if p>0 then - for i=1,p do - local pi=parts[i] - pi.glyph=unicodes[pi.component] or 0 - pi.component=nil - end - else - parts=nil - end - end - local italic_correction=the_variants.italic_correction - if italic_correction and italic_correction==0 then - italic_correction=nil - end - return variants,parts,italic_correction -end -actions["analyze math"]=function(data,filename,raw) - if raw.math then - 
data.metadata.math=raw.math - local unicodes=data.resources.unicodes - local splitter=data.helpers.tounicodetable - for unicode,description in next,data.descriptions do - local glyph=description.glyph - local mathkerns=glyph.mathkern - local horiz_variants=glyph.horiz_variants - local vert_variants=glyph.vert_variants - local top_accent=glyph.top_accent - if mathkerns or horiz_variants or vert_variants or top_accent then - local math={} - if top_accent then - math.top_accent=top_accent - end - if mathkerns then - for k,v in next,mathkerns do - if not next(v) then - mathkerns[k]=nil - else - for k,v in next,v do - if v==0 then - k[v]=nil - end - end - end - end - math.kerns=mathkerns - end - if horiz_variants then - math.horiz_variants,math.horiz_parts,math.horiz_italic_correction=check_variants(unicode,horiz_variants,splitter,unicodes) - end - if vert_variants then - math.vert_variants,math.vert_parts,math.vert_italic_correction=check_variants(unicode,vert_variants,splitter,unicodes) - end - local italic_correction=description.italic - if italic_correction and italic_correction~=0 then - math.italic_correction=italic_correction - end - description.math=math - end - end - end -end -actions["reorganize glyph kerns"]=function(data,filename,raw) - local descriptions=data.descriptions - local resources=data.resources - local unicodes=resources.unicodes - for unicode,description in next,descriptions do - local kerns=description.glyph.kerns - if kerns then - local newkerns={} - for k,kern in next,kerns do - local name=kern.char - local offset=kern.off - local lookup=kern.lookup - if name and offset and lookup then - local unicode=unicodes[name] - if unicode then - if type(lookup)=="table" then - for l=1,#lookup do - local lookup=lookup[l] - local lookupkerns=newkerns[lookup] - if lookupkerns then - lookupkerns[unicode]=offset - else - newkerns[lookup]={ [unicode]=offset } - end - end - else - local lookupkerns=newkerns[lookup] - if lookupkerns then - lookupkerns[unicode]=offset - else - newkerns[lookup]={ [unicode]=offset } - end - end - elseif trace_loading then - report_otf("problems with unicode %a of kern %a of glyph %U",name,k,unicode) - end - end - end - description.kerns=newkerns - end - end -end -actions["merge kern classes"]=function(data,filename,raw) - local gposlist=raw.gpos - if gposlist then - local descriptions=data.descriptions - local resources=data.resources - local unicodes=resources.unicodes - local splitter=data.helpers.tounicodetable - local ignored=0 - local blocked=0 - for gp=1,#gposlist do - local gpos=gposlist[gp] - local subtables=gpos.subtables - if subtables then - local first_done={} - local split={} - for s=1,#subtables do - local subtable=subtables[s] - local kernclass=subtable.kernclass - local lookup=subtable.lookup or subtable.name - if kernclass then - if #kernclass>0 then - kernclass=kernclass[1] - lookup=type(kernclass.lookup)=="string" and kernclass.lookup or lookup - report_otf("fixing kernclass table of lookup %a",lookup) - end - local firsts=kernclass.firsts - local seconds=kernclass.seconds - local offsets=kernclass.offsets - for n,s in next,firsts do - split[s]=split[s] or lpegmatch(splitter,s) - end - local maxseconds=0 - for n,s in next,seconds do - if n>maxseconds then - maxseconds=n - end - split[s]=split[s] or lpegmatch(splitter,s) - end - for fk=1,#firsts do - local fv=firsts[fk] - local splt=split[fv] - if splt then - local extrakerns={} - local baseoffset=(fk-1)*maxseconds - for sk=2,maxseconds do - local sv=seconds[sk] - local splt=split[sv] - 
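-- Editorial sketch (not from the sources): the class kern matrix is stored as a flat
-- 'offsets' array, so the kern between first class fk and second class sk is looked
-- up at offsets[(fk-1)*maxseconds+sk], as done just below, e.g.
--[==[
local maxseconds = 3
local offsets    = { 0, -10, 0, 0, 0, -25 } -- two first classes times three second classes
local fk, sk     = 2, 3
assert(offsets[(fk-1)*maxseconds+sk] == -25)
]==]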
if splt then - local offset=offsets[baseoffset+sk] - if offset then - for i=1,#splt do - extrakerns[splt[i]]=offset - end - end - end - end - for i=1,#splt do - local first_unicode=splt[i] - if first_done[first_unicode] then - report_otf("lookup %a: ignoring further kerns of %C",lookup,first_unicode) - blocked=blocked+1 - else - first_done[first_unicode]=true - local description=descriptions[first_unicode] - if description then - local kerns=description.kerns - if not kerns then - kerns={} - description.kerns=kerns - end - local lookupkerns=kerns[lookup] - if not lookupkerns then - lookupkerns={} - kerns[lookup]=lookupkerns - end - if overloadkerns then - for second_unicode,kern in next,extrakerns do - lookupkerns[second_unicode]=kern - end - else - for second_unicode,kern in next,extrakerns do - local k=lookupkerns[second_unicode] - if not k then - lookupkerns[second_unicode]=kern - elseif k~=kern then - if trace_loading then - report_otf("lookup %a: ignoring overload of kern between %C and %C, rejecting %a, keeping %a",lookup,first_unicode,second_unicode,k,kern) - end - ignored=ignored+1 - end - end - end - elseif trace_loading then - report_otf("no glyph data for %U",first_unicode) - end - end - end - end - end - subtable.kernclass={} - end - end - end - end - if ignored>0 then - report_otf("%s kern overloads ignored",ignored) - end - if blocked>0 then - report_otf("%s succesive kerns blocked",blocked) - end - end -end -actions["check glyphs"]=function(data,filename,raw) - for unicode,description in next,data.descriptions do - description.glyph=nil - end -end -local valid=(R("\x00\x7E")-S("(){}[]<>%/ \n\r\f\v"))^0*P(-1) -local function valid_ps_name(str) - return str and str~="" and #str<64 and lpegmatch(valid,str) and true or false -end -actions["check metadata"]=function(data,filename,raw) - local metadata=data.metadata - for _,k in next,mainfields do - if valid_fields[k] then - local v=raw[k] - if not metadata[k] then - metadata[k]=v - end - end - end - local ttftables=metadata.ttf_tables - if ttftables then - for i=1,#ttftables do - ttftables[i].data="deleted" - end - end - if metadata.validation_state and table.contains(metadata.validation_state,"bad_ps_fontname") then - local function valid(what) - local names=raw.names - for i=1,#names do - local list=names[i] - local names=list.names - if names then - local name=names[what] - if name and valid_ps_name(name) then - return name - end - end - end - end - local function check(what) - local oldname=metadata[what] - if valid_ps_name(oldname) then - report_otf("ignoring warning %a because %s %a is proper ASCII","bad_ps_fontname",what,oldname) - else - local newname=valid(what) - if not newname then - newname=formatters["bad-%s-%s"](what,file.nameonly(filename)) - end - local warning=formatters["overloading %s from invalid ASCII name %a to %a"](what,oldname,newname) - data.warnings[#data.warnings+1]=warning - report_otf(warning) - metadata[what]=newname - end - end - check("fontname") - check("fullname") - end -end -actions["cleanup tables"]=function(data,filename,raw) - local duplicates=data.resources.duplicates - if duplicates then - for k,v in next,duplicates do - if #v==1 then - duplicates[k]=v[1] - end - end - end - data.resources.indices=nil - data.resources.unicodes=nil - data.helpers=nil -end -actions["reorganize glyph lookups"]=function(data,filename,raw) - local resources=data.resources - local unicodes=resources.unicodes - local descriptions=data.descriptions - local splitter=data.helpers.tounicodelist - local 
lookuptypes=resources.lookuptypes - for unicode,description in next,descriptions do - local lookups=description.glyph.lookups - if lookups then - for tag,lookuplist in next,lookups do - for l=1,#lookuplist do - local lookup=lookuplist[l] - local specification=lookup.specification - local lookuptype=lookup.type - local lt=lookuptypes[tag] - if not lt then - lookuptypes[tag]=lookuptype - elseif lt~=lookuptype then - report_otf("conflicting lookuptypes, %a points to %a and %a",tag,lt,lookuptype) - end - if lookuptype=="ligature" then - lookuplist[l]={ lpegmatch(splitter,specification.components) } - elseif lookuptype=="alternate" then - lookuplist[l]={ lpegmatch(splitter,specification.components) } - elseif lookuptype=="substitution" then - lookuplist[l]=unicodes[specification.variant] - elseif lookuptype=="multiple" then - lookuplist[l]={ lpegmatch(splitter,specification.components) } - elseif lookuptype=="position" then - lookuplist[l]={ - specification.x or 0, - specification.y or 0, - specification.h or 0, - specification.v or 0 - } - elseif lookuptype=="pair" then - local one=specification.offsets[1] - local two=specification.offsets[2] - local paired=unicodes[specification.paired] - if one then - if two then - lookuplist[l]={ paired,{ one.x or 0,one.y or 0,one.h or 0,one.v or 0 },{ two.x or 0,two.y or 0,two.h or 0,two.v or 0 } } - else - lookuplist[l]={ paired,{ one.x or 0,one.y or 0,one.h or 0,one.v or 0 } } - end - else - if two then - lookuplist[l]={ paired,{},{ two.x or 0,two.y or 0,two.h or 0,two.v or 0} } - else - lookuplist[l]={ paired } - end - end - end - end - end - local slookups,mlookups - for tag,lookuplist in next,lookups do - if #lookuplist==1 then - if slookups then - slookups[tag]=lookuplist[1] - else - slookups={ [tag]=lookuplist[1] } - end - else - if mlookups then - mlookups[tag]=lookuplist - else - mlookups={ [tag]=lookuplist } - end - end - end - if slookups then - description.slookups=slookups - end - if mlookups then - description.mlookups=mlookups - end - end - end -end -actions["reorganize glyph anchors"]=function(data,filename,raw) - local descriptions=data.descriptions - for unicode,description in next,descriptions do - local anchors=description.glyph.anchors - if anchors then - for class,data in next,anchors do - if class=="baselig" then - for tag,specification in next,data do - for i=1,#specification do - local si=specification[i] - specification[i]={ si.x or 0,si.y or 0 } - end - end - else - for tag,specification in next,data do - data[tag]={ specification.x or 0,specification.y or 0 } - end - end - end - description.anchors=anchors - end - end -end -local bogusname=(P("uni")+P("u"))*R("AF","09")^4+(P("index")+P("glyph")+S("Ii")*P("dentity")*P(".")^0)*R("09")^1 -local uselessname=(1-bogusname)^0*bogusname -actions["purge names"]=function(data,filename,raw) - if purge_names then - local n=0 - for u,d in next,data.descriptions do - if lpegmatch(uselessname,d.name) then - n=n+1 - d.name=nil - end - end - if n>0 then - report_otf("%s bogus names removed",n) - end - end -end -actions["compact lookups"]=function(data,filename,raw) - if not compact_lookups then - report_otf("not compacting") - return - end - local last=0 - local tags=table.setmetatableindex({}, - function(t,k) - last=last+1 - t[k]=last - return last - end - ) - local descriptions=data.descriptions - local resources=data.resources - for u,d in next,descriptions do - local slookups=d.slookups - if type(slookups)=="table" then - local s={} - for k,v in next,slookups do - s[tags[k]]=v - end - 
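-- Editorial sketch (not from the sources): 'tags' is an auto-indexing table, so the
-- first lookup name it sees maps to 1, the next new name to 2, and repeated names
-- reuse their id; string lookup keys are thereby replaced by small integers, e.g.
--[==[
local a = tags["ss01 substitution lookup"] -- 1 (hypothetical name, first seen)
local b = tags["kern positioning lookup"]  -- 2 (hypothetical name)
local c = tags["ss01 substitution lookup"] -- 1 again, cached
]==]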
d.slookups=s - end - local mlookups=d.mlookups - if type(mlookups)=="table" then - local m={} - for k,v in next,mlookups do - m[tags[k]]=v - end - d.mlookups=m - end - local kerns=d.kerns - if type(kerns)=="table" then - local t={} - for k,v in next,kerns do - t[tags[k]]=v - end - d.kerns=t - end - end - local lookups=data.lookups - if lookups then - local l={} - for k,v in next,lookups do - local rules=v.rules - if rules then - for i=1,#rules do - local l=rules[i].lookups - if type(l)=="table" then - for i=1,#l do - l[i]=tags[l[i]] - end - end - end - end - l[tags[k]]=v - end - data.lookups=l - end - local lookups=resources.lookups - if lookups then - local l={} - for k,v in next,lookups do - local s=v.subtables - if type(s)=="table" then - for i=1,#s do - s[i]=tags[s[i]] - end - end - l[tags[k]]=v - end - resources.lookups=l - end - local sequences=resources.sequences - if sequences then - for i=1,#sequences do - local s=sequences[i] - local n=s.name - if n then - s.name=tags[n] - end - local t=s.subtables - if type(t)=="table" then - for i=1,#t do - t[i]=tags[t[i]] - end - end - end - end - local lookuptypes=resources.lookuptypes - if lookuptypes then - local l={} - for k,v in next,lookuptypes do - l[tags[k]]=v - end - resources.lookuptypes=l - end - local anchor_to_lookup=resources.anchor_to_lookup - if anchor_to_lookup then - for anchor,lookups in next,anchor_to_lookup do - local l={} - for lookup,value in next,lookups do - l[tags[lookup]]=value - end - anchor_to_lookup[anchor]=l - end - end - local lookup_to_anchor=resources.lookup_to_anchor - if lookup_to_anchor then - local l={} - for lookup,value in next,lookup_to_anchor do - l[tags[lookup]]=value - end - resources.lookup_to_anchor=l - end - tags=table.swapped(tags) - report_otf("%s lookup tags compacted",#tags) - resources.lookuptags=tags -end -function otf.setfeatures(tfmdata,features) - local okay=constructors.initializefeatures("otf",tfmdata,features,trace_features,report_otf) - if okay then - return constructors.collectprocessors("otf",tfmdata,features,trace_features,report_otf) - else - return {} - end -end -local function copytotfm(data,cache_id) - if data then - local metadata=data.metadata - local warnings=data.warnings - local resources=data.resources - local properties=derivetable(data.properties) - local descriptions=derivetable(data.descriptions) - local goodies=derivetable(data.goodies) - local characters={} - local parameters={} - local mathparameters={} - local pfminfo=metadata.pfminfo or {} - local resources=data.resources - local unicodes=resources.unicodes - local spaceunits=500 - local spacer="space" - local designsize=metadata.designsize or metadata.design_size or 100 - local mathspecs=metadata.math - if designsize==0 then - designsize=100 - end - if mathspecs then - for name,value in next,mathspecs do - mathparameters[name]=value - end - end - for unicode,_ in next,data.descriptions do - characters[unicode]={} - end - if mathspecs then - for unicode,character in next,characters do - local d=descriptions[unicode] - local m=d.math - if m then - local variants=m.horiz_variants - local parts=m.horiz_parts - if variants then - local c=character - for i=1,#variants do - local un=variants[i] - c.next=un - c=characters[un] - end - c.horiz_variants=parts - elseif parts then - character.horiz_variants=parts - end - local variants=m.vert_variants - local parts=m.vert_parts - if variants then - local c=character - for i=1,#variants do - local un=variants[i] - c.next=un - c=characters[un] - end - c.vert_variants=parts - 
elseif parts then - character.vert_variants=parts - end - local italic_correction=m.vert_italic_correction - if italic_correction then - character.vert_italic_correction=italic_correction - end - local top_accent=m.top_accent - if top_accent then - character.top_accent=top_accent - end - local kerns=m.kerns - if kerns then - character.mathkerns=kerns - end - end - end - end - local filename=constructors.checkedfilename(resources) - local fontname=metadata.fontname - local fullname=metadata.fullname or fontname - local psname=fontname or fullname - local units=metadata.units_per_em or 1000 - if units==0 then - units=1000 - metadata.units_per_em=1000 - report_otf("changing %a units to %a",0,units) - end - local monospaced=metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose.proportion=="Monospaced") - local charwidth=pfminfo.avgwidth - local charxheight=pfminfo.os2_xheight and pfminfo.os2_xheight>0 and pfminfo.os2_xheight - local italicangle=metadata.italicangle - properties.monospaced=monospaced - parameters.italicangle=italicangle - parameters.charwidth=charwidth - parameters.charxheight=charxheight - local space=0x0020 - local emdash=0x2014 - if monospaced then - if descriptions[space] then - spaceunits,spacer=descriptions[space].width,"space" - end - if not spaceunits and descriptions[emdash] then - spaceunits,spacer=descriptions[emdash].width,"emdash" - end - if not spaceunits and charwidth then - spaceunits,spacer=charwidth,"charwidth" - end - else - if descriptions[space] then - spaceunits,spacer=descriptions[space].width,"space" - end - if not spaceunits and descriptions[emdash] then - spaceunits,spacer=descriptions[emdash].width/2,"emdash/2" - end - if not spaceunits and charwidth then - spaceunits,spacer=charwidth,"charwidth" - end - end - spaceunits=tonumber(spaceunits) or 500 - parameters.slant=0 - parameters.space=spaceunits - parameters.space_stretch=units/2 - parameters.space_shrink=1*units/3 - parameters.x_height=2*units/5 - parameters.quad=units - if spaceunits<2*units/5 then - end - if italicangle and italicangle~=0 then - parameters.italicangle=italicangle - parameters.italicfactor=math.cos(math.rad(90+italicangle)) - parameters.slant=- math.tan(italicangle*math.pi/180) - end - if monospaced then - parameters.space_stretch=0 - parameters.space_shrink=0 - elseif syncspace then - parameters.space_stretch=spaceunits/2 - parameters.space_shrink=spaceunits/3 - end - parameters.extra_space=parameters.space_shrink - if charxheight then - parameters.x_height=charxheight - else - local x=0x0078 - if x then - local x=descriptions[x] - if x then - parameters.x_height=x.height - end - end - end - parameters.designsize=(designsize/10)*65536 - parameters.ascender=abs(metadata.ascent or 0) - parameters.descender=abs(metadata.descent or 0) - parameters.units=units - properties.space=spacer - properties.encodingbytes=2 - properties.format=data.format or otf_format(filename) or formats.otf - properties.noglyphnames=true - properties.filename=filename - properties.fontname=fontname - properties.fullname=fullname - properties.psname=psname - properties.name=filename or fullname - if warnings and #warnings>0 then - report_otf("warnings for font: %s",filename) - report_otf() - for i=1,#warnings do - report_otf(" %s",warnings[i]) - end - report_otf() - end - return { - characters=characters, - descriptions=descriptions, - parameters=parameters, - mathparameters=mathparameters, - resources=resources, - properties=properties, - goodies=goodies, - warnings=warnings, - } - end -end -local 
function otftotfm(specification) - local cache_id=specification.hash - local tfmdata=containers.read(constructors.cache,cache_id) - if not tfmdata then - local name=specification.name - local sub=specification.sub - local filename=specification.filename - local features=specification.features.normal - local rawdata=otf.load(filename,sub,features and features.featurefile) - if rawdata and next(rawdata) then - local descriptions=rawdata.descriptions - local duplicates=rawdata.resources.duplicates - if duplicates then - local nofduplicates,nofduplicated=0,0 - for parent,list in next,duplicates do - if type(list)=="table" then - local n=#list - for i=1,n do - local unicode=list[i] - if not descriptions[unicode] then - descriptions[unicode]=descriptions[parent] - nofduplicated=nofduplicated+1 - end - end - nofduplicates=nofduplicates+n - else - if not descriptions[list] then - descriptions[list]=descriptions[parent] - nofduplicated=nofduplicated+1 - end - nofduplicates=nofduplicates+1 - end - end - if trace_otf and nofduplicated~=nofduplicates then - report_otf("%i extra duplicates copied out of %i",nofduplicated,nofduplicates) - end - end - rawdata.lookuphash={} - tfmdata=copytotfm(rawdata,cache_id) - if tfmdata and next(tfmdata) then - local features=constructors.checkedfeatures("otf",features) - local shared=tfmdata.shared - if not shared then - shared={} - tfmdata.shared=shared - end - shared.rawdata=rawdata - shared.dynamics={} - tfmdata.changed={} - shared.features=features - shared.processes=otf.setfeatures(tfmdata,features) - end - end - containers.write(constructors.cache,cache_id,tfmdata) - end - return tfmdata -end -local function read_from_otf(specification) - local tfmdata=otftotfm(specification) - if tfmdata then - tfmdata.properties.name=specification.name - tfmdata.properties.sub=specification.sub - tfmdata=constructors.scale(tfmdata,specification) - local allfeatures=tfmdata.shared.features or specification.features.normal - constructors.applymanipulators("otf",tfmdata,allfeatures,trace_features,report_otf) - constructors.setname(tfmdata,specification) - fonts.loggers.register(tfmdata,file.suffix(specification.filename),specification) - end - return tfmdata -end -local function checkmathsize(tfmdata,mathsize) - local mathdata=tfmdata.shared.rawdata.metadata.math - local mathsize=tonumber(mathsize) - if mathdata then - local parameters=tfmdata.parameters - parameters.scriptpercentage=mathdata.ScriptPercentScaleDown - parameters.scriptscriptpercentage=mathdata.ScriptScriptPercentScaleDown - parameters.mathsize=mathsize - end -end -registerotffeature { - name="mathsize", - description="apply mathsize specified in the font", - initializers={ - base=checkmathsize, - node=checkmathsize, - } -} -function otf.collectlookups(rawdata,kind,script,language) - local sequences=rawdata.resources.sequences - if sequences then - local featuremap,featurelist={},{} - for s=1,#sequences do - local sequence=sequences[s] - local features=sequence.features - features=features and features[kind] - features=features and (features[script] or features[default] or features[wildcard]) - features=features and (features[language] or features[default] or features[wildcard]) - if features then - local subtables=sequence.subtables - if subtables then - for s=1,#subtables do - local ss=subtables[s] - if not featuremap[s] then - featuremap[ss]=true - featurelist[#featurelist+1]=ss - end - end - end - end - end - if #featurelist>0 then - return featuremap,featurelist - end - end - return nil,nil -end -local 
function check_otf(forced,specification,suffix) - local name=specification.name - if forced then - name=specification.forcedname - end - local fullname=findbinfile(name,suffix) or "" - if fullname=="" then - fullname=fonts.names.getfilename(name,suffix) or "" - end - if fullname~="" and not fonts.names.ignoredfile(fullname) then - specification.filename=fullname - return read_from_otf(specification) - end -end -local function opentypereader(specification,suffix) - local forced=specification.forced or "" - if formats[forced] then - return check_otf(true,specification,forced) - else - return check_otf(false,specification,suffix) - end -end -readers.opentype=opentypereader -function readers.otf (specification) return opentypereader(specification,"otf") end -function readers.ttf (specification) return opentypereader(specification,"ttf") end -function readers.ttc (specification) return opentypereader(specification,"ttf") end -function readers.dfont(specification) return opentypereader(specification,"ttf") end -function otf.scriptandlanguage(tfmdata,attr) - local properties=tfmdata.properties - return properties.script or "dflt",properties.language or "dflt" -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-otb']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local concat=table.concat -local format,gmatch,gsub,find,match,lower,strip=string.format,string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip -local type,next,tonumber,tostring,rawget=type,next,tonumber,tostring,rawget -local lpegmatch=lpeg.match -local utfchar=utf.char -local trace_baseinit=false trackers.register("otf.baseinit",function(v) trace_baseinit=v end) -local trace_singles=false trackers.register("otf.singles",function(v) trace_singles=v end) -local trace_multiples=false trackers.register("otf.multiples",function(v) trace_multiples=v end) -local trace_alternatives=false trackers.register("otf.alternatives",function(v) trace_alternatives=v end) -local trace_ligatures=false trackers.register("otf.ligatures",function(v) trace_ligatures=v end) -local trace_ligatures_detail=false trackers.register("otf.ligatures.detail",function(v) trace_ligatures_detail=v end) -local trace_kerns=false trackers.register("otf.kerns",function(v) trace_kerns=v end) -local trace_preparing=false trackers.register("otf.preparing",function(v) trace_preparing=v end) -local report_prepare=logs.reporter("fonts","otf prepare") -local fonts=fonts -local otf=fonts.handlers.otf -local otffeatures=otf.features -local registerotffeature=otffeatures.register -otf.defaultbasealternate="none" -local wildcard="*" -local default="dflt" -local formatters=string.formatters -local f_unicode=formatters["%U"] -local f_uniname=formatters["%U (%s)"] -local f_unilist=formatters["% t (% t)"] -local function gref(descriptions,n) - if type(n)=="number" then - local name=descriptions[n].name - if name then - return f_uniname(n,name) - else - return f_unicode(n) - end - elseif n then - local num,nam,j={},{},0 - for i=1,#n do - local ni=n[i] - if tonumber(ni) then - j=j+1 - local di=descriptions[ni] - num[j]=f_unicode(ni) - nam[j]=di and di.name or "-" - end - end - return f_unilist(num,nam) - else - return "" - end -end -local function cref(feature,lookuptags,lookupname) - if lookupname then - return 
formatters["feature %a, lookup %a"](feature,lookuptags[lookupname]) - else - return formatters["feature %a"](feature) - end -end -local function report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,comment) - report_prepare("%s: base alternate %s => %s (%S => %S)", - cref(feature,lookuptags,lookupname), - gref(descriptions,unicode), - replacement and gref(descriptions,replacement), - value, - comment) -end -local function report_substitution(feature,lookuptags,lookupname,descriptions,unicode,substitution) - report_prepare("%s: base substitution %s => %S", - cref(feature,lookuptags,lookupname), - gref(descriptions,unicode), - gref(descriptions,substitution)) -end -local function report_ligature(feature,lookuptags,lookupname,descriptions,unicode,ligature) - report_prepare("%s: base ligature %s => %S", - cref(feature,lookuptags,lookupname), - gref(descriptions,ligature), - gref(descriptions,unicode)) -end -local function report_kern(feature,lookuptags,lookupname,descriptions,unicode,otherunicode,value) - report_prepare("%s: base kern %s + %s => %S", - cref(feature,lookuptags,lookupname), - gref(descriptions,unicode), - gref(descriptions,otherunicode), - value) -end -local basemethods={} -local basemethod="" -local function applybasemethod(what,...) - local m=basemethods[basemethod][what] - if m then - return m(...) - end -end -local basehash,basehashes,applied={},1,{} -local function registerbasehash(tfmdata) - local properties=tfmdata.properties - local hash=concat(applied," ") - local base=basehash[hash] - if not base then - basehashes=basehashes+1 - base=basehashes - basehash[hash]=base - end - properties.basehash=base - properties.fullname=properties.fullname.."-"..base - applied={} -end -local function registerbasefeature(feature,value) - applied[#applied+1]=feature.."="..tostring(value) -end -local trace=false -local function finalize_ligatures(tfmdata,ligatures) - local nofligatures=#ligatures - if nofligatures>0 then - local characters=tfmdata.characters - local descriptions=tfmdata.descriptions - local resources=tfmdata.resources - local unicodes=resources.unicodes - local private=resources.private - local alldone=false - while not alldone do - local done=0 - for i=1,nofligatures do - local ligature=ligatures[i] - if ligature then - local unicode,lookupdata=ligature[1],ligature[2] - if trace_ligatures_detail then - report_prepare("building % a into %a",lookupdata,unicode) - end - local size=#lookupdata - local firstcode=lookupdata[1] - local firstdata=characters[firstcode] - local okay=false - if firstdata then - local firstname="ctx_"..firstcode - for i=1,size-1 do - local firstdata=characters[firstcode] - if not firstdata then - firstcode=private - if trace_ligatures_detail then - report_prepare("defining %a as %a",firstname,firstcode) - end - unicodes[firstname]=firstcode - firstdata={ intermediate=true,ligatures={} } - characters[firstcode]=firstdata - descriptions[firstcode]={ name=firstname } - private=private+1 - end - local target - local secondcode=lookupdata[i+1] - local secondname=firstname.."_"..secondcode - if i==size-1 then - target=unicode - if not rawget(unicodes,secondname) then - unicodes[secondname]=unicode - end - okay=true - else - target=rawget(unicodes,secondname) - if not target then - break - end - end - if trace_ligatures_detail then - report_prepare("codes (%a,%a) + (%a,%a) -> %a",firstname,firstcode,secondname,secondcode,target) - end - local firstligs=firstdata.ligatures - if firstligs then - firstligs[secondcode]={ 
char=target } - else - firstdata.ligatures={ [secondcode]={ char=target } } - end - firstcode=target - firstname=secondname - end - elseif trace_ligatures_detail then - report_prepare("no glyph (%a,%a) for building %a",firstname,firstcode,target) - end - if okay then - ligatures[i]=false - done=done+1 - end - end - end - alldone=done==0 - end - if trace_ligatures_detail then - for k,v in table.sortedhash(characters) do - if v.ligatures then - table.print(v,k) - end - end - end - resources.private=private - return true - end -end -local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist) - local characters=tfmdata.characters - local descriptions=tfmdata.descriptions - local resources=tfmdata.resources - local properties=tfmdata.properties - local changed=tfmdata.changed - local lookuphash=resources.lookuphash - local lookuptypes=resources.lookuptypes - local lookuptags=resources.lookuptags - local ligatures={} - local alternate=tonumber(value) or true and 1 - local defaultalt=otf.defaultbasealternate - local trace_singles=trace_baseinit and trace_singles - local trace_alternatives=trace_baseinit and trace_alternatives - local trace_ligatures=trace_baseinit and trace_ligatures - local actions={ - substitution=function(lookupdata,lookuptags,lookupname,description,unicode) - if trace_singles then - report_substitution(feature,lookuptags,lookupname,descriptions,unicode,lookupdata) - end - changed[unicode]=lookupdata - end, - alternate=function(lookupdata,lookuptags,lookupname,description,unicode) - local replacement=lookupdata[alternate] - if replacement then - changed[unicode]=replacement - if trace_alternatives then - report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"normal") - end - elseif defaultalt=="first" then - replacement=lookupdata[1] - changed[unicode]=replacement - if trace_alternatives then - report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt) - end - elseif defaultalt=="last" then - replacement=lookupdata[#data] - if trace_alternatives then - report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt) - end - else - if trace_alternatives then - report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"unknown") - end - end - end, - ligature=function(lookupdata,lookuptags,lookupname,description,unicode) - if trace_ligatures then - report_ligature(feature,lookuptags,lookupname,descriptions,unicode,lookupdata) - end - ligatures[#ligatures+1]={ unicode,lookupdata } - end, - } - for unicode,character in next,characters do - local description=descriptions[unicode] - local lookups=description.slookups - if lookups then - for l=1,#lookuplist do - local lookupname=lookuplist[l] - local lookupdata=lookups[lookupname] - if lookupdata then - local lookuptype=lookuptypes[lookupname] - local action=actions[lookuptype] - if action then - action(lookupdata,lookuptags,lookupname,description,unicode) - end - end - end - end - local lookups=description.mlookups - if lookups then - for l=1,#lookuplist do - local lookupname=lookuplist[l] - local lookuplist=lookups[lookupname] - if lookuplist then - local lookuptype=lookuptypes[lookupname] - local action=actions[lookuptype] - if action then - for i=1,#lookuplist do - action(lookuplist[i],lookuptags,lookupname,description,unicode) - end - end - end - end - end - end - properties.hasligatures=finalize_ligatures(tfmdata,ligatures) -end -local function 
preparepositionings(tfmdata,feature,value,validlookups,lookuplist) - local characters=tfmdata.characters - local descriptions=tfmdata.descriptions - local resources=tfmdata.resources - local properties=tfmdata.properties - local lookuptags=resources.lookuptags - local sharedkerns={} - local traceindeed=trace_baseinit and trace_kerns - local haskerns=false - for unicode,character in next,characters do - local description=descriptions[unicode] - local rawkerns=description.kerns - if rawkerns then - local s=sharedkerns[rawkerns] - if s==false then - elseif s then - character.kerns=s - else - local newkerns=character.kerns - local done=false - for l=1,#lookuplist do - local lookup=lookuplist[l] - local kerns=rawkerns[lookup] - if kerns then - for otherunicode,value in next,kerns do - if value==0 then - elseif not newkerns then - newkerns={ [otherunicode]=value } - done=true - if traceindeed then - report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,value) - end - elseif not newkerns[otherunicode] then - newkerns[otherunicode]=value - done=true - if traceindeed then - report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,value) - end - end - end - end - end - if done then - sharedkerns[rawkerns]=newkerns - character.kerns=newkerns - haskerns=true - else - sharedkerns[rawkerns]=false - end - end - end - end - properties.haskerns=haskerns -end -basemethods.independent={ - preparesubstitutions=preparesubstitutions, - preparepositionings=preparepositionings, -} -local function makefake(tfmdata,name,present) - local resources=tfmdata.resources - local private=resources.private - local character={ intermediate=true,ligatures={} } - resources.unicodes[name]=private - tfmdata.characters[private]=character - tfmdata.descriptions[private]={ name=name } - resources.private=private+1 - present[name]=private - return character -end -local function make_1(present,tree,name) - for k,v in next,tree do - if k=="ligature" then - present[name]=v - else - make_1(present,v,name.."_"..k) - end - end -end -local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,done,lookuptags,lookupname) - for k,v in next,tree do - if k=="ligature" then - local character=characters[preceding] - if not character then - if trace_baseinit then - report_prepare("weird ligature in lookup %a, current %C, preceding %C",lookuptags[lookupname],v,preceding) - end - character=makefake(tfmdata,name,present) - end - local ligatures=character.ligatures - if ligatures then - ligatures[unicode]={ char=v } - else - character.ligatures={ [unicode]={ char=v } } - end - if done then - local d=done[lookupname] - if not d then - done[lookupname]={ "dummy",v } - else - d[#d+1]=v - end - end - else - local code=present[name] or unicode - local name=name.."_"..k - make_2(present,tfmdata,characters,v,name,code,k,done,lookuptags,lookupname) - end - end -end -local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist) - local characters=tfmdata.characters - local descriptions=tfmdata.descriptions - local resources=tfmdata.resources - local changed=tfmdata.changed - local lookuphash=resources.lookuphash - local lookuptypes=resources.lookuptypes - local lookuptags=resources.lookuptags - local ligatures={} - local alternate=tonumber(value) or true and 1 - local defaultalt=otf.defaultbasealternate - local trace_singles=trace_baseinit and trace_singles - local trace_alternatives=trace_baseinit and trace_alternatives - local trace_ligatures=trace_baseinit and trace_ligatures - for 
l=1,#lookuplist do - local lookupname=lookuplist[l] - local lookupdata=lookuphash[lookupname] - local lookuptype=lookuptypes[lookupname] - for unicode,data in next,lookupdata do - if lookuptype=="substitution" then - if trace_singles then - report_substitution(feature,lookuptags,lookupname,descriptions,unicode,data) - end - changed[unicode]=data - elseif lookuptype=="alternate" then - local replacement=data[alternate] - if replacement then - changed[unicode]=replacement - if trace_alternatives then - report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"normal") - end - elseif defaultalt=="first" then - replacement=data[1] - changed[unicode]=replacement - if trace_alternatives then - report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt) - end - elseif defaultalt=="last" then - replacement=data[#data] - if trace_alternatives then - report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt) - end - else - if trace_alternatives then - report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"unknown") - end - end - elseif lookuptype=="ligature" then - ligatures[#ligatures+1]={ unicode,data,lookupname } - if trace_ligatures then - report_ligature(feature,lookuptags,lookupname,descriptions,unicode,data) - end - end - end - end - local nofligatures=#ligatures - if nofligatures>0 then - local characters=tfmdata.characters - local present={} - local done=trace_baseinit and trace_ligatures and {} - for i=1,nofligatures do - local ligature=ligatures[i] - local unicode,tree=ligature[1],ligature[2] - make_1(present,tree,"ctx_"..unicode) - end - for i=1,nofligatures do - local ligature=ligatures[i] - local unicode,tree,lookupname=ligature[1],ligature[2],ligature[3] - make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,lookuptags,lookupname) - end - end -end -local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist) - local characters=tfmdata.characters - local descriptions=tfmdata.descriptions - local resources=tfmdata.resources - local properties=tfmdata.properties - local lookuphash=resources.lookuphash - local lookuptags=resources.lookuptags - local traceindeed=trace_baseinit and trace_kerns - for l=1,#lookuplist do - local lookupname=lookuplist[l] - local lookupdata=lookuphash[lookupname] - for unicode,data in next,lookupdata do - local character=characters[unicode] - local kerns=character.kerns - if not kerns then - kerns={} - character.kerns=kerns - end - if traceindeed then - for otherunicode,kern in next,data do - if not kerns[otherunicode] and kern~=0 then - kerns[otherunicode]=kern - report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,kern) - end - end - else - for otherunicode,kern in next,data do - if not kerns[otherunicode] and kern~=0 then - kerns[otherunicode]=kern - end - end - end - end - end -end -local function initializehashes(tfmdata) - nodeinitializers.features(tfmdata) -end -basemethods.shared={ - initializehashes=initializehashes, - preparesubstitutions=preparesubstitutions, - preparepositionings=preparepositionings, -} -basemethod="independent" -local function featuresinitializer(tfmdata,value) - if true then - local starttime=trace_preparing and os.clock() - local features=tfmdata.shared.features - local fullname=tfmdata.properties.fullname or "?" 
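-- Editorial sketch (not from the sources): each base feature handled below is recorded
-- via registerbasefeature as "name=value"; registerbasehash then joins that list, maps
-- it to a small integer id and appends the id to the fullname, so fonts set up with
-- different base feature combinations end up with distinct identifiers.
--[==[
-- applied = { "liga=true", "kern=true" }  -> basehash key "liga=true kern=true"
-- properties.fullname = properties.fullname .. "-" .. id
-- (the same id is reused whenever the same combination shows up again)
]==]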
- if features then - applybasemethod("initializehashes",tfmdata) - local collectlookups=otf.collectlookups - local rawdata=tfmdata.shared.rawdata - local properties=tfmdata.properties - local script=properties.script - local language=properties.language - local basesubstitutions=rawdata.resources.features.gsub - local basepositionings=rawdata.resources.features.gpos - if basesubstitutions or basepositionings then - local sequences=tfmdata.resources.sequences - for s=1,#sequences do - local sequence=sequences[s] - local sfeatures=sequence.features - if sfeatures then - local order=sequence.order - if order then - for i=1,#order do - local feature=order[i] - local value=features[feature] - if value then - local validlookups,lookuplist=collectlookups(rawdata,feature,script,language) - if not validlookups then - elseif basesubstitutions and basesubstitutions[feature] then - if trace_preparing then - report_prepare("filtering base %s feature %a for %a with value %a","sub",feature,fullname,value) - end - applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist) - registerbasefeature(feature,value) - elseif basepositionings and basepositionings[feature] then - if trace_preparing then - report_prepare("filtering base %a feature %a for %a with value %a","pos",feature,fullname,value) - end - applybasemethod("preparepositionings",tfmdata,feature,value,validlookups,lookuplist) - registerbasefeature(feature,value) - end - end - end - end - end - end - end - registerbasehash(tfmdata) - end - if trace_preparing then - report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,fullname) - end - end -end -registerotffeature { - name="features", - description="features", - default=true, - initializers={ - base=featuresinitializer, - } -} -directives.register("fonts.otf.loader.basemethod",function(v) - if basemethods[v] then - basemethod=v - end -end) - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['node-inj']={ - version=1.001, - comment="companion to node-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files", -} -local next=next -local utfchar=utf.char -local trace_injections=false trackers.register("nodes.injections",function(v) trace_injections=v end) -local report_injections=logs.reporter("nodes","injections") -local attributes,nodes,node=attributes,nodes,node -fonts=fonts -local fontdata=fonts.hashes.identifiers -nodes.injections=nodes.injections or {} -local injections=nodes.injections -local nodecodes=nodes.nodecodes -local glyph_code=nodecodes.glyph -local kern_code=nodecodes.kern -local nuts=nodes.nuts -local nodepool=nuts.pool -local newkern=nodepool.kern -local tonode=nuts.tonode -local tonut=nuts.tonut -local getfield=nuts.getfield -local getnext=nuts.getnext -local getprev=nuts.getprev -local getid=nuts.getid -local getattr=nuts.getattr -local getfont=nuts.getfont -local getsubtype=nuts.getsubtype -local getchar=nuts.getchar -local setfield=nuts.setfield -local setattr=nuts.setattr -local traverse_id=nuts.traverse_id -local insert_node_before=nuts.insert_before -local insert_node_after=nuts.insert_after -local a_kernpair=attributes.private('kernpair') -local a_ligacomp=attributes.private('ligacomp') -local a_markbase=attributes.private('markbase') -local a_markmark=attributes.private('markmark') -local a_markdone=attributes.private('markdone') -local 
a_cursbase=attributes.private('cursbase') -local a_curscurs=attributes.private('curscurs') -local a_cursdone=attributes.private('cursdone') -local unsetvalue=attributes.unsetvalue -function injections.installnewkern(nk) - newkern=nk or newkern -end -local cursives={} -local marks={} -local kerns={} -function injections.reset(n) -end -function injections.setligaindex(n,index) - setattr(n,a_ligacomp,index) -end -function injections.getligaindex(n,default) - return getattr(n,a_ligacomp) or default -end -function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext) - local dx,dy=factor*(exit[1]-entry[1]),factor*(exit[2]-entry[2]) - local ws,wn=tfmstart.width,tfmnext.width - local bound=#cursives+1 - setattr(start,a_cursbase,bound) - setattr(nxt,a_curscurs,bound) - cursives[bound]={ rlmode,dx,dy,ws,wn } - return dx,dy,bound -end -function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr) - local x,y,w,h=factor*spec[1],factor*spec[2],factor*spec[3],factor*spec[4] - if x~=0 or w~=0 or y~=0 or h~=0 then - local bound=getattr(current,a_kernpair) - if bound then - local kb=kerns[bound] - kb[2],kb[3],kb[4],kb[5]=(kb[2] or 0)+x,(kb[3] or 0)+y,(kb[4] or 0)+w,(kb[5] or 0)+h - else - bound=#kerns+1 - setattr(current,a_kernpair,bound) - kerns[bound]={ rlmode,x,y,w,h,r2lflag,tfmchr.width } - end - return x,y,w,h,bound - end - return x,y,w,h -end -function injections.setkern(current,factor,rlmode,x,tfmchr) - local dx=factor*x - if dx~=0 then - local bound=#kerns+1 - setattr(current,a_kernpair,bound) - kerns[bound]={ rlmode,dx } - return dx,bound - else - return 0,0 - end -end -function injections.setmark(start,base,factor,rlmode,ba,ma) - local dx,dy=factor*(ba[1]-ma[1]),factor*(ba[2]-ma[2]) - local bound=getattr(base,a_markbase) - local index=1 - if bound then - local mb=marks[bound] - if mb then - index=#mb+1 - mb[index]={ dx,dy,rlmode } - setattr(start,a_markmark,bound) - setattr(start,a_markdone,index) - return dx,dy,bound - else - report_injections("possible problem, %U is base mark without data (id %a)",getchar(base),bound) - end - end - index=index or 1 - bound=#marks+1 - setattr(base,a_markbase,bound) - setattr(start,a_markmark,bound) - setattr(start,a_markdone,index) - marks[bound]={ [index]={ dx,dy,rlmode } } - return dx,dy,bound -end -local function dir(n) - return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset" -end -local function trace(head) - report_injections("begin run") - for n in traverse_id(glyph_code,head) do - if getsubtype(n)<256 then - local kp=getattr(n,a_kernpair) - local mb=getattr(n,a_markbase) - local mm=getattr(n,a_markmark) - local md=getattr(n,a_markdone) - local cb=getattr(n,a_cursbase) - local cc=getattr(n,a_curscurs) - local char=getchar(n) - report_injections("font %s, char %U, glyph %c",getfont(n),char,char) - if kp then - local k=kerns[kp] - if k[3] then - report_injections(" pairkern: dir %a, x %p, y %p, w %p, h %p",dir(k[1]),k[2],k[3],k[4],k[5]) - else - report_injections(" kern: dir %a, dx %p",dir(k[1]),k[2]) - end - end - if mb then - report_injections(" markbase: bound %a",mb) - end - if mm then - local m=marks[mm] - if mb then - local m=m[mb] - if m then - report_injections(" markmark: bound %a, index %a, dx %p, dy %p",mm,md,m[1],m[2]) - else - report_injections(" markmark: bound %a, missing index",mm) - end - else - m=m[1] - report_injections(" markmark: bound %a, dx %p, dy %p",mm,m and m[1],m and m[2]) - end - end - if cb then - report_injections(" cursbase: bound %a",cb) - end - if cc then - local c=cursives[cc] 
- report_injections(" curscurs: bound %a, dir %a, dx %p, dy %p",cc,dir(c[1]),c[2],c[3]) - end - end - end - report_injections("end run") -end -local function show_result(head) - local current=head - local skipping=false - while current do - local id=getid(current) - if id==glyph_code then - report_injections("char: %C, width %p, xoffset %p, yoffset %p", - getchar(current),getfield(current,"width"),getfield(current,"xoffset"),getfield(current,"yoffset")) - skipping=false - elseif id==kern_code then - report_injections("kern: %p",getfield(current,"kern")) - skipping=false - elseif not skipping then - report_injections() - skipping=true - end - current=getnext(current) - end -end -function injections.handler(head,where,keep) - head=tonut(head) - local has_marks,has_cursives,has_kerns=next(marks),next(cursives),next(kerns) - if has_marks or has_cursives then - if trace_injections then - trace(head) - end - local done,ky,rl,valid,cx,wx,mk,nofvalid=false,{},{},{},{},{},{},0 - if has_kerns then - local nf,tm=nil,nil - for n in traverse_id(glyph_code,head) do - if getsubtype(n)<256 then - nofvalid=nofvalid+1 - valid[nofvalid]=n - local f=getfont(n) - if f~=nf then - nf=f - tm=fontdata[nf].resources.marks - end - if tm then - mk[n]=tm[getchar(n)] - end - local k=getattr(n,a_kernpair) - if k then - local kk=kerns[k] - if kk then - local x,y,w,h=kk[2] or 0,kk[3] or 0,kk[4] or 0,kk[5] or 0 - local dy=y-h - if dy~=0 then - ky[n]=dy - end - if w~=0 or x~=0 then - wx[n]=kk - end - rl[n]=kk[1] - end - end - end - end - else - local nf,tm=nil,nil - for n in traverse_id(glyph_code,head) do - if getsubtype(n)<256 then - nofvalid=nofvalid+1 - valid[nofvalid]=n - local f=getfont(n) - if f~=nf then - nf=f - tm=fontdata[nf].resources.marks - end - if tm then - mk[n]=tm[getchar(n)] - end - end - end - end - if nofvalid>0 then - local cx={} - if has_kerns and next(ky) then - for n,k in next,ky do - setfield(n,"yoffset",k) - end - end - if has_cursives then - local p_cursbase,p=nil,nil - local t,d,maxt={},{},0 - for i=1,nofvalid do - local n=valid[i] - if not mk[n] then - local n_cursbase=getattr(n,a_cursbase) - if p_cursbase then - local n_curscurs=getattr(n,a_curscurs) - if p_cursbase==n_curscurs then - local c=cursives[n_curscurs] - if c then - local rlmode,dx,dy,ws,wn=c[1],c[2],c[3],c[4],c[5] - if rlmode>=0 then - dx=dx-ws - else - dx=dx+wn - end - if dx~=0 then - cx[n]=dx - rl[n]=rlmode - end - dy=-dy - maxt=maxt+1 - t[maxt]=p - d[maxt]=dy - else - maxt=0 - end - end - elseif maxt>0 then - local ny=getfield(n,"yoffset") - for i=maxt,1,-1 do - ny=ny+d[i] - local ti=t[i] - setfield(ti,"yoffset",getfield(ti,"yoffset")+ny) - end - maxt=0 - end - if not n_cursbase and maxt>0 then - local ny=getfield(n,"yoffset") - for i=maxt,1,-1 do - ny=ny+d[i] - local ti=t[i] - setfield(ti,"yoffset",ny) - end - maxt=0 - end - p_cursbase,p=n_cursbase,n - end - end - if maxt>0 then - local ny=getfield(n,"yoffset") - for i=maxt,1,-1 do - ny=ny+d[i] - local ti=t[i] - setfield(ti,"yoffset",ny) - end - maxt=0 - end - if not keep then - cursives={} - end - end - if has_marks then - for i=1,nofvalid do - local p=valid[i] - local p_markbase=getattr(p,a_markbase) - if p_markbase then - local mrks=marks[p_markbase] - local nofmarks=#mrks - for n in traverse_id(glyph_code,getnext(p)) do - local n_markmark=getattr(n,a_markmark) - if p_markbase==n_markmark then - local index=getattr(n,a_markdone) or 1 - local d=mrks[index] - if d then - local rlmode=d[3] - local k=wx[p] - local px=getfield(p,"xoffset") - local ox=0 - if k then - local x=k[2] - 
local w=k[4] - if w then - if rlmode and rlmode>=0 then - ox=px-getfield(p,"width")+d[1]-(w-x) - else - ox=px-d[1]-x - end - else - if rlmode and rlmode>=0 then - ox=px-getfield(p,"width")+d[1] - else - ox=px-d[1]-x - end - end - else - local wp=getfield(p,"width") - local wn=getfield(n,"width") - if rlmode and rlmode>=0 then - ox=px-wp+d[1] - else - ox=px-d[1] - end - if wn~=0 then - insert_node_before(head,n,newkern(-wn/2)) - insert_node_after(head,n,newkern(-wn/2)) - end - end - setfield(n,"xoffset",ox) - local py=getfield(p,"yoffset") - local oy=0 - if mk[p] then - oy=py+d[2] - else - oy=getfield(n,"yoffset")+py+d[2] - end - setfield(n,"yoffset",oy) - if nofmarks==1 then - break - else - nofmarks=nofmarks-1 - end - end - elseif not n_markmark then - break - else - end - end - end - end - if not keep then - marks={} - end - end - if next(wx) then - for n,k in next,wx do - local x=k[2] - local w=k[4] - if w then - local rl=k[1] - local wx=w-x - if rl<0 then - if wx~=0 then - insert_node_before(head,n,newkern(wx)) - end - if x~=0 then - insert_node_after (head,n,newkern(x)) - end - else - if x~=0 then - insert_node_before(head,n,newkern(x)) - end - if wx~=0 then - insert_node_after (head,n,newkern(wx)) - end - end - elseif x~=0 then - insert_node_before(head,n,newkern(x)) - end - end - end - if next(cx) then - for n,k in next,cx do - if k~=0 then - local rln=rl[n] - if rln and rln<0 then - insert_node_before(head,n,newkern(-k)) - else - insert_node_before(head,n,newkern(k)) - end - end - end - end - if not keep then - kerns={} - end - return tonode(head),true - elseif not keep then - kerns,cursives,marks={},{},{} - end - elseif has_kerns then - if trace_injections then - trace(head) - end - for n in traverse_id(glyph_code,head) do - if getsubtype(n)<256 then - local k=getattr(n,a_kernpair) - if k then - local kk=kerns[k] - if kk then - local rl,x,y,w=kk[1],kk[2] or 0,kk[3],kk[4] - if y and y~=0 then - setfield(n,"yoffset",y) - end - if w then - local wx=w-x - if rl<0 then - if wx~=0 then - insert_node_before(head,n,newkern(wx)) - end - if x~=0 then - insert_node_after (head,n,newkern(x)) - end - else - if x~=0 then - insert_node_before(head,n,newkern(x)) - end - if wx~=0 then - insert_node_after(head,n,newkern(wx)) - end - end - else - if x~=0 then - insert_node_before(head,n,newkern(x)) - end - end - end - end - end - end - if not keep then - kerns={} - end - return tonode(head),true - else - end - return tonode(head),false -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-otx']={ - version=1.001, - comment="companion to font-otf.lua (analysing)", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local type=type -if not trackers then trackers={ register=function() end } end -local fonts,nodes,node=fonts,nodes,node -local allocate=utilities.storage.allocate -local otf=fonts.handlers.otf -local analyzers=fonts.analyzers -local initializers=allocate() -local methods=allocate() -analyzers.initializers=initializers -analyzers.methods=methods -analyzers.useunicodemarks=false -local a_state=attributes.private('state') -local nuts=nodes.nuts -local tonut=nuts.tonut -local getfield=nuts.getfield -local getnext=nuts.getnext -local getprev=nuts.getprev -local getid=nuts.getid -local getprop=nuts.getprop -local setprop=nuts.setprop -local getfont=nuts.getfont -local getsubtype=nuts.getsubtype -local 
getchar=nuts.getchar -local traverse_id=nuts.traverse_id -local traverse_node_list=nuts.traverse -local end_of_math=nuts.end_of_math -local nodecodes=nodes.nodecodes -local glyph_code=nodecodes.glyph -local disc_code=nodecodes.disc -local math_code=nodecodes.math -local fontdata=fonts.hashes.identifiers -local categories=characters and characters.categories or {} -local otffeatures=fonts.constructors.newfeatures("otf") -local registerotffeature=otffeatures.register -local s_init=1 local s_rphf=7 -local s_medi=2 local s_half=8 -local s_fina=3 local s_pref=9 -local s_isol=4 local s_blwf=10 -local s_mark=5 local s_pstf=11 -local s_rest=6 -local states={ - init=s_init, - medi=s_medi, - fina=s_fina, - isol=s_isol, - mark=s_mark, - rest=s_rest, - rphf=s_rphf, - half=s_half, - pref=s_pref, - blwf=s_blwf, - pstf=s_pstf, -} -local features={ - init=s_init, - medi=s_medi, - fina=s_fina, - isol=s_isol, - rphf=s_rphf, - half=s_half, - pref=s_pref, - blwf=s_blwf, - pstf=s_pstf, -} -analyzers.states=states -analyzers.features=features -function analyzers.setstate(head,font) - local useunicodemarks=analyzers.useunicodemarks - local tfmdata=fontdata[font] - local descriptions=tfmdata.descriptions - local first,last,current,n,done=nil,nil,head,0,false - current=tonut(current) - while current do - local id=getid(current) - if id==glyph_code and getfont(current)==font then - done=true - local char=getchar(current) - local d=descriptions[char] - if d then - if d.class=="mark" or (useunicodemarks and categories[char]=="mn") then - done=true - setprop(current,a_state,s_mark) - elseif n==0 then - first,last,n=current,current,1 - setprop(current,a_state,s_init) - else - last,n=current,n+1 - setprop(current,a_state,s_medi) - end - else - if first and first==last then - setprop(last,a_state,s_isol) - elseif last then - setprop(last,a_state,s_fina) - end - first,last,n=nil,nil,0 - end - elseif id==disc_code then - setprop(current,a_state,s_medi) - last=current - else - if first and first==last then - setprop(last,a_state,s_isol) - elseif last then - setprop(last,a_state,s_fina) - end - first,last,n=nil,nil,0 - if id==math_code then - current=end_of_math(current) - end - end - current=getnext(current) - end - if first and first==last then - setprop(last,a_state,s_isol) - elseif last then - setprop(last,a_state,s_fina) - end - return head,done -end -local function analyzeinitializer(tfmdata,value) - local script,language=otf.scriptandlanguage(tfmdata) - local action=initializers[script] - if not action then - elseif type(action)=="function" then - return action(tfmdata,value) - else - local action=action[language] - if action then - return action(tfmdata,value) - end - end -end -local function analyzeprocessor(head,font,attr) - local tfmdata=fontdata[font] - local script,language=otf.scriptandlanguage(tfmdata,attr) - local action=methods[script] - if not action then - elseif type(action)=="function" then - return action(head,font,attr) - else - action=action[language] - if action then - return action(head,font,attr) - end - end - return head,false -end -registerotffeature { - name="analyze", - description="analysis of character classes", - default=true, - initializers={ - node=analyzeinitializer, - }, - processors={ - position=1, - node=analyzeprocessor, - } -} -methods.latn=analyzers.setstate -local tatweel=0x0640 -local zwnj=0x200C -local zwj=0x200D -local isolated={ - [0x0600]=true,[0x0601]=true,[0x0602]=true,[0x0603]=true, - [0x0604]=true, - [0x0608]=true,[0x060B]=true,[0x0621]=true,[0x0674]=true, - [0x06DD]=true, 
- [0x0856]=true,[0x0858]=true,[0x0857]=true, - [0x07FA]=true, - [zwnj]=true, - [0x08AD]=true, -} -local final={ - [0x0622]=true,[0x0623]=true,[0x0624]=true,[0x0625]=true, - [0x0627]=true,[0x0629]=true,[0x062F]=true,[0x0630]=true, - [0x0631]=true,[0x0632]=true,[0x0648]=true,[0x0671]=true, - [0x0672]=true,[0x0673]=true,[0x0675]=true,[0x0676]=true, - [0x0677]=true,[0x0688]=true,[0x0689]=true,[0x068A]=true, - [0x068B]=true,[0x068C]=true,[0x068D]=true,[0x068E]=true, - [0x068F]=true,[0x0690]=true,[0x0691]=true,[0x0692]=true, - [0x0693]=true,[0x0694]=true,[0x0695]=true,[0x0696]=true, - [0x0697]=true,[0x0698]=true,[0x0699]=true,[0x06C0]=true, - [0x06C3]=true,[0x06C4]=true,[0x06C5]=true,[0x06C6]=true, - [0x06C7]=true,[0x06C8]=true,[0x06C9]=true,[0x06CA]=true, - [0x06CB]=true,[0x06CD]=true,[0x06CF]=true,[0x06D2]=true, - [0x06D3]=true,[0x06D5]=true,[0x06EE]=true,[0x06EF]=true, - [0x0759]=true,[0x075A]=true,[0x075B]=true,[0x076B]=true, - [0x076C]=true,[0x0771]=true,[0x0773]=true,[0x0774]=true, - [0x0778]=true,[0x0779]=true, - [0x08AA]=true,[0x08AB]=true,[0x08AC]=true, - [0xFEF5]=true,[0xFEF7]=true,[0xFEF9]=true,[0xFEFB]=true, - [0x0710]=true,[0x0715]=true,[0x0716]=true,[0x0717]=true, - [0x0718]=true,[0x0719]=true,[0x0728]=true,[0x072A]=true, - [0x072C]=true,[0x071E]=true, - [0x072F]=true,[0x074D]=true, - [0x0840]=true,[0x0849]=true,[0x0854]=true,[0x0846]=true, - [0x084F]=true, - [0x08AE]=true,[0x08B1]=true,[0x08B2]=true, -} -local medial={ - [0x0626]=true,[0x0628]=true,[0x062A]=true,[0x062B]=true, - [0x062C]=true,[0x062D]=true,[0x062E]=true,[0x0633]=true, - [0x0634]=true,[0x0635]=true,[0x0636]=true,[0x0637]=true, - [0x0638]=true,[0x0639]=true,[0x063A]=true,[0x063B]=true, - [0x063C]=true,[0x063D]=true,[0x063E]=true,[0x063F]=true, - [0x0641]=true,[0x0642]=true,[0x0643]=true, - [0x0644]=true,[0x0645]=true,[0x0646]=true,[0x0647]=true, - [0x0649]=true,[0x064A]=true,[0x066E]=true,[0x066F]=true, - [0x0678]=true,[0x0679]=true,[0x067A]=true,[0x067B]=true, - [0x067C]=true,[0x067D]=true,[0x067E]=true,[0x067F]=true, - [0x0680]=true,[0x0681]=true,[0x0682]=true,[0x0683]=true, - [0x0684]=true,[0x0685]=true,[0x0686]=true,[0x0687]=true, - [0x069A]=true,[0x069B]=true,[0x069C]=true,[0x069D]=true, - [0x069E]=true,[0x069F]=true,[0x06A0]=true,[0x06A1]=true, - [0x06A2]=true,[0x06A3]=true,[0x06A4]=true,[0x06A5]=true, - [0x06A6]=true,[0x06A7]=true,[0x06A8]=true,[0x06A9]=true, - [0x06AA]=true,[0x06AB]=true,[0x06AC]=true,[0x06AD]=true, - [0x06AE]=true,[0x06AF]=true,[0x06B0]=true,[0x06B1]=true, - [0x06B2]=true,[0x06B3]=true,[0x06B4]=true,[0x06B5]=true, - [0x06B6]=true,[0x06B7]=true,[0x06B8]=true,[0x06B9]=true, - [0x06BA]=true,[0x06BB]=true,[0x06BC]=true,[0x06BD]=true, - [0x06BE]=true,[0x06BF]=true,[0x06C1]=true,[0x06C2]=true, - [0x06CC]=true,[0x06CE]=true,[0x06D0]=true,[0x06D1]=true, - [0x06FA]=true,[0x06FB]=true,[0x06FC]=true,[0x06FF]=true, - [0x0750]=true,[0x0751]=true,[0x0752]=true,[0x0753]=true, - [0x0754]=true,[0x0755]=true,[0x0756]=true,[0x0757]=true, - [0x0758]=true,[0x075C]=true,[0x075D]=true,[0x075E]=true, - [0x075F]=true,[0x0760]=true,[0x0761]=true,[0x0762]=true, - [0x0763]=true,[0x0764]=true,[0x0765]=true,[0x0766]=true, - [0x0767]=true,[0x0768]=true,[0x0769]=true,[0x076A]=true, - [0x076D]=true,[0x076E]=true,[0x076F]=true,[0x0770]=true, - [0x0772]=true,[0x0775]=true,[0x0776]=true,[0x0777]=true, - [0x077A]=true,[0x077B]=true,[0x077C]=true,[0x077D]=true, - [0x077E]=true,[0x077F]=true, - [0x08A0]=true,[0x08A2]=true,[0x08A4]=true,[0x08A5]=true, - [0x08A6]=true,[0x0620]=true,[0x08A8]=true,[0x08A9]=true, - 
[0x08A7]=true,[0x08A3]=true, - [0x0712]=true,[0x0713]=true,[0x0714]=true,[0x071A]=true, - [0x071B]=true,[0x071C]=true,[0x071D]=true,[0x071F]=true, - [0x0720]=true,[0x0721]=true,[0x0722]=true,[0x0723]=true, - [0x0724]=true,[0x0725]=true,[0x0726]=true,[0x0727]=true, - [0x0729]=true,[0x072B]=true,[0x072D]=true,[0x072E]=true, - [0x074E]=true,[0x074F]=true, - [0x0841]=true,[0x0842]=true,[0x0843]=true,[0x0844]=true, - [0x0845]=true,[0x0847]=true,[0x0848]=true,[0x0855]=true, - [0x0851]=true,[0x084E]=true,[0x084D]=true,[0x084A]=true, - [0x084B]=true,[0x084C]=true,[0x0850]=true,[0x0852]=true, - [0x0853]=true, - [0x07D7]=true,[0x07E8]=true,[0x07D9]=true,[0x07EA]=true, - [0x07CA]=true,[0x07DB]=true,[0x07CC]=true,[0x07DD]=true, - [0x07CE]=true,[0x07DF]=true,[0x07D4]=true,[0x07E5]=true, - [0x07E9]=true,[0x07E7]=true,[0x07E3]=true,[0x07E2]=true, - [0x07E0]=true,[0x07E1]=true,[0x07DE]=true,[0x07DC]=true, - [0x07D1]=true,[0x07DA]=true,[0x07D8]=true,[0x07D6]=true, - [0x07D2]=true,[0x07D0]=true,[0x07CF]=true,[0x07CD]=true, - [0x07CB]=true,[0x07D3]=true,[0x07E4]=true,[0x07D5]=true, - [0x07E6]=true, - [tatweel]=true,[zwj]=true, - [0x08A1]=true,[0x08AF]=true,[0x08B0]=true, -} -local arab_warned={} -local function warning(current,what) - local char=getchar(current) - if not arab_warned[char] then - log.report("analyze","arab: character %C has no %a class",char,what) - arab_warned[char]=true - end -end -local function finish(first,last) - if last then - if first==last then - local fc=getchar(first) - if medial[fc] or final[fc] then - setprop(first,a_state,s_isol) - else - warning(first,"isol") - setprop(first,a_state,s_error) - end - else - local lc=getchar(last) - if medial[lc] or final[lc] then - setprop(last,a_state,s_fina) - else - warning(last,"fina") - setprop(last,a_state,s_error) - end - end - first,last=nil,nil - elseif first then - local fc=getchar(first) - if medial[fc] or final[fc] then - setprop(first,a_state,s_isol) - else - warning(first,"isol") - setprop(first,a_state,s_error) - end - first=nil - end - return first,last -end -function methods.arab(head,font,attr) - local useunicodemarks=analyzers.useunicodemarks - local tfmdata=fontdata[font] - local marks=tfmdata.resources.marks - local first,last,current,done=nil,nil,head,false - current=tonut(current) - while current do - local id=getid(current) - if id==glyph_code and getfont(current)==font and getsubtype(current)<256 and not getprop(current,a_state) then - done=true - local char=getchar(current) - if marks[char] or (useunicodemarks and categories[char]=="mn") then - setprop(current,a_state,s_mark) - elseif isolated[char] then - first,last=finish(first,last) - setprop(current,a_state,s_isol) - first,last=nil,nil - elseif not first then - if medial[char] then - setprop(current,a_state,s_init) - first,last=first or current,current - elseif final[char] then - setprop(current,a_state,s_isol) - first,last=nil,nil - else - first,last=finish(first,last) - end - elseif medial[char] then - first,last=first or current,current - setprop(current,a_state,s_medi) - elseif final[char] then - if getprop(last,a_state)~=s_init then - setprop(last,a_state,s_medi) - end - setprop(current,a_state,s_fina) - first,last=nil,nil - elseif char>=0x0600 and char<=0x06FF then - setprop(current,a_state,s_rest) - first,last=finish(first,last) - else - first,last=finish(first,last) - end - else - if first or last then - first,last=finish(first,last) - end - if id==math_code then - current=end_of_math(current) - end - end - current=getnext(current) - end - if first or last 
then - finish(first,last) - end - return head,done -end -methods.syrc=methods.arab -methods.mand=methods.arab -methods.nko=methods.arab -directives.register("otf.analyze.useunicodemarks",function(v) - analyzers.useunicodemarks=v -end) - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-otn']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files", -} -local concat,insert,remove=table.concat,table.insert,table.remove -local gmatch,gsub,find,match,lower,strip=string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip -local type,next,tonumber,tostring=type,next,tonumber,tostring -local lpegmatch=lpeg.match -local random=math.random -local formatters=string.formatters -local logs,trackers,nodes,attributes=logs,trackers,nodes,attributes -local registertracker=trackers.register -local fonts=fonts -local otf=fonts.handlers.otf -local trace_lookups=false registertracker("otf.lookups",function(v) trace_lookups=v end) -local trace_singles=false registertracker("otf.singles",function(v) trace_singles=v end) -local trace_multiples=false registertracker("otf.multiples",function(v) trace_multiples=v end) -local trace_alternatives=false registertracker("otf.alternatives",function(v) trace_alternatives=v end) -local trace_ligatures=false registertracker("otf.ligatures",function(v) trace_ligatures=v end) -local trace_contexts=false registertracker("otf.contexts",function(v) trace_contexts=v end) -local trace_marks=false registertracker("otf.marks",function(v) trace_marks=v end) -local trace_kerns=false registertracker("otf.kerns",function(v) trace_kerns=v end) -local trace_cursive=false registertracker("otf.cursive",function(v) trace_cursive=v end) -local trace_preparing=false registertracker("otf.preparing",function(v) trace_preparing=v end) -local trace_bugs=false registertracker("otf.bugs",function(v) trace_bugs=v end) -local trace_details=false registertracker("otf.details",function(v) trace_details=v end) -local trace_applied=false registertracker("otf.applied",function(v) trace_applied=v end) -local trace_steps=false registertracker("otf.steps",function(v) trace_steps=v end) -local trace_skips=false registertracker("otf.skips",function(v) trace_skips=v end) -local trace_directions=false registertracker("otf.directions",function(v) trace_directions=v end) -local report_direct=logs.reporter("fonts","otf direct") -local report_subchain=logs.reporter("fonts","otf subchain") -local report_chain=logs.reporter("fonts","otf chain") -local report_process=logs.reporter("fonts","otf process") -local report_prepare=logs.reporter("fonts","otf prepare") -local report_warning=logs.reporter("fonts","otf warning") -registertracker("otf.verbose_chain",function(v) otf.setcontextchain(v and "verbose") end) -registertracker("otf.normal_chain",function(v) otf.setcontextchain(v and "normal") end) -registertracker("otf.replacements","otf.singles,otf.multiples,otf.alternatives,otf.ligatures") -registertracker("otf.positions","otf.marks,otf.kerns,otf.cursive") -registertracker("otf.actions","otf.replacements,otf.positions") -registertracker("otf.injections","nodes.injections") -registertracker("*otf.sample","otf.steps,otf.actions,otf.analyzing") -local nuts=nodes.nuts -local tonode=nuts.tonode -local tonut=nuts.tonut -local getfield=nuts.getfield -local setfield=nuts.setfield 
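-- The locals just above and below this point cache the "nuts" (direct node)
-- accessors before the OpenType handlers are defined. A minimal standalone
-- sketch of the same access pattern, assuming only LuaTeX's stock
-- node.direct API; the helper name and the glyph-counting task are purely
-- illustrative and not part of the fontloader:
local todirect   = node.direct.todirect
local getid      = node.direct.getid
local getnext    = node.direct.getnext
local getfont    = node.direct.getfont
local glyph_code = node.id("glyph")

-- count the glyphs of a given font in a node list, working on the
-- direct (userdata-free) representation for speed
local function countglyphs(head, font)
  local n, current = 0, todirect(head)
  while current do
    if getid(current) == glyph_code and getfont(current) == font then
      n = n + 1
    end
    current = getnext(current)
  end
  return n
end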
-local getnext=nuts.getnext -local getprev=nuts.getprev -local getid=nuts.getid -local getattr=nuts.getattr -local setattr=nuts.setattr -local getprop=nuts.getprop -local setprop=nuts.setprop -local getfont=nuts.getfont -local getsubtype=nuts.getsubtype -local getchar=nuts.getchar -local insert_node_after=nuts.insert_after -local delete_node=nuts.delete -local copy_node=nuts.copy -local find_node_tail=nuts.tail -local flush_node_list=nuts.flush_list -local end_of_math=nuts.end_of_math -local setmetatableindex=table.setmetatableindex -local zwnj=0x200C -local zwj=0x200D -local wildcard="*" -local default="dflt" -local nodecodes=nodes.nodecodes -local whatcodes=nodes.whatcodes -local glyphcodes=nodes.glyphcodes -local disccodes=nodes.disccodes -local glyph_code=nodecodes.glyph -local glue_code=nodecodes.glue -local disc_code=nodecodes.disc -local whatsit_code=nodecodes.whatsit -local math_code=nodecodes.math -local dir_code=whatcodes.dir -local localpar_code=whatcodes.localpar -local discretionary_code=disccodes.discretionary -local ligature_code=glyphcodes.ligature -local privateattribute=attributes.private -local a_state=privateattribute('state') -local a_cursbase=privateattribute('cursbase') -local injections=nodes.injections -local setmark=injections.setmark -local setcursive=injections.setcursive -local setkern=injections.setkern -local setpair=injections.setpair -local resetinjection=injections.reset -local setligaindex=injections.setligaindex -local getligaindex=injections.getligaindex -local cursonce=true -local fonthashes=fonts.hashes -local fontdata=fonthashes.identifiers -local otffeatures=fonts.constructors.newfeatures("otf") -local registerotffeature=otffeatures.register -local onetimemessage=fonts.loggers.onetimemessage or function() end -otf.defaultnodealternate="none" -local tfmdata=false -local characters=false -local descriptions=false -local resources=false -local marks=false -local currentfont=false -local lookuptable=false -local anchorlookups=false -local lookuptypes=false -local lookuptags=false -local handlers={} -local rlmode=0 -local featurevalue=false -local checkstep=(nodes and nodes.tracers and nodes.tracers.steppers.check) or function() end -local registerstep=(nodes and nodes.tracers and nodes.tracers.steppers.register) or function() end -local registermessage=(nodes and nodes.tracers and nodes.tracers.steppers.message) or function() end -local function logprocess(...) - if trace_steps then - registermessage(...) - end - report_direct(...) -end -local function logwarning(...) - report_direct(...) 
-end -local f_unicode=formatters["%U"] -local f_uniname=formatters["%U (%s)"] -local f_unilist=formatters["% t (% t)"] -local function gref(n) - if type(n)=="number" then - local description=descriptions[n] - local name=description and description.name - if name then - return f_uniname(n,name) - else - return f_unicode(n) - end - elseif n then - local num,nam={},{} - for i=1,#n do - local ni=n[i] - if tonumber(ni) then - local di=descriptions[ni] - num[i]=f_unicode(ni) - nam[i]=di and di.name or "-" - end - end - return f_unilist(num,nam) - else - return "" - end -end -local function cref(kind,chainname,chainlookupname,lookupname,index) - if index then - return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookuptags[lookupname],index) - elseif lookupname then - return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookuptags[lookupname]) - elseif chainlookupname then - return formatters["feature %a, chain %a, sub %a"](kind,lookuptags[chainname],lookuptags[chainlookupname]) - elseif chainname then - return formatters["feature %a, chain %a"](kind,lookuptags[chainname]) - else - return formatters["feature %a"](kind) - end -end -local function pref(kind,lookupname) - return formatters["feature %a, lookup %a"](kind,lookuptags[lookupname]) -end -local function copy_glyph(g) - local components=getfield(g,"components") - if components then - setfield(g,"components",nil) - local n=copy_node(g) - setfield(g,"components",components) - return n - else - return copy_node(g) - end -end -local function markstoligature(kind,lookupname,head,start,stop,char) - if start==stop and getchar(start)==char then - return head,start - else - local prev=getprev(start) - local next=getnext(stop) - setfield(start,"prev",nil) - setfield(stop,"next",nil) - local base=copy_glyph(start) - if head==start then - head=base - end - resetinjection(base) - setfield(base,"char",char) - setfield(base,"subtype",ligature_code) - setfield(base,"components",start) - if prev then - setfield(prev,"next",base) - end - if next then - setfield(next,"prev",base) - end - setfield(base,"next",next) - setfield(base,"prev",prev) - return head,base - end -end -local function getcomponentindex(start) - if getid(start)~=glyph_code then - return 0 - elseif getsubtype(start)==ligature_code then - local i=0 - local components=getfield(start,"components") - while components do - i=i+getcomponentindex(components) - components=getnext(components) - end - return i - elseif not marks[getchar(start)] then - return 1 - else - return 0 - end -end -local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound) - if start==stop and getchar(start)==char then - resetinjection(start) - setfield(start,"char",char) - return head,start - end - local prev=getprev(start) - local next=getnext(stop) - setfield(start,"prev",nil) - setfield(stop,"next",nil) - local base=copy_glyph(start) - if start==head then - head=base - end - resetinjection(base) - setfield(base,"char",char) - setfield(base,"subtype",ligature_code) - setfield(base,"components",start) - if prev then - setfield(prev,"next",base) - end - if next then - setfield(next,"prev",base) - end - setfield(base,"next",next) - setfield(base,"prev",prev) - if not discfound then - local deletemarks=markflag~="mark" - local components=start - local baseindex=0 - local componentindex=0 - local head=base - local current=base - while start do - local char=getchar(start) - if not marks[char] then - 
baseindex=baseindex+componentindex - componentindex=getcomponentindex(start) - elseif not deletemarks then - setligaindex(start,baseindex+getligaindex(start,componentindex)) - if trace_marks then - logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),getligaindex(start)) - end - head,current=insert_node_after(head,current,copy_node(start)) - elseif trace_marks then - logwarning("%s: delete mark %s",pref(kind,lookupname),gref(char)) - end - start=getnext(start) - end - local start=getnext(current) - while start and getid(start)==glyph_code do - local char=getchar(start) - if marks[char] then - setligaindex(start,baseindex+getligaindex(start,componentindex)) - if trace_marks then - logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),getligaindex(start)) - end - else - break - end - start=getnext(start) - end - end - return head,base -end -function handlers.gsub_single(head,start,kind,lookupname,replacement) - if trace_singles then - logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(getchar(start)),gref(replacement)) - end - resetinjection(start) - setfield(start,"char",replacement) - return head,start,true -end -local function get_alternative_glyph(start,alternatives,value,trace_alternatives) - local n=#alternatives - if value=="random" then - local r=random(1,n) - return alternatives[r],trace_alternatives and formatters["value %a, taking %a"](value,r) - elseif value=="first" then - return alternatives[1],trace_alternatives and formatters["value %a, taking %a"](value,1) - elseif value=="last" then - return alternatives[n],trace_alternatives and formatters["value %a, taking %a"](value,n) - else - value=tonumber(value) - if type(value)~="number" then - return alternatives[1],trace_alternatives and formatters["invalid value %s, taking %a"](value,1) - elseif value>n then - local defaultalt=otf.defaultnodealternate - if defaultalt=="first" then - return alternatives[n],trace_alternatives and formatters["invalid value %s, taking %a"](value,1) - elseif defaultalt=="last" then - return alternatives[1],trace_alternatives and formatters["invalid value %s, taking %a"](value,n) - else - return false,trace_alternatives and formatters["invalid value %a, %s"](value,"out of range") - end - elseif value==0 then - return getchar(start),trace_alternatives and formatters["invalid value %a, %s"](value,"no change") - elseif value<1 then - return alternatives[1],trace_alternatives and formatters["invalid value %a, taking %a"](value,1) - else - return alternatives[value],trace_alternatives and formatters["value %a, taking %a"](value,value) - end - end -end -local function multiple_glyphs(head,start,multiple,ignoremarks) - local nofmultiples=#multiple - if nofmultiples>0 then - resetinjection(start) - setfield(start,"char",multiple[1]) - if nofmultiples>1 then - local sn=getnext(start) - for k=2,nofmultiples do - local n=copy_node(start) - resetinjection(n) - setfield(n,"char",multiple[k]) - setfield(n,"next",sn) - setfield(n,"prev",start) - if sn then - setfield(sn,"prev",n) - end - setfield(start,"next",n) - start=n - end - end - return head,start,true - else - if trace_multiples then - logprocess("no multiple for %s",gref(getchar(start))) - end - return head,start,false - end -end -function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence) - local value=featurevalue==true and tfmdata.shared.features[kind] or featurevalue - local choice,comment=get_alternative_glyph(start,alternative,value,trace_alternatives) - if choice 
then - if trace_alternatives then - logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(getchar(start)),choice,gref(choice),comment) - end - resetinjection(start) - setfield(start,"char",choice) - else - if trace_alternatives then - logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(getchar(start)),comment) - end - end - return head,start,true -end -function handlers.gsub_multiple(head,start,kind,lookupname,multiple,sequence) - if trace_multiples then - logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(getchar(start)),gref(multiple)) - end - return multiple_glyphs(head,start,multiple,sequence.flags[1]) -end -function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence) - local s,stop,discfound=getnext(start),nil,false - local startchar=getchar(start) - if marks[startchar] then - while s do - local id=getid(s) - if id==glyph_code and getfont(s)==currentfont and getsubtype(s)<256 then - local lg=ligature[getchar(s)] - if lg then - stop=s - ligature=lg - s=getnext(s) - else - break - end - else - break - end - end - if stop then - local lig=ligature.ligature - if lig then - if trace_ligatures then - local stopchar=getchar(stop) - head,start=markstoligature(kind,lookupname,head,start,stop,lig) - logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start))) - else - head,start=markstoligature(kind,lookupname,head,start,stop,lig) - end - return head,start,true - else - end - end - else - local skipmark=sequence.flags[1] - while s do - local id=getid(s) - if id==glyph_code and getsubtype(s)<256 then - if getfont(s)==currentfont then - local char=getchar(s) - if skipmark and marks[char] then - s=getnext(s) - else - local lg=ligature[char] - if lg then - stop=s - ligature=lg - s=getnext(s) - else - break - end - end - else - break - end - elseif id==disc_code then - discfound=true - s=getnext(s) - else - break - end - end - local lig=ligature.ligature - if lig then - if stop then - if trace_ligatures then - local stopchar=getchar(stop) - head,start=toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound) - logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start))) - else - head,start=toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound) - end - else - resetinjection(start) - setfield(start,"char",lig) - if trace_ligatures then - logprocess("%s: replacing %s by (no real) ligature %s case 3",pref(kind,lookupname),gref(startchar),gref(lig)) - end - end - return head,start,true - else - end - end - return head,start,false -end -function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence) - local markchar=getchar(start) - if marks[markchar] then - local base=getprev(start) - if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then - local basechar=getchar(base) - if marks[basechar] then - while true do - base=getprev(base) - if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then - basechar=getchar(base) - if not marks[basechar] then - break - end - else - if trace_bugs then - logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar)) - end - return head,start,false - end - end - end - local baseanchors=descriptions[basechar] - if baseanchors then - baseanchors=baseanchors.anchors - end - if baseanchors then - local 
baseanchors=baseanchors['basechar'] - if baseanchors then - local al=anchorlookups[lookupname] - for anchor,ba in next,baseanchors do - if al[anchor] then - local ma=markanchors[anchor] - if ma then - local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) - if trace_marks then - logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)", - pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) - end - return head,start,true - end - end - end - if trace_bugs then - logwarning("%s, no matching anchors for mark %s and base %s",pref(kind,lookupname),gref(markchar),gref(basechar)) - end - end - elseif trace_bugs then - onetimemessage(currentfont,basechar,"no base anchors",report_fonts) - end - elseif trace_bugs then - logwarning("%s: prev node is no char",pref(kind,lookupname)) - end - elseif trace_bugs then - logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar)) - end - return head,start,false -end -function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence) - local markchar=getchar(start) - if marks[markchar] then - local base=getprev(start) - if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then - local basechar=getchar(base) - if marks[basechar] then - while true do - base=getprev(base) - if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then - basechar=getchar(base) - if not marks[basechar] then - break - end - else - if trace_bugs then - logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar)) - end - return head,start,false - end - end - end - local index=getligaindex(start) - local baseanchors=descriptions[basechar] - if baseanchors then - baseanchors=baseanchors.anchors - if baseanchors then - local baseanchors=baseanchors['baselig'] - if baseanchors then - local al=anchorlookups[lookupname] - for anchor,ba in next,baseanchors do - if al[anchor] then - local ma=markanchors[anchor] - if ma then - ba=ba[index] - if ba then - local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) - if trace_marks then - logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)", - pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy) - end - return head,start,true - else - if trace_bugs then - logwarning("%s: no matching anchors for mark %s and baselig %s with index %a",pref(kind,lookupname),gref(markchar),gref(basechar),index) - end - end - end - end - end - if trace_bugs then - logwarning("%s: no matching anchors for mark %s and baselig %s",pref(kind,lookupname),gref(markchar),gref(basechar)) - end - end - end - elseif trace_bugs then - onetimemessage(currentfont,basechar,"no base anchors",report_fonts) - end - elseif trace_bugs then - logwarning("%s: prev node is no char",pref(kind,lookupname)) - end - elseif trace_bugs then - logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar)) - end - return head,start,false -end -function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence) - local markchar=getchar(start) - if marks[markchar] then - local base=getprev(start) - local slc=getligaindex(start) - if slc then - while base do - local blc=getligaindex(base) - if blc and blc~=slc then - base=getprev(base) - else - break - end - end - end - if base and getid(base)==glyph_code and getfont(base)==currentfont and 
getsubtype(base)<256 then - local basechar=getchar(base) - local baseanchors=descriptions[basechar] - if baseanchors then - baseanchors=baseanchors.anchors - if baseanchors then - baseanchors=baseanchors['basemark'] - if baseanchors then - local al=anchorlookups[lookupname] - for anchor,ba in next,baseanchors do - if al[anchor] then - local ma=markanchors[anchor] - if ma then - local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) - if trace_marks then - logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)", - pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) - end - return head,start,true - end - end - end - if trace_bugs then - logwarning("%s: no matching anchors for mark %s and basemark %s",pref(kind,lookupname),gref(markchar),gref(basechar)) - end - end - end - elseif trace_bugs then - onetimemessage(currentfont,basechar,"no base anchors",report_fonts) - end - elseif trace_bugs then - logwarning("%s: prev node is no mark",pref(kind,lookupname)) - end - elseif trace_bugs then - logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar)) - end - return head,start,false -end -function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence) - local alreadydone=cursonce and getprop(start,a_cursbase) - if not alreadydone then - local done=false - local startchar=getchar(start) - if marks[startchar] then - if trace_cursive then - logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar)) - end - else - local nxt=getnext(start) - while not done and nxt and getid(nxt)==glyph_code and getfont(nxt)==currentfont and getsubtype(nxt)<256 do - local nextchar=getchar(nxt) - if marks[nextchar] then - nxt=getnext(nxt) - else - local entryanchors=descriptions[nextchar] - if entryanchors then - entryanchors=entryanchors.anchors - if entryanchors then - entryanchors=entryanchors['centry'] - if entryanchors then - local al=anchorlookups[lookupname] - for anchor,entry in next,entryanchors do - if al[anchor] then - local exit=exitanchors[anchor] - if exit then - local dx,dy,bound=setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar]) - if trace_cursive then - logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode) - end - done=true - break - end - end - end - end - end - elseif trace_bugs then - onetimemessage(currentfont,startchar,"no entry anchors",report_fonts) - end - break - end - end - end - return head,start,done - else - if trace_cursive and trace_details then - logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone) - end - return head,start,false - end -end -function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence) - local startchar=getchar(start) - local dx,dy,w,h=setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar]) - if trace_kerns then - logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h) - end - return head,start,false -end -function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence) - local snext=getnext(start) - if not snext then - return head,start,false - else - local prev,done=start,false - local factor=tfmdata.parameters.factor - local lookuptype=lookuptypes[lookupname] - while snext and 
getid(snext)==glyph_code and getfont(snext)==currentfont and getsubtype(snext)<256 do - local nextchar=getchar(snext) - local krn=kerns[nextchar] - if not krn and marks[nextchar] then - prev=snext - snext=getnext(snext) - else - if not krn then - elseif type(krn)=="table" then - if lookuptype=="pair" then - local a,b=krn[2],krn[3] - if a and #a>0 then - local startchar=getchar(start) - local x,y,w,h=setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar]) - if trace_kerns then - logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h) - end - end - if b and #b>0 then - local startchar=getchar(start) - local x,y,w,h=setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar]) - if trace_kerns then - logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h) - end - end - else - report_process("%s: check this out (old kern stuff)",pref(kind,lookupname)) - end - done=true - elseif krn~=0 then - local k=setkern(snext,factor,rlmode,krn) - if trace_kerns then - logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(getchar(prev)),gref(nextchar)) - end - done=true - end - break - end - end - return head,start,done - end -end -local chainmores={} -local chainprocs={} -local function logprocess(...) - if trace_steps then - registermessage(...) - end - report_subchain(...) -end -local logwarning=report_subchain -local function logprocess(...) - if trace_steps then - registermessage(...) - end - report_chain(...) -end -local logwarning=report_chain -function chainprocs.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname) - logwarning("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname)) - return head,start,false -end -function chainmores.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname,n) - logprocess("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname)) - return head,start,false -end -function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements) - local char=getchar(start) - local replacement=replacements[char] - if replacement then - if trace_singles then - logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement)) - end - resetinjection(start) - setfield(start,"char",replacement) - return head,start,true - else - return head,start,false - end -end -function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex) - local current=start - local subtables=currentlookup.subtables - if #subtables>1 then - logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," ")) - end - while current do - if getid(current)==glyph_code then - local currentchar=getchar(current) - local lookupname=subtables[1] - local replacement=lookuphash[lookupname] - if not replacement then - if trace_bugs then - logwarning("%s: no single hits",cref(kind,chainname,chainlookupname,lookupname,chainindex)) - end - else - replacement=replacement[currentchar] - if not replacement or replacement=="" then - if trace_bugs then - logwarning("%s: no single for %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar)) - end - else - if trace_singles then - logprocess("%s: 
replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement)) - end - resetinjection(current) - setfield(current,"char",replacement) - end - end - return head,start,true - elseif current==stop then - break - else - current=getnext(current) - end - end - return head,start,false -end -chainmores.gsub_single=chainprocs.gsub_single -function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - local startchar=getchar(start) - local subtables=currentlookup.subtables - local lookupname=subtables[1] - local replacements=lookuphash[lookupname] - if not replacements then - if trace_bugs then - logwarning("%s: no multiple hits",cref(kind,chainname,chainlookupname,lookupname)) - end - else - replacements=replacements[startchar] - if not replacements or replacement=="" then - if trace_bugs then - logwarning("%s: no multiple for %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar)) - end - else - if trace_multiples then - logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements)) - end - return multiple_glyphs(head,start,replacements,currentlookup.flags[1]) - end - end - return head,start,false -end -chainmores.gsub_multiple=chainprocs.gsub_multiple -function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - local current=start - local subtables=currentlookup.subtables - local value=featurevalue==true and tfmdata.shared.features[kind] or featurevalue - while current do - if getid(current)==glyph_code then - local currentchar=getchar(current) - local lookupname=subtables[1] - local alternatives=lookuphash[lookupname] - if not alternatives then - if trace_bugs then - logwarning("%s: no alternative hit",cref(kind,chainname,chainlookupname,lookupname)) - end - else - alternatives=alternatives[currentchar] - if alternatives then - local choice,comment=get_alternative_glyph(current,alternatives,value,trace_alternatives) - if choice then - if trace_alternatives then - logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(char),choice,gref(choice),comment) - end - resetinjection(start) - setfield(start,"char",choice) - else - if trace_alternatives then - logwarning("%s: no variant %a for %s, %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(char),comment) - end - end - elseif trace_bugs then - logwarning("%s: no alternative for %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar),comment) - end - end - return head,start,true - elseif current==stop then - break - else - current=getnext(current) - end - end - return head,start,false -end -chainmores.gsub_alternate=chainprocs.gsub_alternate -function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex) - local startchar=getchar(start) - local subtables=currentlookup.subtables - local lookupname=subtables[1] - local ligatures=lookuphash[lookupname] - if not ligatures then - if trace_bugs then - logwarning("%s: no ligature hits",cref(kind,chainname,chainlookupname,lookupname,chainindex)) - end - else - ligatures=ligatures[startchar] - if not ligatures then - if trace_bugs then - logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar)) - end - else - local 
s=getnext(start) - local discfound=false - local last=stop - local nofreplacements=0 - local skipmark=currentlookup.flags[1] - while s do - local id=getid(s) - if id==disc_code then - s=getnext(s) - discfound=true - else - local schar=getchar(s) - if skipmark and marks[schar] then - s=getnext(s) - else - local lg=ligatures[schar] - if lg then - ligatures,last,nofreplacements=lg,s,nofreplacements+1 - if s==stop then - break - else - s=getnext(s) - end - else - break - end - end - end - end - local l2=ligatures.ligature - if l2 then - if chainindex then - stop=last - end - if trace_ligatures then - if start==stop then - logprocess("%s: replacing character %s by ligature %s case 3",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2)) - else - logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop)),gref(l2)) - end - end - head,start=toligature(kind,lookupname,head,start,stop,l2,currentlookup.flags[1],discfound) - return head,start,true,nofreplacements - elseif trace_bugs then - if start==stop then - logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar)) - else - logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop))) - end - end - end - end - return head,start,false,0 -end -chainmores.gsub_ligature=chainprocs.gsub_ligature -function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - local markchar=getchar(start) - if marks[markchar] then - local subtables=currentlookup.subtables - local lookupname=subtables[1] - local markanchors=lookuphash[lookupname] - if markanchors then - markanchors=markanchors[markchar] - end - if markanchors then - local base=getprev(start) - if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then - local basechar=getchar(base) - if marks[basechar] then - while true do - base=getprev(base) - if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then - basechar=getchar(base) - if not marks[basechar] then - break - end - else - if trace_bugs then - logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar)) - end - return head,start,false - end - end - end - local baseanchors=descriptions[basechar].anchors - if baseanchors then - local baseanchors=baseanchors['basechar'] - if baseanchors then - local al=anchorlookups[lookupname] - for anchor,ba in next,baseanchors do - if al[anchor] then - local ma=markanchors[anchor] - if ma then - local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) - if trace_marks then - logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)", - cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) - end - return head,start,true - end - end - end - if trace_bugs then - logwarning("%s, no matching anchors for mark %s and base %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar)) - end - end - end - elseif trace_bugs then - logwarning("%s: prev node is no char",cref(kind,chainname,chainlookupname,lookupname)) - end - elseif trace_bugs then - logwarning("%s: mark %s has no 
anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar)) - end - elseif trace_bugs then - logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar)) - end - return head,start,false -end -function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - local markchar=getchar(start) - if marks[markchar] then - local subtables=currentlookup.subtables - local lookupname=subtables[1] - local markanchors=lookuphash[lookupname] - if markanchors then - markanchors=markanchors[markchar] - end - if markanchors then - local base=getprev(start) - if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then - local basechar=getchar(base) - if marks[basechar] then - while true do - base=getprev(base) - if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then - basechar=getchar(base) - if not marks[basechar] then - break - end - else - if trace_bugs then - logwarning("%s: no base for mark %s",cref(kind,chainname,chainlookupname,lookupname),markchar) - end - return head,start,false - end - end - end - local index=getligaindex(start) - local baseanchors=descriptions[basechar].anchors - if baseanchors then - local baseanchors=baseanchors['baselig'] - if baseanchors then - local al=anchorlookups[lookupname] - for anchor,ba in next,baseanchors do - if al[anchor] then - local ma=markanchors[anchor] - if ma then - ba=ba[index] - if ba then - local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) - if trace_marks then - logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)", - cref(kind,chainname,chainlookupname,lookupname),anchor,a or bound,gref(markchar),gref(basechar),index,dx,dy) - end - return head,start,true - end - end - end - end - if trace_bugs then - logwarning("%s: no matching anchors for mark %s and baselig %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar)) - end - end - end - elseif trace_bugs then - logwarning("feature %s, lookup %s: prev node is no char",kind,lookupname) - end - elseif trace_bugs then - logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar)) - end - elseif trace_bugs then - logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar)) - end - return head,start,false -end -function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - local markchar=getchar(start) - if marks[markchar] then - local subtables=currentlookup.subtables - local lookupname=subtables[1] - local markanchors=lookuphash[lookupname] - if markanchors then - markanchors=markanchors[markchar] - end - if markanchors then - local base=getprev(start) - local slc=getligaindex(start) - if slc then - while base do - local blc=getligaindex(base) - if blc and blc~=slc then - base=getprev(base) - else - break - end - end - end - if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then - local basechar=getchar(base) - local baseanchors=descriptions[basechar].anchors - if baseanchors then - baseanchors=baseanchors['basemark'] - if baseanchors then - local al=anchorlookups[lookupname] - for anchor,ba in next,baseanchors do - if al[anchor] then - local ma=markanchors[anchor] - if ma then - local 
dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) - if trace_marks then - logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)", - cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) - end - return head,start,true - end - end - end - if trace_bugs then - logwarning("%s: no matching anchors for mark %s and basemark %s",gref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar)) - end - end - end - elseif trace_bugs then - logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname)) - end - elseif trace_bugs then - logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar)) - end - elseif trace_bugs then - logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar)) - end - return head,start,false -end -function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - local alreadydone=cursonce and getprop(start,a_cursbase) - if not alreadydone then - local startchar=getchar(start) - local subtables=currentlookup.subtables - local lookupname=subtables[1] - local exitanchors=lookuphash[lookupname] - if exitanchors then - exitanchors=exitanchors[startchar] - end - if exitanchors then - local done=false - if marks[startchar] then - if trace_cursive then - logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar)) - end - else - local nxt=getnext(start) - while not done and nxt and getid(nxt)==glyph_code and getfont(nxt)==currentfont and getsubtype(nxt)<256 do - local nextchar=getchar(nxt) - if marks[nextchar] then - nxt=getnext(nxt) - else - local entryanchors=descriptions[nextchar] - if entryanchors then - entryanchors=entryanchors.anchors - if entryanchors then - entryanchors=entryanchors['centry'] - if entryanchors then - local al=anchorlookups[lookupname] - for anchor,entry in next,entryanchors do - if al[anchor] then - local exit=exitanchors[anchor] - if exit then - local dx,dy,bound=setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar]) - if trace_cursive then - logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode) - end - done=true - break - end - end - end - end - end - elseif trace_bugs then - onetimemessage(currentfont,startchar,"no entry anchors",report_fonts) - end - break - end - end - end - return head,start,done - else - if trace_cursive and trace_details then - logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone) - end - return head,start,false - end - end - return head,start,false -end -function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence) - local startchar=getchar(start) - local subtables=currentlookup.subtables - local lookupname=subtables[1] - local kerns=lookuphash[lookupname] - if kerns then - kerns=kerns[startchar] - if kerns then - local dx,dy,w,h=setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar]) - if trace_kerns then - logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h) - end - end - end - return head,start,false -end 
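-- The gpos_single/gpos_pair handlers above and below feed their value
-- records through setkern/setpair together with tfmdata.parameters.factor,
-- which converts font design units into scaled points before injection.
-- A rough sketch of that scaling step alone, with a hypothetical record
-- layout ({ xplacement, yplacement, xadvance, yadvance } in font units);
-- this is not the injection code itself:
local function scalevaluerecord(factor, record)
  local dx = (record[1] or 0) * factor -- horizontal placement
  local dy = (record[2] or 0) * factor -- vertical placement
  local w  = (record[3] or 0) * factor -- horizontal advance correction
  local h  = (record[4] or 0) * factor -- vertical advance correction
  return dx, dy, w, h
end

-- e.g. a 10pt font with 1000 units per em typically has factor 655360/1000,
-- so a 50 unit x placement becomes 32768 scaled points (0.5pt):
local dx, dy, w, h = scalevaluerecord(655360/1000, { 50, 0, 60, 0 })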
-chainmores.gpos_single=chainprocs.gpos_single -function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence) - local snext=getnext(start) - if snext then - local startchar=getchar(start) - local subtables=currentlookup.subtables - local lookupname=subtables[1] - local kerns=lookuphash[lookupname] - if kerns then - kerns=kerns[startchar] - if kerns then - local lookuptype=lookuptypes[lookupname] - local prev,done=start,false - local factor=tfmdata.parameters.factor - while snext and getid(snext)==glyph_code and getfont(snext)==currentfont and getsubtype(snext)<256 do - local nextchar=getchar(snext) - local krn=kerns[nextchar] - if not krn and marks[nextchar] then - prev=snext - snext=getnext(snext) - else - if not krn then - elseif type(krn)=="table" then - if lookuptype=="pair" then - local a,b=krn[2],krn[3] - if a and #a>0 then - local startchar=getchar(start) - local x,y,w,h=setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar]) - if trace_kerns then - logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h) - end - end - if b and #b>0 then - local startchar=getchar(start) - local x,y,w,h=setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar]) - if trace_kerns then - logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h) - end - end - else - report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname)) - local a,b=krn[2],krn[6] - if a and a~=0 then - local k=setkern(snext,factor,rlmode,a) - if trace_kerns then - logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar)) - end - end - if b and b~=0 then - logwarning("%s: ignoring second kern xoff %s",cref(kind,chainname,chainlookupname),b*factor) - end - end - done=true - elseif krn~=0 then - local k=setkern(snext,factor,rlmode,krn) - if trace_kerns then - logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar)) - end - done=true - end - break - end - end - return head,start,done - end - end - end - return head,start,false -end -chainmores.gpos_pair=chainprocs.gpos_pair -local function show_skip(kind,chainname,char,ck,class) - if ck[9] then - logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a, %a => %a",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10]) - else - logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a",cref(kind,chainname),gref(char),class,ck[1],ck[2]) - end -end -local quit_on_no_replacement=true -directives.register("otf.chain.quitonnoreplacement",function(value) - quit_on_no_replacement=value -end) -local function normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash) - local flags=sequence.flags - local done=false - local skipmark=flags[1] - local skipligature=flags[2] - local skipbase=flags[3] - local someskip=skipmark or skipligature or skipbase - local markclass=sequence.markclass - local skipped=false - for k=1,#contexts do - local match=true - local current=start - local last=start - local ck=contexts[k] - local seq=ck[3] - local s=#seq - if s==1 then - match=getid(current)==glyph_code and getfont(current)==currentfont and getsubtype(current)<256 and 
seq[1][getchar(current)] - else - local f,l=ck[4],ck[5] - if f==1 and f==l then - else - if f==l then - else - local n=f+1 - last=getnext(last) - while n<=l do - if last then - local id=getid(last) - if id==glyph_code then - if getfont(last)==currentfont and getsubtype(last)<256 then - local char=getchar(last) - local ccd=descriptions[char] - if ccd then - local class=ccd.class - if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then - skipped=true - if trace_skips then - show_skip(kind,chainname,char,ck,class) - end - last=getnext(last) - elseif seq[n][char] then - if n1 then - local prev=getprev(start) - if prev then - local n=f-1 - while n>=1 do - if prev then - local id=getid(prev) - if id==glyph_code then - if getfont(prev)==currentfont and getsubtype(prev)<256 then - local char=getchar(prev) - local ccd=descriptions[char] - if ccd then - local class=ccd.class - if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then - skipped=true - if trace_skips then - show_skip(kind,chainname,char,ck,class) - end - elseif seq[n][char] then - n=n -1 - else - match=false - break - end - else - match=false - break - end - else - match=false - break - end - elseif id==disc_code then - elseif seq[n][32] then - n=n -1 - else - match=false - break - end - prev=getprev(prev) - elseif seq[n][32] then - n=n -1 - else - match=false - break - end - end - elseif f==2 then - match=seq[1][32] - else - for n=f-1,1 do - if not seq[n][32] then - match=false - break - end - end - end - end - if match and s>l then - local current=last and getnext(last) - if current then - local n=l+1 - while n<=s do - if current then - local id=getid(current) - if id==glyph_code then - if getfont(current)==currentfont and getsubtype(current)<256 then - local char=getchar(current) - local ccd=descriptions[char] - if ccd then - local class=ccd.class - if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then - skipped=true - if trace_skips then - show_skip(kind,chainname,char,ck,class) - end - elseif seq[n][char] then - n=n+1 - else - match=false - break - end - else - match=false - break - end - else - match=false - break - end - elseif id==disc_code then - elseif seq[n][32] then - n=n+1 - else - match=false - break - end - current=getnext(current) - elseif seq[n][32] then - n=n+1 - else - match=false - break - end - end - elseif s-l==1 then - match=seq[s][32] - else - for n=l+1,s do - if not seq[n][32] then - match=false - break - end - end - end - end - end - if match then - if trace_contexts then - local rule,lookuptype,f,l=ck[1],ck[2],ck[4],ck[5] - local char=getchar(start) - if ck[9] then - logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a, %a => %a", - cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10]) - else - logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a", - cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype) - end - end - local chainlookups=ck[6] - if chainlookups then - local nofchainlookups=#chainlookups - if nofchainlookups==1 then - local chainlookupname=chainlookups[1] - local chainlookup=lookuptable[chainlookupname] - if chainlookup then - local cp=chainprocs[chainlookup.type] - if cp then - local ok - head,start,ok=cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence) - if ok then - 
done=true - end - else - logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type) - end - else - logprocess("%s is not yet supported",cref(kind,chainname,chainlookupname)) - end - else - local i=1 - while true do - if skipped then - while true do - local char=getchar(start) - local ccd=descriptions[char] - if ccd then - local class=ccd.class - if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then - start=getnext(start) - else - break - end - else - break - end - end - end - local chainlookupname=chainlookups[i] - local chainlookup=lookuptable[chainlookupname] - if not chainlookup then - i=i+1 - else - local cp=chainmores[chainlookup.type] - if not cp then - logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type) - i=i+1 - else - local ok,n - head,start,ok,n=cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence) - if ok then - done=true - i=i+(n or 1) - else - i=i+1 - end - end - end - if i>nofchainlookups then - break - elseif start then - start=getnext(start) - else - end - end - end - else - local replacements=ck[7] - if replacements then - head,start,done=chainprocs.reversesub(head,start,last,kind,chainname,ck,lookuphash,replacements) - else - done=quit_on_no_replacement - if trace_contexts then - logprocess("%s: skipping match",cref(kind,chainname)) - end - end - end - end - end - return head,start,done -end -local verbose_handle_contextchain=function(font,...) - logwarning("no verbose handler installed, reverting to 'normal'") - otf.setcontextchain() - return normal_handle_contextchain(...) -end -otf.chainhandlers={ - normal=normal_handle_contextchain, - verbose=verbose_handle_contextchain, -} -function otf.setcontextchain(method) - if not method or method=="normal" or not otf.chainhandlers[method] then - if handlers.contextchain then - logwarning("installing normal contextchain handler") - end - handlers.contextchain=normal_handle_contextchain - else - logwarning("installing contextchain handler %a",method) - local handler=otf.chainhandlers[method] - handlers.contextchain=function(...) - return handler(currentfont,...) - end - end - handlers.gsub_context=handlers.contextchain - handlers.gsub_contextchain=handlers.contextchain - handlers.gsub_reversecontextchain=handlers.contextchain - handlers.gpos_contextchain=handlers.contextchain - handlers.gpos_context=handlers.contextchain -end -otf.setcontextchain() -local missing={} -local function logprocess(...) - if trace_steps then - registermessage(...) - end - report_process(...) 
-end -local logwarning=report_process -local function report_missing_cache(typ,lookup) - local f=missing[currentfont] if not f then f={} missing[currentfont]=f end - local t=f[typ] if not t then t={} f[typ]=t end - if not t[lookup] then - t[lookup]=true - logwarning("missing cache for lookup %a, type %a, font %a, name %a",lookup,typ,currentfont,tfmdata.properties.fullname) - end -end -local resolved={} -local lookuphashes={} -setmetatableindex(lookuphashes,function(t,font) - local lookuphash=fontdata[font].resources.lookuphash - if not lookuphash or not next(lookuphash) then - lookuphash=false - end - t[font]=lookuphash - return lookuphash -end) -local autofeatures=fonts.analyzers.features -local function initialize(sequence,script,language,enabled) - local features=sequence.features - if features then - local order=sequence.order - if order then - for i=1,#order do - local kind=order[i] - local valid=enabled[kind] - if valid then - local scripts=features[kind] - local languages=scripts[script] or scripts[wildcard] - if languages and (languages[language] or languages[wildcard]) then - return { valid,autofeatures[kind] or false,sequence.chain or 0,kind,sequence } - end - end - end - else - end - end - return false -end -function otf.dataset(tfmdata,font) - local shared=tfmdata.shared - local properties=tfmdata.properties - local language=properties.language or "dflt" - local script=properties.script or "dflt" - local enabled=shared.features - local res=resolved[font] - if not res then - res={} - resolved[font]=res - end - local rs=res[script] - if not rs then - rs={} - res[script]=rs - end - local rl=rs[language] - if not rl then - rl={ - } - rs[language]=rl - local sequences=tfmdata.resources.sequences - for s=1,#sequences do - local v=enabled and initialize(sequences[s],script,language,enabled) - if v then - rl[#rl+1]=v - end - end - end - return rl -end -local function featuresprocessor(head,font,attr) - local lookuphash=lookuphashes[font] - if not lookuphash then - return head,false - end - head=tonut(head) - if trace_steps then - checkstep(head) - end - tfmdata=fontdata[font] - descriptions=tfmdata.descriptions - characters=tfmdata.characters - resources=tfmdata.resources - marks=resources.marks - anchorlookups=resources.lookup_to_anchor - lookuptable=resources.lookups - lookuptypes=resources.lookuptypes - lookuptags=resources.lookuptags - currentfont=font - rlmode=0 - local sequences=resources.sequences - local done=false - local datasets=otf.dataset(tfmdata,font,attr) - local dirstack={} - for s=1,#datasets do - local dataset=datasets[s] - featurevalue=dataset[1] - local sequence=dataset[5] - local rlparmode=0 - local topstack=0 - local success=false - local attribute=dataset[2] - local chain=dataset[3] - local typ=sequence.type - local subtables=sequence.subtables - if chain<0 then - local handler=handlers[typ] - local start=find_node_tail(head) - while start do - local id=getid(start) - if id==glyph_code then - if getfont(start)==font and getsubtype(start)<256 then - local a=getattr(start,0) - if a then - a=a==attr - else - a=true - end - if a then - for i=1,#subtables do - local lookupname=subtables[i] - local lookupcache=lookuphash[lookupname] - if lookupcache then - local lookupmatch=lookupcache[getchar(start)] - if lookupmatch then - head,start,success=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) - if success then - break - end - end - else - report_missing_cache(typ,lookupname) - end - end - if start then start=getprev(start) end - else - 
start=getprev(start) - end - else - start=getprev(start) - end - else - start=getprev(start) - end - end - else - local handler=handlers[typ] - local ns=#subtables - local start=head - rlmode=0 - if ns==1 then - local lookupname=subtables[1] - local lookupcache=lookuphash[lookupname] - if not lookupcache then - report_missing_cache(typ,lookupname) - else - local function subrun(start) - local head=start - local done=false - while start do - local id=getid(start) - if id==glyph_code and getfont(start)==font and getsubtype(start)<256 then - local a=getattr(start,0) - if a then - a=(a==attr) and (not attribute or getprop(start,a_state)==attribute) - else - a=not attribute or getprop(start,a_state)==attribute - end - if a then - local lookupmatch=lookupcache[getchar(start)] - if lookupmatch then - local ok - head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) - if ok then - done=true - end - end - if start then start=getnext(start) end - else - start=getnext(start) - end - else - start=getnext(start) - end - end - if done then - success=true - return head - end - end - local function kerndisc(disc) - local prev=getprev(disc) - local next=getnext(disc) - if prev and next then - setfield(prev,"next",next) - local a=getattr(prev,0) - if a then - a=(a==attr) and (not attribute or getprop(prev,a_state)==attribute) - else - a=not attribute or getprop(prev,a_state)==attribute - end - if a then - local lookupmatch=lookupcache[getchar(prev)] - if lookupmatch then - local h,d,ok=handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) - if ok then - done=true - success=true - end - end - end - setfield(prev,"next",disc) - end - return next - end - while start do - local id=getid(start) - if id==glyph_code then - if getfont(start)==font and getsubtype(start)<256 then - local a=getattr(start,0) - if a then - a=(a==attr) and (not attribute or getprop(start,a_state)==attribute) - else - a=not attribute or getprop(start,a_state)==attribute - end - if a then - local lookupmatch=lookupcache[getchar(start)] - if lookupmatch then - local ok - head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) - if ok then - success=true - end - end - if start then start=getnext(start) end - else - start=getnext(start) - end - else - start=getnext(start) - end - elseif id==disc_code then - if getsubtype(start)==discretionary_code then - local pre=getfield(start,"pre") - if pre then - local new=subrun(pre) - if new then setfield(start,"pre",new) end - end - local post=getfield(start,"post") - if post then - local new=subrun(post) - if new then setfield(start,"post",new) end - end - local replace=getfield(start,"replace") - if replace then - local new=subrun(replace) - if new then setfield(start,"replace",new) end - end -elseif typ=="gpos_single" or typ=="gpos_pair" then - kerndisc(start) - end - start=getnext(start) - elseif id==whatsit_code then - local subtype=getsubtype(start) - if subtype==dir_code then - local dir=getfield(start,"dir") - if dir=="+TRT" or dir=="+TLT" then - topstack=topstack+1 - dirstack[topstack]=dir - elseif dir=="-TRT" or dir=="-TLT" then - topstack=topstack-1 - end - local newdir=dirstack[topstack] - if newdir=="+TRT" then - rlmode=-1 - elseif newdir=="+TLT" then - rlmode=1 - else - rlmode=rlparmode - end - if trace_directions then - report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir) - end - elseif subtype==localpar_code then - local 
dir=getfield(start,"dir") - if dir=="TRT" then - rlparmode=-1 - elseif dir=="TLT" then - rlparmode=1 - else - rlparmode=0 - end - rlmode=rlparmode - if trace_directions then - report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode) - end - end - start=getnext(start) - elseif id==math_code then - start=getnext(end_of_math(start)) - else - start=getnext(start) - end - end - end - else - local function subrun(start) - local head=start - local done=false - while start do - local id=getid(start) - if id==glyph_code and getfont(start)==font and getsubtype(start)<256 then - local a=getattr(start,0) - if a then - a=(a==attr) and (not attribute or getprop(start,a_state)==attribute) - else - a=not attribute or getprop(start,a_state)==attribute - end - if a then - for i=1,ns do - local lookupname=subtables[i] - local lookupcache=lookuphash[lookupname] - if lookupcache then - local lookupmatch=lookupcache[getchar(start)] - if lookupmatch then - local ok - head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) - if ok then - done=true - break - elseif not start then - break - end - end - else - report_missing_cache(typ,lookupname) - end - end - if start then start=getnext(start) end - else - start=getnext(start) - end - else - start=getnext(start) - end - end - if done then - success=true - return head - end - end - local function kerndisc(disc) - local prev=getprev(disc) - local next=getnext(disc) - if prev and next then - setfield(prev,"next",next) - local a=getattr(prev,0) - if a then - a=(a==attr) and (not attribute or getprop(prev,a_state)==attribute) - else - a=not attribute or getprop(prev,a_state)==attribute - end - if a then - for i=1,ns do - local lookupname=subtables[i] - local lookupcache=lookuphash[lookupname] - if lookupcache then - local lookupmatch=lookupcache[getchar(prev)] - if lookupmatch then - local h,d,ok=handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) - if ok then - done=true - break - end - end - else - report_missing_cache(typ,lookupname) - end - end - end - setfield(prev,"next",disc) - end - return next - end - while start do - local id=getid(start) - if id==glyph_code then - if getfont(start)==font and getsubtype(start)<256 then - local a=getattr(start,0) - if a then - a=(a==attr) and (not attribute or getprop(start,a_state)==attribute) - else - a=not attribute or getprop(start,a_state)==attribute - end - if a then - for i=1,ns do - local lookupname=subtables[i] - local lookupcache=lookuphash[lookupname] - if lookupcache then - local lookupmatch=lookupcache[getchar(start)] - if lookupmatch then - local ok - head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) - if ok then - success=true - break - elseif not start then - break - end - end - else - report_missing_cache(typ,lookupname) - end - end - if start then start=getnext(start) end - else - start=getnext(start) - end - else - start=getnext(start) - end - elseif id==disc_code then - if getsubtype(start)==discretionary_code then - local pre=getfield(start,"pre") - if pre then - local new=subrun(pre) - if new then setfield(start,"pre",new) end - end - local post=getfield(start,"post") - if post then - local new=subrun(post) - if new then setfield(start,"post",new) end - end - local replace=getfield(start,"replace") - if replace then - local new=subrun(replace) - if new then setfield(start,"replace",new) end - end -elseif typ=="gpos_single" or typ=="gpos_pair" then - kerndisc(start) - end - 
start=getnext(start) - elseif id==whatsit_code then - local subtype=getsubtype(start) - if subtype==dir_code then - local dir=getfield(start,"dir") - if dir=="+TRT" or dir=="+TLT" then - topstack=topstack+1 - dirstack[topstack]=dir - elseif dir=="-TRT" or dir=="-TLT" then - topstack=topstack-1 - end - local newdir=dirstack[topstack] - if newdir=="+TRT" then - rlmode=-1 - elseif newdir=="+TLT" then - rlmode=1 - else - rlmode=rlparmode - end - if trace_directions then - report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir) - end - elseif subtype==localpar_code then - local dir=getfield(start,"dir") - if dir=="TRT" then - rlparmode=-1 - elseif dir=="TLT" then - rlparmode=1 - else - rlparmode=0 - end - rlmode=rlparmode - if trace_directions then - report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode) - end - end - start=getnext(start) - elseif id==math_code then - start=getnext(end_of_math(start)) - else - start=getnext(start) - end - end - end - end - if success then - done=true - end - if trace_steps then - registerstep(head) - end - end - head=tonode(head) - return head,done -end -local function generic(lookupdata,lookupname,unicode,lookuphash) - local target=lookuphash[lookupname] - if target then - target[unicode]=lookupdata - else - lookuphash[lookupname]={ [unicode]=lookupdata } - end -end -local action={ - substitution=generic, - multiple=generic, - alternate=generic, - position=generic, - ligature=function(lookupdata,lookupname,unicode,lookuphash) - local target=lookuphash[lookupname] - if not target then - target={} - lookuphash[lookupname]=target - end - for i=1,#lookupdata do - local li=lookupdata[i] - local tu=target[li] - if not tu then - tu={} - target[li]=tu - end - target=tu - end - target.ligature=unicode - end, - pair=function(lookupdata,lookupname,unicode,lookuphash) - local target=lookuphash[lookupname] - if not target then - target={} - lookuphash[lookupname]=target - end - local others=target[unicode] - local paired=lookupdata[1] - if others then - others[paired]=lookupdata - else - others={ [paired]=lookupdata } - target[unicode]=others - end - end, -} -local function prepare_lookups(tfmdata) - local rawdata=tfmdata.shared.rawdata - local resources=rawdata.resources - local lookuphash=resources.lookuphash - local anchor_to_lookup=resources.anchor_to_lookup - local lookup_to_anchor=resources.lookup_to_anchor - local lookuptypes=resources.lookuptypes - local characters=tfmdata.characters - local descriptions=tfmdata.descriptions - for unicode,character in next,characters do - local description=descriptions[unicode] - if description then - local lookups=description.slookups - if lookups then - for lookupname,lookupdata in next,lookups do - action[lookuptypes[lookupname]](lookupdata,lookupname,unicode,lookuphash) - end - end - local lookups=description.mlookups - if lookups then - for lookupname,lookuplist in next,lookups do - local lookuptype=lookuptypes[lookupname] - for l=1,#lookuplist do - local lookupdata=lookuplist[l] - action[lookuptype](lookupdata,lookupname,unicode,lookuphash) - end - end - end - local list=description.kerns - if list then - for lookup,krn in next,list do - local target=lookuphash[lookup] - if target then - target[unicode]=krn - else - lookuphash[lookup]={ [unicode]=krn } - end - end - end - local list=description.anchors - if list then - for typ,anchors in next,list do - if typ=="mark" or typ=="cexit" then - for name,anchor in 
next,anchors do - local lookups=anchor_to_lookup[name] - if lookups then - for lookup,_ in next,lookups do - local target=lookuphash[lookup] - if target then - target[unicode]=anchors - else - lookuphash[lookup]={ [unicode]=anchors } - end - end - end - end - end - end - end - end - end -end -local function split(replacement,original) - local result={} - for i=1,#replacement do - result[original[i]]=replacement[i] - end - return result -end -local valid={ - coverage={ chainsub=true,chainpos=true,contextsub=true }, - reversecoverage={ reversesub=true }, - glyphs={ chainsub=true,chainpos=true }, -} -local function prepare_contextchains(tfmdata) - local rawdata=tfmdata.shared.rawdata - local resources=rawdata.resources - local lookuphash=resources.lookuphash - local lookuptags=resources.lookuptags - local lookups=rawdata.lookups - if lookups then - for lookupname,lookupdata in next,rawdata.lookups do - local lookuptype=lookupdata.type - if lookuptype then - local rules=lookupdata.rules - if rules then - local format=lookupdata.format - local validformat=valid[format] - if not validformat then - report_prepare("unsupported format %a",format) - elseif not validformat[lookuptype] then - report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookuptags[lookupname]) - else - local contexts=lookuphash[lookupname] - if not contexts then - contexts={} - lookuphash[lookupname]=contexts - end - local t,nt={},0 - for nofrules=1,#rules do - local rule=rules[nofrules] - local current=rule.current - local before=rule.before - local after=rule.after - local replacements=rule.replacements - local sequence={} - local nofsequences=0 - if before then - for n=1,#before do - nofsequences=nofsequences+1 - sequence[nofsequences]=before[n] - end - end - local start=nofsequences+1 - for n=1,#current do - nofsequences=nofsequences+1 - sequence[nofsequences]=current[n] - end - local stop=nofsequences - if after then - for n=1,#after do - nofsequences=nofsequences+1 - sequence[nofsequences]=after[n] - end - end - if sequence[1] then - nt=nt+1 - t[nt]={ nofrules,lookuptype,sequence,start,stop,rule.lookups,replacements } - for unic,_ in next,sequence[start] do - local cu=contexts[unic] - if not cu then - contexts[unic]=t - end - end - end - end - end - else - end - else - report_prepare("missing lookuptype for lookupname %a",lookuptags[lookupname]) - end - end - end -end -local function featuresinitializer(tfmdata,value) - if true then - local rawdata=tfmdata.shared.rawdata - local properties=rawdata.properties - if not properties.initialized then - local starttime=trace_preparing and os.clock() - local resources=rawdata.resources - resources.lookuphash=resources.lookuphash or {} - prepare_contextchains(tfmdata) - prepare_lookups(tfmdata) - properties.initialized=true - if trace_preparing then - report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,tfmdata.properties.fullname) - end - end - end -end -registerotffeature { - name="features", - description="features", - default=true, - initializers={ - position=1, - node=featuresinitializer, - }, - processors={ - node=featuresprocessor, - } -} -otf.handlers=handlers - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-otp']={ - version=1.001, - comment="companion to font-otf.lua (packing)", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} 
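The preceding font-otn closure decides, per font instance, which GSUB/GPOS sequences actually run: otf.dataset caches, per script and language, the result of initialize(), which keeps a sequence only when one of its features is enabled and matches the script/language (with "*" as wildcard). What follows is a minimal standalone sketch of that selection step; it is an editor's illustration with invented sequence data, not part of the moved sources, and it ignores the order/priority handling of the real code.

-- Editor's sketch, not from the fontloader sources: simplified feature
-- selection in the spirit of otf.dataset/initialize above.
local wildcard = "*"

local function select_sequences(sequences, script, language, enabled)
  local selected = { }
  for s = 1, #sequences do
    local sequence = sequences[s]
    local features = sequence.features
    if features then
      for kind, scripts in next, features do   -- the real code walks sequence.order
        local value = enabled[kind]
        if value then
          local languages = scripts[script] or scripts[wildcard]
          if languages and (languages[language] or languages[wildcard]) then
            selected[#selected+1] = { value, kind, sequence }
            break
          end
        end
      end
    end
  end
  return selected
end

-- hypothetical data, for illustration only
local sequences = {
  { name = "s_liga", features = { liga = { latn = { dflt = true } } } },
  { name = "s_smcp", features = { smcp = { latn = { dflt = true } } } },
}
local picked = select_sequences(sequences, "latn", "dflt", { liga = true })
assert(#picked == 1 and picked[1][3].name == "s_liga")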
-local next,type=next,type -local sort,concat=table.sort,table.concat -local sortedhash=table.sortedhash -local trace_packing=false trackers.register("otf.packing",function(v) trace_packing=v end) -local trace_loading=false trackers.register("otf.loading",function(v) trace_loading=v end) -local report_otf=logs.reporter("fonts","otf loading") -fonts=fonts or {} -local handlers=fonts.handlers or {} -fonts.handlers=handlers -local otf=handlers.otf or {} -handlers.otf=otf -local enhancers=otf.enhancers or {} -otf.enhancers=enhancers -local glists=otf.glists or { "gsub","gpos" } -otf.glists=glists -local criterium=1 -local threshold=0 -local function tabstr_normal(t) - local s={} - local n=0 - for k,v in next,t do - n=n+1 - if type(v)=="table" then - s[n]=k..">"..tabstr_normal(v) - elseif v==true then - s[n]=k.."+" - elseif v then - s[n]=k.."="..v - else - s[n]=k.."-" - end - end - if n==0 then - return "" - elseif n==1 then - return s[1] - else - sort(s) - return concat(s,",") - end -end -local function tabstr_flat(t) - local s={} - local n=0 - for k,v in next,t do - n=n+1 - s[n]=k.."="..v - end - if n==0 then - return "" - elseif n==1 then - return s[1] - else - sort(s) - return concat(s,",") - end -end -local function tabstr_mixed(t) - local s={} - local n=#t - if n==0 then - return "" - elseif n==1 then - local k=t[1] - if k==true then - return "++" - elseif k==false then - return "--" - else - return tostring(k) - end - else - for i=1,n do - local k=t[i] - if k==true then - s[i]="++" - elseif k==false then - s[i]="--" - else - s[i]=k - end - end - return concat(s,",") - end -end -local function tabstr_boolean(t) - local s={} - local n=0 - for k,v in next,t do - n=n+1 - if v then - s[n]=k.."+" - else - s[n]=k.."-" - end - end - if n==0 then - return "" - elseif n==1 then - return s[1] - else - sort(s) - return concat(s,",") - end -end -local function packdata(data) - if data then - local h,t,c={},{},{} - local hh,tt,cc={},{},{} - local nt,ntt=0,0 - local function pack_normal(v) - local tag=tabstr_normal(v) - local ht=h[tag] - if ht then - c[ht]=c[ht]+1 - return ht - else - nt=nt+1 - t[nt]=v - h[tag]=nt - c[nt]=1 - return nt - end - end - local function pack_flat(v) - local tag=tabstr_flat(v) - local ht=h[tag] - if ht then - c[ht]=c[ht]+1 - return ht - else - nt=nt+1 - t[nt]=v - h[tag]=nt - c[nt]=1 - return nt - end - end - local function pack_boolean(v) - local tag=tabstr_boolean(v) - local ht=h[tag] - if ht then - c[ht]=c[ht]+1 - return ht - else - nt=nt+1 - t[nt]=v - h[tag]=nt - c[nt]=1 - return nt - end - end - local function pack_indexed(v) - local tag=concat(v," ") - local ht=h[tag] - if ht then - c[ht]=c[ht]+1 - return ht - else - nt=nt+1 - t[nt]=v - h[tag]=nt - c[nt]=1 - return nt - end - end - local function pack_mixed(v) - local tag=tabstr_mixed(v) - local ht=h[tag] - if ht then - c[ht]=c[ht]+1 - return ht - else - nt=nt+1 - t[nt]=v - h[tag]=nt - c[nt]=1 - return nt - end - end - local function pack_final(v) - if c[v]<=criterium then - return t[v] - else - local hv=hh[v] - if hv then - return hv - else - ntt=ntt+1 - tt[ntt]=t[v] - hh[v]=ntt - cc[ntt]=c[v] - return ntt - end - end - end - local function success(stage,pass) - if nt==0 then - if trace_loading or trace_packing then - report_otf("pack quality: nothing to pack") - end - return false - elseif nt>=threshold then - local one,two,rest=0,0,0 - if pass==1 then - for k,v in next,c do - if v==1 then - one=one+1 - elseif v==2 then - two=two+1 - else - rest=rest+1 - end - end - else - for k,v in next,cc do - if v>20 then - 
rest=rest+1 - elseif v>10 then - two=two+1 - else - one=one+1 - end - end - data.tables=tt - end - if trace_loading or trace_packing then - report_otf("pack quality: stage %s, pass %s, %s packed, 1-10:%s, 11-20:%s, rest:%s (criterium: %s)",stage,pass,one+two+rest,one,two,rest,criterium) - end - return true - else - if trace_loading or trace_packing then - report_otf("pack quality: stage %s, pass %s, %s packed, aborting pack (threshold: %s)",stage,pass,nt,threshold) - end - return false - end - end - local function packers(pass) - if pass==1 then - return pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed - else - return pack_final,pack_final,pack_final,pack_final,pack_final - end - end - local resources=data.resources - local lookuptypes=resources.lookuptypes - for pass=1,2 do - if trace_packing then - report_otf("start packing: stage 1, pass %s",pass) - end - local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass) - for unicode,description in next,data.descriptions do - local boundingbox=description.boundingbox - if boundingbox then - description.boundingbox=pack_indexed(boundingbox) - end - local slookups=description.slookups - if slookups then - for tag,slookup in next,slookups do - local what=lookuptypes[tag] - if what=="pair" then - local t=slookup[2] if t then slookup[2]=pack_indexed(t) end - local t=slookup[3] if t then slookup[3]=pack_indexed(t) end - elseif what~="substitution" then - slookups[tag]=pack_indexed(slookup) - end - end - end - local mlookups=description.mlookups - if mlookups then - for tag,mlookup in next,mlookups do - local what=lookuptypes[tag] - if what=="pair" then - for i=1,#mlookup do - local lookup=mlookup[i] - local t=lookup[2] if t then lookup[2]=pack_indexed(t) end - local t=lookup[3] if t then lookup[3]=pack_indexed(t) end - end - elseif what~="substitution" then - for i=1,#mlookup do - mlookup[i]=pack_indexed(mlookup[i]) - end - end - end - end - local kerns=description.kerns - if kerns then - for tag,kern in next,kerns do - kerns[tag]=pack_flat(kern) - end - end - local math=description.math - if math then - local kerns=math.kerns - if kerns then - for tag,kern in next,kerns do - kerns[tag]=pack_normal(kern) - end - end - end - local anchors=description.anchors - if anchors then - for what,anchor in next,anchors do - if what=="baselig" then - for _,a in next,anchor do - for k=1,#a do - a[k]=pack_indexed(a[k]) - end - end - else - for k,v in next,anchor do - anchor[k]=pack_indexed(v) - end - end - end - end - local altuni=description.altuni - if altuni then - for i=1,#altuni do - altuni[i]=pack_flat(altuni[i]) - end - end - end - local lookups=data.lookups - if lookups then - for _,lookup in next,lookups do - local rules=lookup.rules - if rules then - for i=1,#rules do - local rule=rules[i] - local r=rule.before if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end - local r=rule.after if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end - local r=rule.current if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end - local r=rule.replacements if r then rule.replacements=pack_flat (r) end - local r=rule.lookups if r then rule.lookups=pack_indexed(r) end - end - end - end - end - local anchor_to_lookup=resources.anchor_to_lookup - if anchor_to_lookup then - for anchor,lookup in next,anchor_to_lookup do - anchor_to_lookup[anchor]=pack_normal(lookup) - end - end - local lookup_to_anchor=resources.lookup_to_anchor - if lookup_to_anchor then - for lookup,anchor in next,lookup_to_anchor do - 
lookup_to_anchor[lookup]=pack_normal(anchor) - end - end - local sequences=resources.sequences - if sequences then - for feature,sequence in next,sequences do - local flags=sequence.flags - if flags then - sequence.flags=pack_normal(flags) - end - local subtables=sequence.subtables - if subtables then - sequence.subtables=pack_normal(subtables) - end - local features=sequence.features - if features then - for script,feature in next,features do - features[script]=pack_normal(feature) - end - end - local order=sequence.order - if order then - sequence.order=pack_indexed(order) - end - local markclass=sequence.markclass - if markclass then - sequence.markclass=pack_boolean(markclass) - end - end - end - local lookups=resources.lookups - if lookups then - for name,lookup in next,lookups do - local flags=lookup.flags - if flags then - lookup.flags=pack_normal(flags) - end - local subtables=lookup.subtables - if subtables then - lookup.subtables=pack_normal(subtables) - end - end - end - local features=resources.features - if features then - for _,what in next,glists do - local list=features[what] - if list then - for feature,spec in next,list do - list[feature]=pack_normal(spec) - end - end - end - end - if not success(1,pass) then - return - end - end - if nt>0 then - for pass=1,2 do - if trace_packing then - report_otf("start packing: stage 2, pass %s",pass) - end - local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass) - for unicode,description in next,data.descriptions do - local kerns=description.kerns - if kerns then - description.kerns=pack_normal(kerns) - end - local math=description.math - if math then - local kerns=math.kerns - if kerns then - math.kerns=pack_normal(kerns) - end - end - local anchors=description.anchors - if anchors then - description.anchors=pack_normal(anchors) - end - local mlookups=description.mlookups - if mlookups then - for tag,mlookup in next,mlookups do - mlookups[tag]=pack_normal(mlookup) - end - end - local altuni=description.altuni - if altuni then - description.altuni=pack_normal(altuni) - end - end - local lookups=data.lookups - if lookups then - for _,lookup in next,lookups do - local rules=lookup.rules - if rules then - for i=1,#rules do - local rule=rules[i] - local r=rule.before if r then rule.before=pack_normal(r) end - local r=rule.after if r then rule.after=pack_normal(r) end - local r=rule.current if r then rule.current=pack_normal(r) end - end - end - end - end - local sequences=resources.sequences - if sequences then - for feature,sequence in next,sequences do - sequence.features=pack_normal(sequence.features) - end - end - if not success(2,pass) then - end - end - for pass=1,2 do - local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass) - for unicode,description in next,data.descriptions do - local slookups=description.slookups - if slookups then - description.slookups=pack_normal(slookups) - end - local mlookups=description.mlookups - if mlookups then - description.mlookups=pack_normal(mlookups) - end - end - end - end - end -end -local unpacked_mt={ - __index=function(t,k) - t[k]=false - return k - end -} -local function unpackdata(data) - if data then - local tables=data.tables - if tables then - local resources=data.resources - local lookuptypes=resources.lookuptypes - local unpacked={} - setmetatable(unpacked,unpacked_mt) - for unicode,description in next,data.descriptions do - local tv=tables[description.boundingbox] - if tv then - description.boundingbox=tv - end - local 
slookups=description.slookups - if slookups then - local tv=tables[slookups] - if tv then - description.slookups=tv - slookups=unpacked[tv] - end - if slookups then - for tag,lookup in next,slookups do - local what=lookuptypes[tag] - if what=="pair" then - local tv=tables[lookup[2]] - if tv then - lookup[2]=tv - end - local tv=tables[lookup[3]] - if tv then - lookup[3]=tv - end - elseif what~="substitution" then - local tv=tables[lookup] - if tv then - slookups[tag]=tv - end - end - end - end - end - local mlookups=description.mlookups - if mlookups then - local tv=tables[mlookups] - if tv then - description.mlookups=tv - mlookups=unpacked[tv] - end - if mlookups then - for tag,list in next,mlookups do - local tv=tables[list] - if tv then - mlookups[tag]=tv - list=unpacked[tv] - end - if list then - local what=lookuptypes[tag] - if what=="pair" then - for i=1,#list do - local lookup=list[i] - local tv=tables[lookup[2]] - if tv then - lookup[2]=tv - end - local tv=tables[lookup[3]] - if tv then - lookup[3]=tv - end - end - elseif what~="substitution" then - for i=1,#list do - local tv=tables[list[i]] - if tv then - list[i]=tv - end - end - end - end - end - end - end - local kerns=description.kerns - if kerns then - local tm=tables[kerns] - if tm then - description.kerns=tm - kerns=unpacked[tm] - end - if kerns then - for k,kern in next,kerns do - local tv=tables[kern] - if tv then - kerns[k]=tv - end - end - end - end - local math=description.math - if math then - local kerns=math.kerns - if kerns then - local tm=tables[kerns] - if tm then - math.kerns=tm - kerns=unpacked[tm] - end - if kerns then - for k,kern in next,kerns do - local tv=tables[kern] - if tv then - kerns[k]=tv - end - end - end - end - end - local anchors=description.anchors - if anchors then - local ta=tables[anchors] - if ta then - description.anchors=ta - anchors=unpacked[ta] - end - if anchors then - for tag,anchor in next,anchors do - if tag=="baselig" then - for _,list in next,anchor do - for i=1,#list do - local tv=tables[list[i]] - if tv then - list[i]=tv - end - end - end - else - for a,data in next,anchor do - local tv=tables[data] - if tv then - anchor[a]=tv - end - end - end - end - end - end - local altuni=description.altuni - if altuni then - local altuni=tables[altuni] - if altuni then - description.altuni=altuni - for i=1,#altuni do - local tv=tables[altuni[i]] - if tv then - altuni[i]=tv - end - end - end - end - end - local lookups=data.lookups - if lookups then - for _,lookup in next,lookups do - local rules=lookup.rules - if rules then - for i=1,#rules do - local rule=rules[i] - local before=rule.before - if before then - local tv=tables[before] - if tv then - rule.before=tv - before=unpacked[tv] - end - if before then - for i=1,#before do - local tv=tables[before[i]] - if tv then - before[i]=tv - end - end - end - end - local after=rule.after - if after then - local tv=tables[after] - if tv then - rule.after=tv - after=unpacked[tv] - end - if after then - for i=1,#after do - local tv=tables[after[i]] - if tv then - after[i]=tv - end - end - end - end - local current=rule.current - if current then - local tv=tables[current] - if tv then - rule.current=tv - current=unpacked[tv] - end - if current then - for i=1,#current do - local tv=tables[current[i]] - if tv then - current[i]=tv - end - end - end - end - local replacements=rule.replacements - if replacements then - local tv=tables[replacements] - if tv then - rule.replacements=tv - end - end - local lookups=rule.lookups - if lookups then - local 
tv=tables[lookups] - if tv then - rule.lookups=tv - end - end - end - end - end - end - local anchor_to_lookup=resources.anchor_to_lookup - if anchor_to_lookup then - for anchor,lookup in next,anchor_to_lookup do - local tv=tables[lookup] - if tv then - anchor_to_lookup[anchor]=tv - end - end - end - local lookup_to_anchor=resources.lookup_to_anchor - if lookup_to_anchor then - for lookup,anchor in next,lookup_to_anchor do - local tv=tables[anchor] - if tv then - lookup_to_anchor[lookup]=tv - end - end - end - local ls=resources.sequences - if ls then - for _,feature in next,ls do - local flags=feature.flags - if flags then - local tv=tables[flags] - if tv then - feature.flags=tv - end - end - local subtables=feature.subtables - if subtables then - local tv=tables[subtables] - if tv then - feature.subtables=tv - end - end - local features=feature.features - if features then - local tv=tables[features] - if tv then - feature.features=tv - features=unpacked[tv] - end - if features then - for script,data in next,features do - local tv=tables[data] - if tv then - features[script]=tv - end - end - end - end - local order=feature.order - if order then - local tv=tables[order] - if tv then - feature.order=tv - end - end - local markclass=feature.markclass - if markclass then - local tv=tables[markclass] - if tv then - feature.markclass=tv - end - end - end - end - local lookups=resources.lookups - if lookups then - for _,lookup in next,lookups do - local flags=lookup.flags - if flags then - local tv=tables[flags] - if tv then - lookup.flags=tv - end - end - local subtables=lookup.subtables - if subtables then - local tv=tables[subtables] - if tv then - lookup.subtables=tv - end - end - end - end - local features=resources.features - if features then - for _,what in next,glists do - local feature=features[what] - if feature then - for tag,spec in next,feature do - local tv=tables[spec] - if tv then - feature[tag]=tv - end - end - end - end - end - data.tables=nil - end - end -end -if otf.enhancers.register then - otf.enhancers.register("pack",packdata) - otf.enhancers.register("unpack",unpackdata) -end -otf.enhancers.unpack=unpackdata -otf.enhancers.pack=packdata - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['luatex-fonts-lua']={ - version=1.001, - comment="companion to luatex-*.tex", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end -local fonts=fonts -fonts.formats.lua="lua" -function fonts.readers.lua(specification) - local fullname=specification.filename or "" - if fullname=="" then - local forced=specification.forced or "" - if forced~="" then - fullname=specification.name.."."..forced - else - fullname=specification.name - end - end - local fullname=resolvers.findfile(fullname) or "" - if fullname~="" then - local loader=loadfile(fullname) - loader=loader and loader() - return loader and loader(specification) - end -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-def']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local 
format,gmatch,match,find,lower,gsub=string.format,string.gmatch,string.match,string.find,string.lower,string.gsub -local tostring,next=tostring,next -local lpegmatch=lpeg.match -local suffixonly,removesuffix=file.suffix,file.removesuffix -local allocate=utilities.storage.allocate -local trace_defining=false trackers .register("fonts.defining",function(v) trace_defining=v end) -local directive_embedall=false directives.register("fonts.embedall",function(v) directive_embedall=v end) -trackers.register("fonts.loading","fonts.defining","otf.loading","afm.loading","tfm.loading") -trackers.register("fonts.all","fonts.*","otf.*","afm.*","tfm.*") -local report_defining=logs.reporter("fonts","defining") -local fonts=fonts -local fontdata=fonts.hashes.identifiers -local readers=fonts.readers -local definers=fonts.definers -local specifiers=fonts.specifiers -local constructors=fonts.constructors -local fontgoodies=fonts.goodies -readers.sequence=allocate { 'otf','ttf','afm','tfm','lua' } -local variants=allocate() -specifiers.variants=variants -definers.methods=definers.methods or {} -local internalized=allocate() -local lastdefined=nil -local loadedfonts=constructors.loadedfonts -local designsizes=constructors.designsizes -local resolvefile=fontgoodies and fontgoodies.filenames and fontgoodies.filenames.resolve or function(s) return s end -local splitter,splitspecifiers=nil,"" -local P,C,S,Cc=lpeg.P,lpeg.C,lpeg.S,lpeg.Cc -local left=P("(") -local right=P(")") -local colon=P(":") -local space=P(" ") -definers.defaultlookup="file" -local prefixpattern=P(false) -local function addspecifier(symbol) - splitspecifiers=splitspecifiers..symbol - local method=S(splitspecifiers) - local lookup=C(prefixpattern)*colon - local sub=left*C(P(1-left-right-method)^1)*right - local specification=C(method)*C(P(1)^1) - local name=C((1-sub-specification)^1) - splitter=P((lookup+Cc(""))*name*(sub+Cc(""))*(specification+Cc(""))) -end -local function addlookup(str,default) - prefixpattern=prefixpattern+P(str) -end -definers.addlookup=addlookup -addlookup("file") -addlookup("name") -addlookup("spec") -local function getspecification(str) - return lpegmatch(splitter,str or "") -end -definers.getspecification=getspecification -function definers.registersplit(symbol,action,verbosename) - addspecifier(symbol) - variants[symbol]=action - if verbosename then - variants[verbosename]=action - end -end -local function makespecification(specification,lookup,name,sub,method,detail,size) - size=size or 655360 - if not lookup or lookup=="" then - lookup=definers.defaultlookup - end - if trace_defining then - report_defining("specification %a, lookup %a, name %a, sub %a, method %a, detail %a", - specification,lookup,name,sub,method,detail) - end - local t={ - lookup=lookup, - specification=specification, - size=size, - name=name, - sub=sub, - method=method, - detail=detail, - resolved="", - forced="", - features={}, - } - return t -end -definers.makespecification=makespecification -function definers.analyze(specification,size) - local lookup,name,sub,method,detail=getspecification(specification or "") - return makespecification(specification,lookup,name,sub,method,detail,size) -end -definers.resolvers=definers.resolvers or {} -local resolvers=definers.resolvers -function resolvers.file(specification) - local name=resolvefile(specification.name) - local suffix=lower(suffixonly(name)) - if fonts.formats[suffix] then - specification.forced=suffix - specification.forcedname=name - specification.name=removesuffix(name) - else - 
specification.name=name - end -end -function resolvers.name(specification) - local resolve=fonts.names.resolve - if resolve then - local resolved,sub=resolve(specification.name,specification.sub,specification) - if resolved then - specification.resolved=resolved - specification.sub=sub - local suffix=lower(suffixonly(resolved)) - if fonts.formats[suffix] then - specification.forced=suffix - specification.forcedname=resolved - specification.name=removesuffix(resolved) - else - specification.name=resolved - end - end - else - resolvers.file(specification) - end -end -function resolvers.spec(specification) - local resolvespec=fonts.names.resolvespec - if resolvespec then - local resolved,sub=resolvespec(specification.name,specification.sub,specification) - if resolved then - specification.resolved=resolved - specification.sub=sub - specification.forced=lower(suffixonly(resolved)) - specification.forcedname=resolved - specification.name=removesuffix(resolved) - end - else - resolvers.name(specification) - end -end -function definers.resolve(specification) - if not specification.resolved or specification.resolved=="" then - local r=resolvers[specification.lookup] - if r then - r(specification) - end - end - if specification.forced=="" then - specification.forced=nil - specification.forcedname=nil - end - specification.hash=lower(specification.name..' @ '..constructors.hashfeatures(specification)) - if specification.sub and specification.sub~="" then - specification.hash=specification.sub..' @ '..specification.hash - end - return specification -end -function definers.applypostprocessors(tfmdata) - local postprocessors=tfmdata.postprocessors - if postprocessors then - local properties=tfmdata.properties - for i=1,#postprocessors do - local extrahash=postprocessors[i](tfmdata) - if type(extrahash)=="string" and extrahash~="" then - extrahash=gsub(lower(extrahash),"[^a-z]","-") - properties.fullname=format("%s-%s",properties.fullname,extrahash) - end - end - end - return tfmdata -end -local function checkembedding(tfmdata) - local properties=tfmdata.properties - local embedding - if directive_embedall then - embedding="full" - elseif properties and properties.filename and constructors.dontembed[properties.filename] then - embedding="no" - else - embedding="subset" - end - if properties then - properties.embedding=embedding - else - tfmdata.properties={ embedding=embedding } - end - tfmdata.embedding=embedding -end -function definers.loadfont(specification) - local hash=constructors.hashinstance(specification) - local tfmdata=loadedfonts[hash] - if not tfmdata then - local forced=specification.forced or "" - if forced~="" then - local reader=readers[lower(forced)] - tfmdata=reader and reader(specification) - if not tfmdata then - report_defining("forced type %a of %a not found",forced,specification.name) - end - else - local sequence=readers.sequence - for s=1,#sequence do - local reader=sequence[s] - if readers[reader] then - if trace_defining then - report_defining("trying (reader sequence driven) type %a for %a with file %a",reader,specification.name,specification.filename) - end - tfmdata=readers[reader](specification) - if tfmdata then - break - else - specification.filename=nil - end - end - end - end - if tfmdata then - tfmdata=definers.applypostprocessors(tfmdata) - checkembedding(tfmdata) - loadedfonts[hash]=tfmdata - designsizes[specification.hash]=tfmdata.parameters.designsize - end - end - if not tfmdata then - report_defining("font with asked name %a is not found using lookup 
%a",specification.name,specification.lookup) - end - return tfmdata -end -function constructors.checkvirtualids() -end -function constructors.readanddefine(name,size) - local specification=definers.analyze(name,size) - local method=specification.method - if method and variants[method] then - specification=variants[method](specification) - end - specification=definers.resolve(specification) - local hash=constructors.hashinstance(specification) - local id=definers.registered(hash) - if not id then - local tfmdata=definers.loadfont(specification) - if tfmdata then - tfmdata.properties.hash=hash - constructors.checkvirtualids(tfmdata) - id=font.define(tfmdata) - definers.register(tfmdata,id) - else - id=0 - end - end - return fontdata[id],id -end -function definers.current() - return lastdefined -end -function definers.registered(hash) - local id=internalized[hash] - return id,id and fontdata[id] -end -function definers.register(tfmdata,id) - if tfmdata and id then - local hash=tfmdata.properties.hash - if not hash then - report_defining("registering font, id %a, name %a, invalid hash",id,tfmdata.properties.filename or "?") - elseif not internalized[hash] then - internalized[hash]=id - if trace_defining then - report_defining("registering font, id %s, hash %a",id,hash) - end - fontdata[id]=tfmdata - end - end -end -function definers.read(specification,size,id) - statistics.starttiming(fonts) - if type(specification)=="string" then - specification=definers.analyze(specification,size) - end - local method=specification.method - if method and variants[method] then - specification=variants[method](specification) - end - specification=definers.resolve(specification) - local hash=constructors.hashinstance(specification) - local tfmdata=definers.registered(hash) - if tfmdata then - if trace_defining then - report_defining("already hashed: %s",hash) - end - else - tfmdata=definers.loadfont(specification) - if tfmdata then - if trace_defining then - report_defining("loaded and hashed: %s",hash) - end - tfmdata.properties.hash=hash - if id then - definers.register(tfmdata,id) - end - else - if trace_defining then - report_defining("not loaded and hashed: %s",hash) - end - end - end - lastdefined=tfmdata or id - if not tfmdata then - report_defining("unknown font %a, loading aborted",specification.name) - elseif trace_defining and type(tfmdata)=="table" then - local properties=tfmdata.properties or {} - local parameters=tfmdata.parameters or {} - report_defining("using %a font with id %a, name %a, size %a, bytes %a, encoding %a, fullname %a, filename %a", - properties.format or "unknown",id,properties.name,parameters.size,properties.encodingbytes, - properties.encodingname,properties.fullname,file.basename(properties.filename)) - end - statistics.stoptiming(fonts) - return tfmdata -end -function font.getfont(id) - return fontdata[id] -end -callbacks.register('define_font',definers.read,"definition of fonts (tfmdata preparation)") - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['luatex-font-def']={ - version=1.001, - comment="companion to luatex-*.tex", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end -local fonts=fonts -fonts.constructors.namemode="specification" -function fonts.definers.getspecification(str) - return 
"",str,"",":",str -end -local list={} -local function issome () list.lookup='name' end -local function isfile () list.lookup='file' end -local function isname () list.lookup='name' end -local function thename(s) list.name=s end -local function issub (v) list.sub=v end -local function iscrap (s) list.crap=string.lower(s) end -local function iskey (k,v) list[k]=v end -local function istrue (s) list[s]=true end -local function isfalse(s) list[s]=false end -local P,S,R,C=lpeg.P,lpeg.S,lpeg.R,lpeg.C -local spaces=P(" ")^0 -local namespec=(1-S("/:("))^0 -local crapspec=spaces*P("/")*(((1-P(":"))^0)/iscrap)*spaces -local filename_1=P("file:")/isfile*(namespec/thename) -local filename_2=P("[")*P(true)/isname*(((1-P("]"))^0)/thename)*P("]") -local fontname_1=P("name:")/isname*(namespec/thename) -local fontname_2=P(true)/issome*(namespec/thename) -local sometext=(R("az","AZ","09")+S("+-."))^1 -local truevalue=P("+")*spaces*(sometext/istrue) -local falsevalue=P("-")*spaces*(sometext/isfalse) -local keyvalue=(C(sometext)*spaces*P("=")*spaces*C(sometext))/iskey -local somevalue=sometext/istrue -local subvalue=P("(")*(C(P(1-S("()"))^1)/issub)*P(")") -local option=spaces*(keyvalue+falsevalue+truevalue+somevalue)*spaces -local options=P(":")*spaces*(P(";")^0*option)^0 -local pattern=(filename_1+filename_2+fontname_1+fontname_2)*subvalue^0*crapspec^0*options^0 -local function colonized(specification) - list={} - lpeg.match(pattern,specification.specification) - list.crap=nil - if list.name then - specification.name=list.name - list.name=nil - end - if list.lookup then - specification.lookup=list.lookup - list.lookup=nil - end - if list.sub then - specification.sub=list.sub - list.sub=nil - end - specification.features.normal=fonts.handlers.otf.features.normalize(list) - return specification -end -fonts.definers.registersplit(":",colonized,"cryptic") -fonts.definers.registersplit("",colonized,"more cryptic") -function fonts.definers.applypostprocessors(tfmdata) - local postprocessors=tfmdata.postprocessors - if postprocessors then - for i=1,#postprocessors do - local extrahash=postprocessors[i](tfmdata) - if type(extrahash)=="string" and extrahash~="" then - extrahash=string.gsub(lower(extrahash),"[^a-z]","-") - tfmdata.properties.fullname=format("%s-%s",tfmdata.properties.fullname,extrahash) - end - end - end - return tfmdata -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['luatex-fonts-ext']={ - version=1.001, - comment="companion to luatex-*.tex", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end -local fonts=fonts -local otffeatures=fonts.constructors.newfeatures("otf") -local function initializeitlc(tfmdata,value) - if value then - local parameters=tfmdata.parameters - local italicangle=parameters.italicangle - if italicangle and italicangle~=0 then - local properties=tfmdata.properties - local factor=tonumber(value) or 1 - properties.hasitalics=true - properties.autoitalicamount=factor*(parameters.uwidth or 40)/2 - end - end -end -otffeatures.register { - name="itlc", - description="italic correction", - initializers={ - base=initializeitlc, - node=initializeitlc, - } -} -local function initializeslant(tfmdata,value) - value=tonumber(value) - if not value then - value=0 - elseif value>1 then - value=1 - elseif value<-1 then 
- value=-1 - end - tfmdata.parameters.slantfactor=value -end -otffeatures.register { - name="slant", - description="slant glyphs", - initializers={ - base=initializeslant, - node=initializeslant, - } -} -local function initializeextend(tfmdata,value) - value=tonumber(value) - if not value then - value=0 - elseif value>10 then - value=10 - elseif value<-10 then - value=-10 - end - tfmdata.parameters.extendfactor=value -end -otffeatures.register { - name="extend", - description="scale glyphs horizontally", - initializers={ - base=initializeextend, - node=initializeextend, - } -} -fonts.protrusions=fonts.protrusions or {} -fonts.protrusions.setups=fonts.protrusions.setups or {} -local setups=fonts.protrusions.setups -local function initializeprotrusion(tfmdata,value) - if value then - local setup=setups[value] - if setup then - local factor,left,right=setup.factor or 1,setup.left or 1,setup.right or 1 - local emwidth=tfmdata.parameters.quad - tfmdata.parameters.protrusion={ - auto=true, - } - for i,chr in next,tfmdata.characters do - local v,pl,pr=setup[i],nil,nil - if v then - pl,pr=v[1],v[2] - end - if pl and pl~=0 then chr.left_protruding=left*pl*factor end - if pr and pr~=0 then chr.right_protruding=right*pr*factor end - end - end - end -end -otffeatures.register { - name="protrusion", - description="shift characters into the left and or right margin", - initializers={ - base=initializeprotrusion, - node=initializeprotrusion, - } -} -fonts.expansions=fonts.expansions or {} -fonts.expansions.setups=fonts.expansions.setups or {} -local setups=fonts.expansions.setups -local function initializeexpansion(tfmdata,value) - if value then - local setup=setups[value] - if setup then - local factor=setup.factor or 1 - tfmdata.parameters.expansion={ - stretch=10*(setup.stretch or 0), - shrink=10*(setup.shrink or 0), - step=10*(setup.step or 0), - auto=true, - } - for i,chr in next,tfmdata.characters do - local v=setup[i] - if v and v~=0 then - chr.expansion_factor=v*factor - else - chr.expansion_factor=factor - end - end - end - end -end -otffeatures.register { - name="expansion", - description="apply hz optimization", - initializers={ - base=initializeexpansion, - node=initializeexpansion, - } -} -function fonts.loggers.onetimemessage() end -local byte=string.byte -fonts.expansions.setups['default']={ - stretch=2,shrink=2,step=.5,factor=1, - [byte('A')]=0.5,[byte('B')]=0.7,[byte('C')]=0.7,[byte('D')]=0.5,[byte('E')]=0.7, - [byte('F')]=0.7,[byte('G')]=0.5,[byte('H')]=0.7,[byte('K')]=0.7,[byte('M')]=0.7, - [byte('N')]=0.7,[byte('O')]=0.5,[byte('P')]=0.7,[byte('Q')]=0.5,[byte('R')]=0.7, - [byte('S')]=0.7,[byte('U')]=0.7,[byte('W')]=0.7,[byte('Z')]=0.7, - [byte('a')]=0.7,[byte('b')]=0.7,[byte('c')]=0.7,[byte('d')]=0.7,[byte('e')]=0.7, - [byte('g')]=0.7,[byte('h')]=0.7,[byte('k')]=0.7,[byte('m')]=0.7,[byte('n')]=0.7, - [byte('o')]=0.7,[byte('p')]=0.7,[byte('q')]=0.7,[byte('s')]=0.7,[byte('u')]=0.7, - [byte('w')]=0.7,[byte('z')]=0.7, - [byte('2')]=0.7,[byte('3')]=0.7,[byte('6')]=0.7,[byte('8')]=0.7,[byte('9')]=0.7, -} -fonts.protrusions.setups['default']={ - factor=1,left=1,right=1, - [0x002C]={ 0,1 }, - [0x002E]={ 0,1 }, - [0x003A]={ 0,1 }, - [0x003B]={ 0,1 }, - [0x002D]={ 0,1 }, - [0x2013]={ 0,0.50 }, - [0x2014]={ 0,0.33 }, - [0x3001]={ 0,1 }, - [0x3002]={ 0,1 }, - [0x060C]={ 0,1 }, - [0x061B]={ 0,1 }, - [0x06D4]={ 0,1 }, -} -fonts.handlers.otf.features.normalize=function(t) - if t.rand then - t.rand="random" - end - return t -end -function fonts.helpers.nametoslot(name) - local t=type(name) - if 
t=="string" then - local tfmdata=fonts.hashes.identifiers[currentfont()] - local shared=tfmdata and tfmdata.shared - local fntdata=shared and shared.rawdata - return fntdata and fntdata.resources.unicodes[name] - elseif t=="number" then - return n - end -end -fonts.encodings=fonts.encodings or {} -local reencodings={} -fonts.encodings.reencodings=reencodings -local function specialreencode(tfmdata,value) - local encoding=value and reencodings[value] - if encoding then - local temp={} - local char=tfmdata.characters - for k,v in next,encoding do - temp[k]=char[v] - end - for k,v in next,temp do - char[k]=temp[k] - end - return string.format("reencoded:%s",value) - end -end -local function reencode(tfmdata,value) - tfmdata.postprocessors=tfmdata.postprocessors or {} - table.insert(tfmdata.postprocessors, - function(tfmdata) - return specialreencode(tfmdata,value) - end - ) -end -otffeatures.register { - name="reencode", - description="reencode characters", - manipulators={ - base=reencode, - node=reencode, - } -} - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['luatex-fonts-cbk']={ - version=1.001, - comment="companion to luatex-*.tex", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end -local fonts=fonts -local nodes=nodes -local traverse_id=node.traverse_id -local glyph_code=nodes.nodecodes.glyph -local ligaturing=node.ligaturing -local kerning=node.kerning -function node.ligaturing() texio.write_nl("warning: node.ligaturing is already applied") end -function node.kerning () texio.write_nl("warning: node.kerning is already applied") end -function nodes.handlers.characters(head) - local fontdata=fonts.hashes.identifiers - if fontdata then - local usedfonts,basefonts,prevfont,basefont={},{},nil,nil - for n in traverse_id(glyph_code,head) do - local font=n.font - if font~=prevfont then - if basefont then - basefont[2]=n.prev - end - prevfont=font - local used=usedfonts[font] - if not used then - local tfmdata=fontdata[font] - if tfmdata then - local shared=tfmdata.shared - if shared then - local processors=shared.processes - if processors and #processors>0 then - usedfonts[font]=processors - else - basefont={ n,nil } - basefonts[#basefonts+1]=basefont - end - end - end - end - end - end - if next(usedfonts) then - for font,processors in next,usedfonts do - for i=1,#processors do - head=processors[i](head,font,0) or head - end - end - end - if #basefonts>0 then - for i=1,#basefonts do - local range=basefonts[i] - local start,stop=range[1],range[2] - if stop then - ligaturing(start,stop) - kerning(start,stop) - else - ligaturing(start) - kerning(start) - end - end - end - return head,true - else - return head,false - end -end -function nodes.simple_font_handler(head) - head=nodes.handlers.characters(head) - nodes.injections.handler(head) - nodes.handlers.protectglyphs(head) - return head -end - -end -- closure diff --git a/src/fontloader/luaotfload-fonts-cbk.lua b/src/fontloader/luaotfload-fonts-cbk.lua deleted file mode 100644 index 9db94f6..0000000 --- a/src/fontloader/luaotfload-fonts-cbk.lua +++ /dev/null @@ -1,68 +0,0 @@ -if not modules then modules = { } end modules ['luatex-fonts-cbk'] = { - version = 1.001, - comment = "companion to luatex-*.tex", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = 
"PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end - -local fonts = fonts -local nodes = nodes - --- Fonts: (might move to node-gef.lua) - -local traverse_id = node.traverse_id -local glyph_code = nodes.nodecodes.glyph - -function nodes.handlers.characters(head) - local fontdata = fonts.hashes.identifiers - if fontdata then - local usedfonts, done, prevfont = { }, false, nil - for n in traverse_id(glyph_code,head) do - local font = n.font - if font ~= prevfont then - prevfont = font - local used = usedfonts[font] - if not used then - local tfmdata = fontdata[font] -- - if tfmdata then - local shared = tfmdata.shared -- we need to check shared, only when same features - if shared then - local processors = shared.processes - if processors and #processors > 0 then - usedfonts[font] = processors - done = true - end - end - end - end - end - end - if done then - for font, processors in next, usedfonts do - for i=1,#processors do - local h, d = processors[i](head,font,0) - head, done = h or head, done or d - end - end - end - return head, true - else - return head, false - end -end - -function nodes.simple_font_handler(head) --- lang.hyphenate(head) - head = nodes.handlers.characters(head) - nodes.injections.handler(head) - nodes.handlers.protectglyphs(head) - head = node.ligaturing(head) - head = node.kerning(head) - return head -end diff --git a/src/fontloader/luaotfload-fonts-def.lua b/src/fontloader/luaotfload-fonts-def.lua deleted file mode 100644 index 0c2f0db..0000000 --- a/src/fontloader/luaotfload-fonts-def.lua +++ /dev/null @@ -1,97 +0,0 @@ -if not modules then modules = { } end modules ['luatex-font-def'] = { - version = 1.001, - comment = "companion to luatex-*.tex", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end - -local fonts = fonts - --- A bit of tuning for definitions. - -fonts.constructors.namemode = "specification" -- somehow latex needs this (changed name!) => will change into an overload - --- tricky: we sort of bypass the parser and directly feed all into --- the sub parser - -function fonts.definers.getspecification(str) - return "", str, "", ":", str -end - --- the generic name parser (different from context!) - -local list = { } - -local function issome () list.lookup = 'name' end -- xetex mode prefers name (not in context!) 
-local function isfile () list.lookup = 'file' end -local function isname () list.lookup = 'name' end -local function thename(s) list.name = s end -local function issub (v) list.sub = v end -local function iscrap (s) list.crap = string.lower(s) end -local function iskey (k,v) list[k] = v end -local function istrue (s) list[s] = true end -local function isfalse(s) list[s] = false end - -local P, S, R, C = lpeg.P, lpeg.S, lpeg.R, lpeg.C - -local spaces = P(" ")^0 -local namespec = (1-S("/:("))^0 -- was: (1-S("/: ("))^0 -local crapspec = spaces * P("/") * (((1-P(":"))^0)/iscrap) * spaces -local filename_1 = P("file:")/isfile * (namespec/thename) -local filename_2 = P("[") * P(true)/isname * (((1-P("]"))^0)/thename) * P("]") -local fontname_1 = P("name:")/isname * (namespec/thename) -local fontname_2 = P(true)/issome * (namespec/thename) -local sometext = (R("az","AZ","09") + S("+-."))^1 -local truevalue = P("+") * spaces * (sometext/istrue) -local falsevalue = P("-") * spaces * (sometext/isfalse) -local keyvalue = (C(sometext) * spaces * P("=") * spaces * C(sometext))/iskey -local somevalue = sometext/istrue -local subvalue = P("(") * (C(P(1-S("()"))^1)/issub) * P(")") -- for Kim -local option = spaces * (keyvalue + falsevalue + truevalue + somevalue) * spaces -local options = P(":") * spaces * (P(";")^0 * option)^0 - -local pattern = (filename_1 + filename_2 + fontname_1 + fontname_2) * subvalue^0 * crapspec^0 * options^0 - -local function colonized(specification) -- xetex mode - list = { } - lpeg.match(pattern,specification.specification) - list.crap = nil -- style not supported, maybe some day - if list.name then - specification.name = list.name - list.name = nil - end - if list.lookup then - specification.lookup = list.lookup - list.lookup = nil - end - if list.sub then - specification.sub = list.sub - list.sub = nil - end - specification.features.normal = fonts.handlers.otf.features.normalize(list) - return specification -end - -fonts.definers.registersplit(":",colonized,"cryptic") -fonts.definers.registersplit("", colonized,"more cryptic") -- catches \font\text=[names] - -function fonts.definers.applypostprocessors(tfmdata) - local postprocessors = tfmdata.postprocessors - if postprocessors then - for i=1,#postprocessors do - local extrahash = postprocessors[i](tfmdata) -- after scaling etc - if type(extrahash) == "string" and extrahash ~= "" then - -- e.g. 
a reencoding needs this - extrahash = string.gsub(lower(extrahash),"[^a-z]","-") - tfmdata.properties.fullname = format("%s-%s",tfmdata.properties.fullname,extrahash) - end - end - end - return tfmdata -end diff --git a/src/fontloader/luaotfload-fonts-enc.lua b/src/fontloader/luaotfload-fonts-enc.lua deleted file mode 100644 index e20c3a0..0000000 --- a/src/fontloader/luaotfload-fonts-enc.lua +++ /dev/null @@ -1,28 +0,0 @@ -if not modules then modules = { } end modules ['luatex-font-enc'] = { - version = 1.001, - comment = "companion to luatex-*.tex", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end - -local fonts = fonts -fonts.encodings = { } -fonts.encodings.agl = { } - -setmetatable(fonts.encodings.agl, { __index = function(t,k) - if k == "unicodes" then - texio.write(" ") - local unicodes = dofile(resolvers.findfile("font-age.lua")) - fonts.encodings.agl = { unicodes = unicodes } - return unicodes - else - return nil - end -end }) - diff --git a/src/fontloader/luaotfload-fonts-ext.lua b/src/fontloader/luaotfload-fonts-ext.lua deleted file mode 100644 index b60d045..0000000 --- a/src/fontloader/luaotfload-fonts-ext.lua +++ /dev/null @@ -1,272 +0,0 @@ -if not modules then modules = { } end modules ['luatex-fonts-ext'] = { - version = 1.001, - comment = "companion to luatex-*.tex", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end - -local fonts = fonts -local otffeatures = fonts.constructors.newfeatures("otf") - --- A few generic extensions. 
- -local function initializeitlc(tfmdata,value) - if value then - -- the magic 40 and it formula come from Dohyun Kim but we might need another guess - local parameters = tfmdata.parameters - local italicangle = parameters.italicangle - if italicangle and italicangle ~= 0 then - local properties = tfmdata.properties - local factor = tonumber(value) or 1 - properties.hasitalics = true - properties.autoitalicamount = factor * (parameters.uwidth or 40)/2 - end - end -end - -otffeatures.register { - name = "itlc", - description = "italic correction", - initializers = { - base = initializeitlc, - node = initializeitlc, - } -} - --- slant and extend - -local function initializeslant(tfmdata,value) - value = tonumber(value) - if not value then - value = 0 - elseif value > 1 then - value = 1 - elseif value < -1 then - value = -1 - end - tfmdata.parameters.slantfactor = value -end - -otffeatures.register { - name = "slant", - description = "slant glyphs", - initializers = { - base = initializeslant, - node = initializeslant, - } -} - -local function initializeextend(tfmdata,value) - value = tonumber(value) - if not value then - value = 0 - elseif value > 10 then - value = 10 - elseif value < -10 then - value = -10 - end - tfmdata.parameters.extendfactor = value -end - -otffeatures.register { - name = "extend", - description = "scale glyphs horizontally", - initializers = { - base = initializeextend, - node = initializeextend, - } -} - --- expansion and protrusion - -fonts.protrusions = fonts.protrusions or { } -fonts.protrusions.setups = fonts.protrusions.setups or { } - -local setups = fonts.protrusions.setups - -local function initializeprotrusion(tfmdata,value) - if value then - local setup = setups[value] - if setup then - local factor, left, right = setup.factor or 1, setup.left or 1, setup.right or 1 - local emwidth = tfmdata.parameters.quad - tfmdata.parameters.protrusion = { - auto = true, - } - for i, chr in next, tfmdata.characters do - local v, pl, pr = setup[i], nil, nil - if v then - pl, pr = v[1], v[2] - end - if pl and pl ~= 0 then chr.left_protruding = left *pl*factor end - if pr and pr ~= 0 then chr.right_protruding = right*pr*factor end - end - end - end -end - -otffeatures.register { - name = "protrusion", - description = "shift characters into the left and or right margin", - initializers = { - base = initializeprotrusion, - node = initializeprotrusion, - } -} - -fonts.expansions = fonts.expansions or { } -fonts.expansions.setups = fonts.expansions.setups or { } - -local setups = fonts.expansions.setups - -local function initializeexpansion(tfmdata,value) - if value then - local setup = setups[value] - if setup then - local factor = setup.factor or 1 - tfmdata.parameters.expansion = { - stretch = 10 * (setup.stretch or 0), - shrink = 10 * (setup.shrink or 0), - step = 10 * (setup.step or 0), - auto = true, - } - for i, chr in next, tfmdata.characters do - local v = setup[i] - if v and v ~= 0 then - chr.expansion_factor = v*factor - else -- can be option - chr.expansion_factor = factor - end - end - end - end -end - -otffeatures.register { - name = "expansion", - description = "apply hz optimization", - initializers = { - base = initializeexpansion, - node = initializeexpansion, - } -} - --- left over - -function fonts.loggers.onetimemessage() end - --- example vectors - -local byte = string.byte - -fonts.expansions.setups['default'] = { - - stretch = 2, shrink = 2, step = .5, factor = 1, - - [byte('A')] = 0.5, [byte('B')] = 0.7, [byte('C')] = 0.7, [byte('D')] = 0.5, [byte('E')] 
= 0.7, - [byte('F')] = 0.7, [byte('G')] = 0.5, [byte('H')] = 0.7, [byte('K')] = 0.7, [byte('M')] = 0.7, - [byte('N')] = 0.7, [byte('O')] = 0.5, [byte('P')] = 0.7, [byte('Q')] = 0.5, [byte('R')] = 0.7, - [byte('S')] = 0.7, [byte('U')] = 0.7, [byte('W')] = 0.7, [byte('Z')] = 0.7, - [byte('a')] = 0.7, [byte('b')] = 0.7, [byte('c')] = 0.7, [byte('d')] = 0.7, [byte('e')] = 0.7, - [byte('g')] = 0.7, [byte('h')] = 0.7, [byte('k')] = 0.7, [byte('m')] = 0.7, [byte('n')] = 0.7, - [byte('o')] = 0.7, [byte('p')] = 0.7, [byte('q')] = 0.7, [byte('s')] = 0.7, [byte('u')] = 0.7, - [byte('w')] = 0.7, [byte('z')] = 0.7, - [byte('2')] = 0.7, [byte('3')] = 0.7, [byte('6')] = 0.7, [byte('8')] = 0.7, [byte('9')] = 0.7, -} - -fonts.protrusions.setups['default'] = { - - factor = 1, left = 1, right = 1, - - [0x002C] = { 0, 1 }, -- comma - [0x002E] = { 0, 1 }, -- period - [0x003A] = { 0, 1 }, -- colon - [0x003B] = { 0, 1 }, -- semicolon - [0x002D] = { 0, 1 }, -- hyphen - [0x2013] = { 0, 0.50 }, -- endash - [0x2014] = { 0, 0.33 }, -- emdash - [0x3001] = { 0, 1 }, -- ideographic comma 、 - [0x3002] = { 0, 1 }, -- ideographic full stop 。 - [0x060C] = { 0, 1 }, -- arabic comma ، - [0x061B] = { 0, 1 }, -- arabic semicolon ؛ - [0x06D4] = { 0, 1 }, -- arabic full stop ۔ - -} - --- normalizer - -fonts.handlers.otf.features.normalize = function(t) - if t.rand then - t.rand = "random" - end - return t -end - --- bonus - -function fonts.helpers.nametoslot(name) - local t = type(name) - if t == "string" then - local tfmdata = fonts.hashes.identifiers[currentfont()] - local shared = tfmdata and tfmdata.shared - local fntdata = shared and shared.rawdata - return fntdata and fntdata.resources.unicodes[name] - elseif t == "number" then - return n - end -end - --- \font\test=file:somefont:reencode=mymessup --- --- fonts.encodings.reencodings.mymessup = { --- [109] = 110, -- m --- [110] = 109, -- n --- } - -fonts.encodings = fonts.encodings or { } -local reencodings = { } -fonts.encodings.reencodings = reencodings - -local function specialreencode(tfmdata,value) - -- we forget about kerns as we assume symbols and we - -- could issue a message if ther are kerns but it's - -- a hack anyway so we odn't care too much here - local encoding = value and reencodings[value] - if encoding then - local temp = { } - local char = tfmdata.characters - for k, v in next, encoding do - temp[k] = char[v] - end - for k, v in next, temp do - char[k] = temp[k] - end - -- if we use the font otherwise luatex gets confused so - -- we return an additional hash component for fullname - return string.format("reencoded:%s",value) - end -end - -local function reencode(tfmdata,value) - tfmdata.postprocessors = tfmdata.postprocessors or { } - table.insert(tfmdata.postprocessors, - function(tfmdata) - return specialreencode(tfmdata,value) - end - ) -end - -otffeatures.register { - name = "reencode", - description = "reencode characters", - manipulators = { - base = reencode, - node = reencode, - } -} diff --git a/src/fontloader/luaotfload-fonts-inj.lua b/src/fontloader/luaotfload-fonts-inj.lua deleted file mode 100644 index ae48150..0000000 --- a/src/fontloader/luaotfload-fonts-inj.lua +++ /dev/null @@ -1,526 +0,0 @@ -if not modules then modules = { } end modules ['node-inj'] = { - version = 1.001, - comment = "companion to node-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files", -} - --- This is very experimental (this will change when we have luatex > 
.50 and --- a few pending thingies are available. Also, Idris needs to make a few more --- test fonts. Btw, future versions of luatex will have extended glyph properties --- that can be of help. Some optimizations can go away when we have faster machines. - --- todo: make a special one for context - -local next = next -local utfchar = utf.char - -local trace_injections = false trackers.register("nodes.injections", function(v) trace_injections = v end) - -local report_injections = logs.reporter("nodes","injections") - -local attributes, nodes, node = attributes, nodes, node - -fonts = fonts -local fontdata = fonts.hashes.identifiers - -nodes.injections = nodes.injections or { } -local injections = nodes.injections - -local nodecodes = nodes.nodecodes -local glyph_code = nodecodes.glyph -local kern_code = nodecodes.kern -local nodepool = nodes.pool -local newkern = nodepool.kern - -local traverse_id = node.traverse_id -local insert_node_before = node.insert_before -local insert_node_after = node.insert_after - -local a_kernpair = attributes.private('kernpair') -local a_ligacomp = attributes.private('ligacomp') -local a_markbase = attributes.private('markbase') -local a_markmark = attributes.private('markmark') -local a_markdone = attributes.private('markdone') -local a_cursbase = attributes.private('cursbase') -local a_curscurs = attributes.private('curscurs') -local a_cursdone = attributes.private('cursdone') - --- This injector has been tested by Idris Samawi Hamid (several arabic fonts as well as --- the rather demanding Husayni font), Khaled Hosny (latin and arabic) and Kaj Eigner --- (arabic, hebrew and thai) and myself (whatever font I come across). I'm pretty sure --- that this code is not 100% okay but examples are needed to figure things out. - -function injections.installnewkern(nk) - newkern = nk or newkern -end - -local cursives = { } -local marks = { } -local kerns = { } - --- Currently we do gpos/kern in a bit inofficial way but when we have the extra fields in --- glyphnodes to manipulate ht/dp/wd explicitly I will provide an alternative; also, we --- can share tables. - --- For the moment we pass the r2l key ... volt/arabtype tests .. idris: this needs --- checking with husayni (volt and fontforge). 
- -function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext) - local dx, dy = factor*(exit[1]-entry[1]), factor*(exit[2]-entry[2]) - local ws, wn = tfmstart.width, tfmnext.width - local bound = #cursives + 1 - start[a_cursbase] = bound - nxt[a_curscurs] = bound - cursives[bound] = { rlmode, dx, dy, ws, wn } - return dx, dy, bound -end - -function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr) - local x, y, w, h = factor*spec[1], factor*spec[2], factor*spec[3], factor*spec[4] - -- dy = y - h - if x ~= 0 or w ~= 0 or y ~= 0 or h ~= 0 then - local bound = current[a_kernpair] - if bound then - local kb = kerns[bound] - -- inefficient but singles have less, but weird anyway, needs checking - kb[2], kb[3], kb[4], kb[5] = (kb[2] or 0) + x, (kb[3] or 0) + y, (kb[4] or 0)+ w, (kb[5] or 0) + h - else - bound = #kerns + 1 - current[a_kernpair] = bound - kerns[bound] = { rlmode, x, y, w, h, r2lflag, tfmchr.width } - end - return x, y, w, h, bound - end - return x, y, w, h -- no bound -end - -function injections.setkern(current,factor,rlmode,x,tfmchr) - local dx = factor*x - if dx ~= 0 then - local bound = #kerns + 1 - current[a_kernpair] = bound - kerns[bound] = { rlmode, dx } - return dx, bound - else - return 0, 0 - end -end - -function injections.setmark(start,base,factor,rlmode,ba,ma,index,baseismark) -- ba=baseanchor, ma=markanchor - local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2]) -- the index argument is no longer used but when this - local bound = base[a_markbase] -- fails again we should pass it - local index = 1 - if bound then - local mb = marks[bound] - if mb then - -- if not index then index = #mb + 1 end - index = #mb + 1 - mb[index] = { dx, dy, rlmode } - start[a_markmark] = bound - start[a_markdone] = index - return dx, dy, bound - else - report_injections("possible problem, %U is base mark without data (id %a)",base.char,bound) - end - end --- index = index or 1 - index = index or 1 - bound = #marks + 1 - base[a_markbase] = bound - start[a_markmark] = bound - start[a_markdone] = index - marks[bound] = { [index] = { dx, dy, rlmode, baseismark } } - return dx, dy, bound -end - -local function dir(n) - return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset" -end - -local function trace(head) - report_injections("begin run") - for n in traverse_id(glyph_code,head) do - if n.subtype < 256 then - local kp = n[a_kernpair] - local mb = n[a_markbase] - local mm = n[a_markmark] - local md = n[a_markdone] - local cb = n[a_cursbase] - local cc = n[a_curscurs] - local char = n.char - report_injections("font %s, char %U, glyph %c",n.font,char,char) - if kp then - local k = kerns[kp] - if k[3] then - report_injections(" pairkern: dir %a, x %p, y %p, w %p, h %p",dir(k[1]),k[2],k[3],k[4],k[5]) - else - report_injections(" kern: dir %a, dx %p",dir(k[1]),k[2]) - end - end - if mb then - report_injections(" markbase: bound %a",mb) - end - if mm then - local m = marks[mm] - if mb then - local m = m[mb] - if m then - report_injections(" markmark: bound %a, index %a, dx %p, dy %p",mm,md,m[1],m[2]) - else - report_injections(" markmark: bound %a, missing index",mm) - end - else - m = m[1] - report_injections(" markmark: bound %a, dx %p, dy %p",mm,m and m[1],m and m[2]) - end - end - if cb then - report_injections(" cursbase: bound %a",cb) - end - if cc then - local c = cursives[cc] - report_injections(" curscurs: bound %a, dir %a, dx %p, dy %p",cc,dir(c[1]),c[2],c[3]) - end - end - end - report_injections("end run") -end - --- todo: reuse 
tables (i.e. no collection), but will be extra fields anyway --- todo: check for attribute - --- We can have a fast test on a font being processed, so we can check faster for marks etc --- but I'll make a context variant anyway. - -local function show_result(head) - local current = head - local skipping = false - while current do - local id = current.id - if id == glyph_code then - report_injections("char: %C, width %p, xoffset %p, yoffset %p",current.char,current.width,current.xoffset,current.yoffset) - skipping = false - elseif id == kern_code then - report_injections("kern: %p",current.kern) - skipping = false - elseif not skipping then - report_injections() - skipping = true - end - current = current.next - end -end - -function injections.handler(head,where,keep) - local has_marks, has_cursives, has_kerns = next(marks), next(cursives), next(kerns) - if has_marks or has_cursives then - if trace_injections then - trace(head) - end - -- in the future variant we will not copy items but refs to tables - local done, ky, rl, valid, cx, wx, mk, nofvalid = false, { }, { }, { }, { }, { }, { }, 0 - if has_kerns then -- move outside loop - local nf, tm = nil, nil - for n in traverse_id(glyph_code,head) do -- only needed for relevant fonts - if n.subtype < 256 then - nofvalid = nofvalid + 1 - valid[nofvalid] = n - if n.font ~= nf then - nf = n.font - tm = fontdata[nf].resources.marks - end - if tm then - mk[n] = tm[n.char] - end - local k = n[a_kernpair] - if k then - local kk = kerns[k] - if kk then - local x, y, w, h = kk[2] or 0, kk[3] or 0, kk[4] or 0, kk[5] or 0 - local dy = y - h - if dy ~= 0 then - ky[n] = dy - end - if w ~= 0 or x ~= 0 then - wx[n] = kk - end - rl[n] = kk[1] -- could move in test - end - end - end - end - else - local nf, tm = nil, nil - for n in traverse_id(glyph_code,head) do - if n.subtype < 256 then - nofvalid = nofvalid + 1 - valid[nofvalid] = n - if n.font ~= nf then - nf = n.font - tm = fontdata[nf].resources.marks - end - if tm then - mk[n] = tm[n.char] - end - end - end - end - if nofvalid > 0 then - -- we can assume done == true because we have cursives and marks - local cx = { } - if has_kerns and next(ky) then - for n, k in next, ky do - n.yoffset = k - end - end - -- todo: reuse t and use maxt - if has_cursives then - local p_cursbase, p = nil, nil - -- since we need valid[n+1] we can also use a "while true do" - local t, d, maxt = { }, { }, 0 - for i=1,nofvalid do -- valid == glyphs - local n = valid[i] - if not mk[n] then - local n_cursbase = n[a_cursbase] - if p_cursbase then - local n_curscurs = n[a_curscurs] - if p_cursbase == n_curscurs then - local c = cursives[n_curscurs] - if c then - local rlmode, dx, dy, ws, wn = c[1], c[2], c[3], c[4], c[5] - if rlmode >= 0 then - dx = dx - ws - else - dx = dx + wn - end - if dx ~= 0 then - cx[n] = dx - rl[n] = rlmode - end - -- if rlmode and rlmode < 0 then - dy = -dy - -- end - maxt = maxt + 1 - t[maxt] = p - d[maxt] = dy - else - maxt = 0 - end - end - elseif maxt > 0 then - local ny = n.yoffset - for i=maxt,1,-1 do - ny = ny + d[i] - local ti = t[i] - ti.yoffset = ti.yoffset + ny - end - maxt = 0 - end - if not n_cursbase and maxt > 0 then - local ny = n.yoffset - for i=maxt,1,-1 do - ny = ny + d[i] - local ti = t[i] - ti.yoffset = ny - end - maxt = 0 - end - p_cursbase, p = n_cursbase, n - end - end - if maxt > 0 then - local ny = n.yoffset - for i=maxt,1,-1 do - ny = ny + d[i] - local ti = t[i] - ti.yoffset = ny - end - maxt = 0 - end - if not keep then - cursives = { } - end - end - if has_marks then - for 
i=1,nofvalid do - local p = valid[i] - local p_markbase = p[a_markbase] - if p_markbase then - local mrks = marks[p_markbase] - local nofmarks = #mrks - for n in traverse_id(glyph_code,p.next) do - local n_markmark = n[a_markmark] - if p_markbase == n_markmark then - local index = n[a_markdone] or 1 - local d = mrks[index] - if d then - local rlmode = d[3] - -- - local k = wx[p] - if k then - local x = k[2] - local w = k[4] - if w then - if rlmode and rlmode >= 0 then - -- kern(x) glyph(p) kern(w-x) mark(n) - n.xoffset = p.xoffset - p.width + d[1] - (w-x) - else - -- kern(w-x) glyph(p) kern(x) mark(n) - n.xoffset = p.xoffset - d[1] - x - end - else - if rlmode and rlmode >= 0 then - -- okay for husayni - n.xoffset = p.xoffset - p.width + d[1] - else - -- needs checking: is x ok here? - n.xoffset = p.xoffset - d[1] - x - end - end - else - if rlmode and rlmode >= 0 then - n.xoffset = p.xoffset - p.width + d[1] - else - n.xoffset = p.xoffset - d[1] - end - local w = n.width - if w ~= 0 then - insert_node_before(head,n,newkern(-w/2)) - insert_node_after(head,n,newkern(-w/2)) - end - end - -- -- - if mk[p] then - n.yoffset = p.yoffset + d[2] - else - n.yoffset = n.yoffset + p.yoffset + d[2] - end - -- - if nofmarks == 1 then - break - else - nofmarks = nofmarks - 1 - end - end - else - -- KE: there can be sequences in ligatures - end - end - end - end - if not keep then - marks = { } - end - end - -- todo : combine - if next(wx) then - for n, k in next, wx do - -- only w can be nil (kernclasses), can be sped up when w == nil - local x = k[2] - local w = k[4] - if w then - local rl = k[1] -- r2l = k[6] - local wx = w - x - if rl < 0 then -- KE: don't use r2l here - if wx ~= 0 then - insert_node_before(head,n,newkern(wx)) -- type 0/2 - end - if x ~= 0 then - insert_node_after (head,n,newkern(x)) -- type 0/2 - end - else - if x ~= 0 then - insert_node_before(head,n,newkern(x)) -- type 0/2 - end - if wx ~= 0 then - insert_node_after (head,n,newkern(wx)) -- type 0/2 - end - end - elseif x ~= 0 then - -- this needs checking for rl < 0 but it is unlikely that a r2l script - -- uses kernclasses between glyphs so we're probably safe (KE has a - -- problematic font where marks interfere with rl < 0 in the previous - -- case) - insert_node_before(head,n,newkern(x)) -- a real font kern, type 0 - end - end - end - if next(cx) then - for n, k in next, cx do - if k ~= 0 then - local rln = rl[n] - if rln and rln < 0 then - insert_node_before(head,n,newkern(-k)) -- type 0/2 - else - insert_node_before(head,n,newkern(k)) -- type 0/2 - end - end - end - end - if not keep then - kerns = { } - end - -- if trace_injections then - -- show_result(head) - -- end - return head, true - elseif not keep then - kerns, cursives, marks = { }, { }, { } - end - elseif has_kerns then - if trace_injections then - trace(head) - end - for n in traverse_id(glyph_code,head) do - if n.subtype < 256 then - local k = n[a_kernpair] - if k then - local kk = kerns[k] - if kk then - local rl, x, y, w = kk[1], kk[2] or 0, kk[3], kk[4] - if y and y ~= 0 then - n.yoffset = y -- todo: h ? - end - if w then - -- copied from above - -- local r2l = kk[6] - local wx = w - x - if rl < 0 then -- KE: don't use r2l here - if wx ~= 0 then - insert_node_before(head,n,newkern(wx)) - end - if x ~= 0 then - insert_node_after (head,n,newkern(x)) - end - else - if x ~= 0 then - insert_node_before(head,n,newkern(x)) - end - if wx ~= 0 then - insert_node_after(head,n,newkern(wx)) - end - end - else - -- simple (e.g. 
kernclass kerns) - if x ~= 0 then - insert_node_before(head,n,newkern(x)) - end - end - end - end - end - end - if not keep then - kerns = { } - end - -- if trace_injections then - -- show_result(head) - -- end - return head, true - else - -- no tracing needed - end - return head, false -end diff --git a/src/fontloader/luaotfload-fonts-lua.lua b/src/fontloader/luaotfload-fonts-lua.lua deleted file mode 100644 index ec3fe38..0000000 --- a/src/fontloader/luaotfload-fonts-lua.lua +++ /dev/null @@ -1,33 +0,0 @@ -if not modules then modules = { } end modules ['luatex-fonts-lua'] = { - version = 1.001, - comment = "companion to luatex-*.tex", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end - -local fonts = fonts -fonts.formats.lua = "lua" - -function fonts.readers.lua(specification) - local fullname = specification.filename or "" - if fullname == "" then - local forced = specification.forced or "" - if forced ~= "" then - fullname = specification.name .. "." .. forced - else - fullname = specification.name - end - end - local fullname = resolvers.findfile(fullname) or "" - if fullname ~= "" then - local loader = loadfile(fullname) - loader = loader and loader() - return loader and loader(specification) - end -end diff --git a/src/fontloader/luaotfload-fonts-otn.lua b/src/fontloader/luaotfload-fonts-otn.lua deleted file mode 100644 index c57be5f..0000000 --- a/src/fontloader/luaotfload-fonts-otn.lua +++ /dev/null @@ -1,2848 +0,0 @@ -if not modules then modules = { } end modules ['font-otn'] = { - version = 1.001, - comment = "companion to font-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files", -} - --- preprocessors = { "nodes" } - --- this is still somewhat preliminary and it will get better in due time; --- much functionality could only be implemented thanks to the husayni font --- of Idris Samawi Hamid to who we dedicate this module. - --- in retrospect it always looks easy but believe it or not, it took a lot --- of work to get proper open type support done: buggy fonts, fuzzy specs, --- special made testfonts, many skype sessions between taco, idris and me, --- torture tests etc etc ... unfortunately the code does not show how much --- time it took ... - --- todo: --- --- kerning is probably not yet ok for latin around dics nodes (interesting challenge) --- extension infrastructure (for usage out of context) --- sorting features according to vendors/renderers --- alternative loop quitters --- check cursive and r2l --- find out where ignore-mark-classes went --- default features (per language, script) --- handle positions (we need example fonts) --- handle gpos_single (we might want an extra width field in glyph nodes because adding kerns might interfere) --- mark (to mark) code is still not what it should be (too messy but we need some more extreem husayni tests) --- remove some optimizations (when I have a faster machine) --- --- maybe redo the lot some way (more context specific) - ---[[ldx-- -

This module is a bit more split up than I'd like but since we also want to test -with plain it has to be so. This module is part of ConTeXt, -and discussion about improvements and functionality mostly happens on the -ConTeXt mailing list.

- -

The specification of OpenType is kind of vague. Apart from the lack of a proper -free specification there's also the problem that Microsoft and Adobe -may have their own interpretation of how and in what order to apply features. -In general the Microsoft website has more detailed specifications and is a -better reference. There is also some information in the FontForge help files.

- -

Because so much is possible, fonts might contain bugs and/or be made to -work with certain renderers. These may evolve over time, which may have the side -effect that suddenly fonts behave differently.

- -

After a lot of experiments (mostly by Taco, me and Idris) we're now at yet another -implementation. Of course all errors are mine and of course the code can be -improved. There are quite some optimizations going on here and processing speed -is currently acceptable. Not all functions are implemented yet, often because I -lack the fonts for testing. Many scripts are not yet supported either, but I will -look into them as soon as users ask for it.

- -

Because there are different interpretations possible, I will extend the code -with more (configurable) variants. I can also add hooks for users so that they can -write their own extensions.
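-- A minimal, hypothetical sketch of such a user hook, assuming nothing beyond the
-- otffeatures.register pattern already used in luaotfload-fonts-ext.lua above; the
-- feature name "demo" and the property it sets are invented for illustration only.

local otffeatures = fonts.constructors.newfeatures("otf")

local function initializedemo(tfmdata,value)
    if value then
        tfmdata.properties.demo = value -- made-up property, just to show the hook
    end
end

otffeatures.register {
    name         = "demo",
    description  = "example of a user supplied feature",
    initializers = {
        base = initializedemo,
        node = initializedemo,
    }
}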

- -

Glyphs are indexed not by unicode but in their own way. This is because there is no -relationship with unicode at all, apart from the fact that a font might cover certain -ranges of characters. One character can have multiple shapes. However, at the -TeX end we use unicode, so all extra glyphs are mapped into a private -space. This is needed because we need to access them and TeX has to include -them in the output eventually.
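-- A hedged illustration of what that mapping amounts to (not code from this patch);
-- the start of the private area and the field names are assumptions.

local private = 0xF0000 -- assumed start of the private area

local function addprivates(descriptions)
    for index, description in next, descriptions do
        if not description.unicode then
            description.unicode = private -- give the glyph an addressable code point
            private = private + 1
        end
    end
end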

- -

The raw table as it comes from FontForge gets reorganized to fit our needs. -In ConTeXt that table is packed (similar tables are shared) and cached on disk -so that successive runs can use the optimized table (after loading the table is -unpacked). The flattening code used later is a prelude to an even more compact table -format (and as such it keeps evolving).
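-- A rough sketch of the caching idea in plain Lua; the real loader has its own
-- containers and serializer, so the file name handling, the version check and the
-- serialize argument are assumptions.

local function fromcache(cachefile,version,build,serialize)
    local loader = loadfile(cachefile)  -- a previously written "return { ... }" dump
    local data   = loader and loader()
    if data and data.version == version then
        return data                     -- reuse the optimized table
    end
    data = build()                      -- expensive: parse and reorganize the font
    data.version = version
    local f = io.open(cachefile,"w")
    if f then
        f:write("return " .. serialize(data)) -- serialize: any table dumper will do
        f:close()
    end
    return data
end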

- -

This module is sparsely documented because it is a moving target. The table format -of the reader changes and we experiment a lot with different methods for supporting -features.

- -

As with the AFM code, we may decide to store more information in the -OTF table.

- -

Incrementing the version number will force a re-cache. We jump the number by one -when there's a fix in the FontForge library or Lua code that -results in different tables.

---ldx]]-- - --- action handler chainproc chainmore comment --- --- gsub_single ok ok ok --- gsub_multiple ok ok not implemented yet --- gsub_alternate ok ok not implemented yet --- gsub_ligature ok ok ok --- gsub_context ok -- --- gsub_contextchain ok -- --- gsub_reversecontextchain ok -- --- chainsub -- ok --- reversesub -- ok --- gpos_mark2base ok ok --- gpos_mark2ligature ok ok --- gpos_mark2mark ok ok --- gpos_cursive ok untested --- gpos_single ok ok --- gpos_pair ok ok --- gpos_context ok -- --- gpos_contextchain ok -- --- --- todo: contextpos and contextsub and class stuff --- --- actions: --- --- handler : actions triggered by lookup --- chainproc : actions triggered by contextual lookup --- chainmore : multiple substitutions triggered by contextual lookup (e.g. fij -> f + ij) --- --- remark: the 'not implemented yet' variants will be done when we have fonts that use them --- remark: we need to check what to do with discretionaries - --- We used to have independent hashes for lookups but as the tags are unique --- we now use only one hash. If needed we can have multiple again but in that --- case I will probably prefix (i.e. rename) the lookups in the cached font file. - --- Todo: make plugin feature that operates on char/glyphnode arrays - -local concat, insert, remove = table.concat, table.insert, table.remove -local gmatch, gsub, find, match, lower, strip = string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip -local type, next, tonumber, tostring = type, next, tonumber, tostring -local lpegmatch = lpeg.match -local random = math.random -local formatters = string.formatters - -local logs, trackers, nodes, attributes = logs, trackers, nodes, attributes - -local registertracker = trackers.register - -local fonts = fonts -local otf = fonts.handlers.otf - -local trace_lookups = false registertracker("otf.lookups", function(v) trace_lookups = v end) -local trace_singles = false registertracker("otf.singles", function(v) trace_singles = v end) -local trace_multiples = false registertracker("otf.multiples", function(v) trace_multiples = v end) -local trace_alternatives = false registertracker("otf.alternatives", function(v) trace_alternatives = v end) -local trace_ligatures = false registertracker("otf.ligatures", function(v) trace_ligatures = v end) -local trace_contexts = false registertracker("otf.contexts", function(v) trace_contexts = v end) -local trace_marks = false registertracker("otf.marks", function(v) trace_marks = v end) -local trace_kerns = false registertracker("otf.kerns", function(v) trace_kerns = v end) -local trace_cursive = false registertracker("otf.cursive", function(v) trace_cursive = v end) -local trace_preparing = false registertracker("otf.preparing", function(v) trace_preparing = v end) -local trace_bugs = false registertracker("otf.bugs", function(v) trace_bugs = v end) -local trace_details = false registertracker("otf.details", function(v) trace_details = v end) -local trace_applied = false registertracker("otf.applied", function(v) trace_applied = v end) -local trace_steps = false registertracker("otf.steps", function(v) trace_steps = v end) -local trace_skips = false registertracker("otf.skips", function(v) trace_skips = v end) -local trace_directions = false registertracker("otf.directions", function(v) trace_directions = v end) - -local report_direct = logs.reporter("fonts","otf direct") -local report_subchain = logs.reporter("fonts","otf subchain") -local report_chain = logs.reporter("fonts","otf chain") -local 
report_process = logs.reporter("fonts","otf process") -local report_prepare = logs.reporter("fonts","otf prepare") -local report_warning = logs.reporter("fonts","otf warning") - -registertracker("otf.verbose_chain", function(v) otf.setcontextchain(v and "verbose") end) -registertracker("otf.normal_chain", function(v) otf.setcontextchain(v and "normal") end) - -registertracker("otf.replacements", "otf.singles,otf.multiples,otf.alternatives,otf.ligatures") -registertracker("otf.positions","otf.marks,otf.kerns,otf.cursive") -registertracker("otf.actions","otf.replacements,otf.positions") -registertracker("otf.injections","nodes.injections") - -registertracker("*otf.sample","otf.steps,otf.actions,otf.analyzing") - -local insert_node_after = node.insert_after -local delete_node = nodes.delete -local copy_node = node.copy -local find_node_tail = node.tail or node.slide -local flush_node_list = node.flush_list -local end_of_math = node.end_of_math - -local setmetatableindex = table.setmetatableindex - -local zwnj = 0x200C -local zwj = 0x200D -local wildcard = "*" -local default = "dflt" - -local nodecodes = nodes.nodecodes -local whatcodes = nodes.whatcodes -local glyphcodes = nodes.glyphcodes -local disccodes = nodes.disccodes - -local glyph_code = nodecodes.glyph -local glue_code = nodecodes.glue -local disc_code = nodecodes.disc -local whatsit_code = nodecodes.whatsit -local math_code = nodecodes.math - -local dir_code = whatcodes.dir -local localpar_code = whatcodes.localpar - -local discretionary_code = disccodes.discretionary - -local ligature_code = glyphcodes.ligature - -local privateattribute = attributes.private - --- Something is messed up: we have two mark / ligature indices, one at the injection --- end and one here ... this is bases in KE's patches but there is something fishy --- there as I'm pretty sure that for husayni we need some connection (as it's much --- more complex than an average font) but I need proper examples of all cases, not --- of only some. 
- -local a_state = privateattribute('state') -local a_markbase = privateattribute('markbase') -local a_markmark = privateattribute('markmark') -local a_markdone = privateattribute('markdone') -- assigned at the injection end -local a_cursbase = privateattribute('cursbase') -local a_curscurs = privateattribute('curscurs') -local a_cursdone = privateattribute('cursdone') -local a_kernpair = privateattribute('kernpair') -local a_ligacomp = privateattribute('ligacomp') -- assigned here (ideally it should be combined) - -local injections = nodes.injections -local setmark = injections.setmark -local setcursive = injections.setcursive -local setkern = injections.setkern -local setpair = injections.setpair - -local markonce = true -local cursonce = true -local kernonce = true - -local fonthashes = fonts.hashes -local fontdata = fonthashes.identifiers - -local otffeatures = fonts.constructors.newfeatures("otf") -local registerotffeature = otffeatures.register - -local onetimemessage = fonts.loggers.onetimemessage or function() end - -otf.defaultnodealternate = "none" -- first last - --- we share some vars here, after all, we have no nested lookups and less code - -local tfmdata = false -local characters = false -local descriptions = false -local resources = false -local marks = false -local currentfont = false -local lookuptable = false -local anchorlookups = false -local lookuptypes = false -local handlers = { } -local rlmode = 0 -local featurevalue = false - --- head is always a whatsit so we can safely assume that head is not changed - --- we use this for special testing and documentation - -local checkstep = (nodes and nodes.tracers and nodes.tracers.steppers.check) or function() end -local registerstep = (nodes and nodes.tracers and nodes.tracers.steppers.register) or function() end -local registermessage = (nodes and nodes.tracers and nodes.tracers.steppers.message) or function() end - -local function logprocess(...) - if trace_steps then - registermessage(...) - end - report_direct(...) -end - -local function logwarning(...) - report_direct(...) -end - -local f_unicode = formatters["%U"] -local f_uniname = formatters["%U (%s)"] -local f_unilist = formatters["% t (% t)"] - -local function gref(n) -- currently the same as in font-otb - if type(n) == "number" then - local description = descriptions[n] - local name = description and description.name - if name then - return f_uniname(n,name) - else - return f_unicode(n) - end - elseif n then - local num, nam = { }, { } - for i=1,#n do - local ni = n[i] - if tonumber(ni) then -- later we will start at 2 - local di = descriptions[ni] - num[i] = f_unicode(ni) - nam[i] = di and di.name or "-" - end - end - return f_unilist(num,nam) - else - return "" - end -end - -local function cref(kind,chainname,chainlookupname,lookupname,index) -- not in the mood to alias f_ - if index then - return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookupname,index) - elseif lookupname then - return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookupname) - elseif chainlookupname then - return formatters["feature %a, chain %a, sub %a"](kind,chainname,chainlookupname) - elseif chainname then - return formatters["feature %a, chain %a"](kind,chainname) - else - return formatters["feature %a"](kind) - end -end - -local function pref(kind,lookupname) - return formatters["feature %a, lookup %a"](kind,lookupname) -end - --- We can assume that languages that use marks are not hyphenated. 
We can also assume --- that at most one discretionary is present. - --- We do need components in funny kerning mode but maybe I can better reconstruct then --- as we do have the font components info available; removing components makes the --- previous code much simpler. Also, later on copying and freeing becomes easier. --- However, for arabic we need to keep them around for the sake of mark placement --- and indices. - -local function copy_glyph(g) -- next and prev are untouched ! - local components = g.components - if components then - g.components = nil - local n = copy_node(g) - g.components = components - return n - else - return copy_node(g) - end -end - --- start is a mark and we need to keep that one - -local function markstoligature(kind,lookupname,head,start,stop,char) - if start == stop and start.char == char then - return head, start - else - local prev = start.prev - local next = stop.next - start.prev = nil - stop.next = nil - local base = copy_glyph(start) - if head == start then - head = base - end - base.char = char - base.subtype = ligature_code - base.components = start - if prev then - prev.next = base - end - if next then - next.prev = base - end - base.next = next - base.prev = prev - return head, base - end -end - --- The next code is somewhat complicated by the fact that some fonts can have ligatures made --- from ligatures that themselves have marks. This was identified by Kai in for instance --- arabtype: KAF LAM SHADDA ALEF FATHA (0x0643 0x0644 0x0651 0x0627 0x064E). This becomes --- KAF LAM-ALEF with a SHADDA on the first and a FATHA op de second component. In a next --- iteration this becomes a KAF-LAM-ALEF with a SHADDA on the second and a FATHA on the --- third component. - -local function getcomponentindex(start) - if start.id ~= glyph_code then - return 0 - elseif start.subtype == ligature_code then - local i = 0 - local components = start.components - while components do - i = i + getcomponentindex(components) - components = components.next - end - return i - elseif not marks[start.char] then - return 1 - else - return 0 - end -end - --- eventually we will do positioning in an other way (needs addional w/h/d fields) - -local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound) -- brr head - if start == stop and start.char == char then - start.char = char - return head, start - end - local prev = start.prev - local next = stop.next - start.prev = nil - stop.next = nil - local base = copy_glyph(start) - if start == head then - head = base - end - base.char = char - base.subtype = ligature_code - base.components = start -- start can have components - if prev then - prev.next = base - end - if next then - next.prev = base - end - base.next = next - base.prev = prev - if not discfound then - local deletemarks = markflag ~= "mark" - local components = start - local baseindex = 0 - local componentindex = 0 - local head = base - local current = base - -- first we loop over the glyphs in start .. 
stop - while start do - local char = start.char - if not marks[char] then - baseindex = baseindex + componentindex - componentindex = getcomponentindex(start) - elseif not deletemarks then -- quite fishy - start[a_ligacomp] = baseindex + (start[a_ligacomp] or componentindex) - if trace_marks then - logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp]) - end - head, current = insert_node_after(head,current,copy_node(start)) -- unlikely that mark has components - elseif trace_marks then - logwarning("%s: delete mark %s",pref(kind,lookupname),gref(char)) - end - start = start.next - end - -- we can have one accent as part of a lookup and another following - -- local start = components -- was wrong (component scanning was introduced when more complex ligs in devanagari was added) - local start = current.next - while start and start.id == glyph_code do - local char = start.char - if marks[char] then - start[a_ligacomp] = baseindex + (start[a_ligacomp] or componentindex) - if trace_marks then - logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp]) - end - else - break - end - start = start.next - end - end - return head, base -end - -function handlers.gsub_single(head,start,kind,lookupname,replacement) - if trace_singles then - logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(start.char),gref(replacement)) - end - start.char = replacement - return head, start, true -end - -local function get_alternative_glyph(start,alternatives,value,trace_alternatives) - local n = #alternatives - if value == "random" then - local r = random(1,n) - return alternatives[r], trace_alternatives and formatters["value %a, taking %a"](value,r) - elseif value == "first" then - return alternatives[1], trace_alternatives and formatters["value %a, taking %a"](value,1) - elseif value == "last" then - return alternatives[n], trace_alternatives and formatters["value %a, taking %a"](value,n) - else - value = tonumber(value) - if type(value) ~= "number" then - return alternatives[1], trace_alternatives and formatters["invalid value %s, taking %a"](value,1) - elseif value > n then - local defaultalt = otf.defaultnodealternate - if defaultalt == "first" then - return alternatives[n], trace_alternatives and formatters["invalid value %s, taking %a"](value,1) - elseif defaultalt == "last" then - return alternatives[1], trace_alternatives and formatters["invalid value %s, taking %a"](value,n) - else - return false, trace_alternatives and formatters["invalid value %a, %s"](value,"out of range") - end - elseif value == 0 then - return start.char, trace_alternatives and formatters["invalid value %a, %s"](value,"no change") - elseif value < 1 then - return alternatives[1], trace_alternatives and formatters["invalid value %a, taking %a"](value,1) - else - return alternatives[value], trace_alternatives and formatters["value %a, taking %a"](value,value) - end - end -end - -local function multiple_glyphs(head,start,multiple,ignoremarks) - local nofmultiples = #multiple - if nofmultiples > 0 then - start.char = multiple[1] - if nofmultiples > 1 then - local sn = start.next - for k=2,nofmultiples do -- todo: use insert_node --- untested: --- --- while ignoremarks and marks[sn.char] then --- local sn = sn.next --- end - local n = copy_node(start) -- ignore components - n.char = multiple[k] - n.next = sn - n.prev = start - if sn then - sn.prev = n - end - start.next = n - start = n - end - end - return head, start, true - else - if 
trace_multiples then - logprocess("no multiple for %s",gref(start.char)) - end - return head, start, false - end -end - -function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence) - local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue - local choice, comment = get_alternative_glyph(start,alternative,value,trace_alternatives) - if choice then - if trace_alternatives then - logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(start.char),choice,gref(choice),comment) - end - start.char = choice - else - if trace_alternatives then - logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(start.char),comment) - end - end - return head, start, true -end - -function handlers.gsub_multiple(head,start,kind,lookupname,multiple,sequence) - if trace_multiples then - logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(start.char),gref(multiple)) - end - return multiple_glyphs(head,start,multiple,sequence.flags[1]) -end - -function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence) - local s, stop, discfound = start.next, nil, false - local startchar = start.char - if marks[startchar] then - while s do - local id = s.id - if id == glyph_code and s.font == currentfont and s.subtype<256 then - local lg = ligature[s.char] - if lg then - stop = s - ligature = lg - s = s.next - else - break - end - else - break - end - end - if stop then - local lig = ligature.ligature - if lig then - if trace_ligatures then - local stopchar = stop.char - head, start = markstoligature(kind,lookupname,head,start,stop,lig) - logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char)) - else - head, start = markstoligature(kind,lookupname,head,start,stop,lig) - end - return head, start, true - else - -- ok, goto next lookup - end - end - else - local skipmark = sequence.flags[1] - while s do - local id = s.id - if id == glyph_code and s.subtype<256 then - if s.font == currentfont then - local char = s.char - if skipmark and marks[char] then - s = s.next - else - local lg = ligature[char] - if lg then - stop = s - ligature = lg - s = s.next - else - break - end - end - else - break - end - elseif id == disc_code then - discfound = true - s = s.next - else - break - end - end - local lig = ligature.ligature - if lig then - if stop then - if trace_ligatures then - local stopchar = stop.char - head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound) - logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char)) - else - head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound) - end - return head, start, true - else - -- weird but happens (in some arabic font) - start.char = lig - if trace_ligatures then - logprocess("%s: replacing %s by (no real) ligature %s case 3",pref(kind,lookupname),gref(startchar),gref(lig)) - end - return head, start, true - end - else - -- weird but happens - end - end - return head, start, false -end - ---[[ldx-- -

We get hits on a mark, but we're not sure if it has to be applied, so -we need to explicitly test for basechar, baselig and basemark entries.

---ldx]]-- - -function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence) - local markchar = start.char - if marks[markchar] then - local base = start.prev -- [glyph] [start=mark] - if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then - local basechar = base.char - if marks[basechar] then - while true do - base = base.prev - if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then - basechar = base.char - if not marks[basechar] then - break - end - else - if trace_bugs then - logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar)) - end - return head, start, false - end - end - end - local baseanchors = descriptions[basechar] - if baseanchors then - baseanchors = baseanchors.anchors - end - if baseanchors then - local baseanchors = baseanchors['basechar'] - if baseanchors then - local al = anchorlookups[lookupname] - for anchor,ba in next, baseanchors do - if al[anchor] then - local ma = markanchors[anchor] - if ma then - local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) - if trace_marks then - logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)", - pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) - end - return head, start, true - end - end - end - if trace_bugs then - logwarning("%s, no matching anchors for mark %s and base %s",pref(kind,lookupname),gref(markchar),gref(basechar)) - end - end - elseif trace_bugs then - -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar)) - onetimemessage(currentfont,basechar,"no base anchors",report_fonts) - end - elseif trace_bugs then - logwarning("%s: prev node is no char",pref(kind,lookupname)) - end - elseif trace_bugs then - logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar)) - end - return head, start, false -end - -function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence) - -- check chainpos variant - local markchar = start.char - if marks[markchar] then - local base = start.prev -- [glyph] [optional marks] [start=mark] - if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then - local basechar = base.char - if marks[basechar] then - while true do - base = base.prev - if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then - basechar = base.char - if not marks[basechar] then - break - end - else - if trace_bugs then - logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar)) - end - return head, start, false - end - end - end - local index = start[a_ligacomp] - local baseanchors = descriptions[basechar] - if baseanchors then - baseanchors = baseanchors.anchors - if baseanchors then - local baseanchors = baseanchors['baselig'] - if baseanchors then - local al = anchorlookups[lookupname] - for anchor, ba in next, baseanchors do - if al[anchor] then - local ma = markanchors[anchor] - if ma then - ba = ba[index] - if ba then - local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) -- index - if trace_marks then - logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)", - pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy) - end - return head, start, true - else - if trace_bugs then - logwarning("%s: no matching anchors for mark %s and baselig %s with index 
%a",pref(kind,lookupname),gref(markchar),gref(basechar),index) - end - end - end - end - end - if trace_bugs then - logwarning("%s: no matching anchors for mark %s and baselig %s",pref(kind,lookupname),gref(markchar),gref(basechar)) - end - end - end - elseif trace_bugs then - -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar)) - onetimemessage(currentfont,basechar,"no base anchors",report_fonts) - end - elseif trace_bugs then - logwarning("%s: prev node is no char",pref(kind,lookupname)) - end - elseif trace_bugs then - logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar)) - end - return head, start, false -end - -function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence) - local markchar = start.char - if marks[markchar] then - local base = start.prev -- [glyph] [basemark] [start=mark] - local slc = start[a_ligacomp] - if slc then -- a rather messy loop ... needs checking with husayni - while base do - local blc = base[a_ligacomp] - if blc and blc ~= slc then - base = base.prev - else - break - end - end - end - if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then -- subtype test can go - local basechar = base.char - local baseanchors = descriptions[basechar] - if baseanchors then - baseanchors = baseanchors.anchors - if baseanchors then - baseanchors = baseanchors['basemark'] - if baseanchors then - local al = anchorlookups[lookupname] - for anchor,ba in next, baseanchors do - if al[anchor] then - local ma = markanchors[anchor] - if ma then - local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true) - if trace_marks then - logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)", - pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) - end - return head, start, true - end - end - end - if trace_bugs then - logwarning("%s: no matching anchors for mark %s and basemark %s",pref(kind,lookupname),gref(markchar),gref(basechar)) - end - end - end - elseif trace_bugs then - -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar)) - onetimemessage(currentfont,basechar,"no base anchors",report_fonts) - end - elseif trace_bugs then - logwarning("%s: prev node is no mark",pref(kind,lookupname)) - end - elseif trace_bugs then - logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar)) - end - return head, start, false -end - -function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence) -- to be checked - local alreadydone = cursonce and start[a_cursbase] - if not alreadydone then - local done = false - local startchar = start.char - if marks[startchar] then - if trace_cursive then - logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar)) - end - else - local nxt = start.next - while not done and nxt and nxt.id == glyph_code and nxt.font == currentfont and nxt.subtype<256 do - local nextchar = nxt.char - if marks[nextchar] then - -- should not happen (maybe warning) - nxt = nxt.next - else - local entryanchors = descriptions[nextchar] - if entryanchors then - entryanchors = entryanchors.anchors - if entryanchors then - entryanchors = entryanchors['centry'] - if entryanchors then - local al = anchorlookups[lookupname] - for anchor, entry in next, entryanchors do - if al[anchor] then - local exit = exitanchors[anchor] - if exit then - local dx, dy, bound = 
setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar]) - if trace_cursive then - logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode) - end - done = true - break - end - end - end - end - end - elseif trace_bugs then - -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar)) - onetimemessage(currentfont,startchar,"no entry anchors",report_fonts) - end - break - end - end - end - return head, start, done - else - if trace_cursive and trace_details then - logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone) - end - return head, start, false - end -end - -function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence) - local startchar = start.char - local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar]) - if trace_kerns then - logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h) - end - return head, start, false -end - -function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence) - -- todo: kerns in disc nodes: pre, post, replace -> loop over disc too - -- todo: kerns in components of ligatures - local snext = start.next - if not snext then - return head, start, false - else - local prev, done = start, false - local factor = tfmdata.parameters.factor - local lookuptype = lookuptypes[lookupname] - while snext and snext.id == glyph_code and snext.font == currentfont and snext.subtype<256 do - local nextchar = snext.char - local krn = kerns[nextchar] - if not krn and marks[nextchar] then - prev = snext - snext = snext.next - else - if not krn then - -- skip - elseif type(krn) == "table" then - if lookuptype == "pair" then -- probably not needed - local a, b = krn[2], krn[3] - if a and #a > 0 then - local startchar = start.char - local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar]) - if trace_kerns then - logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h) - end - end - if b and #b > 0 then - local startchar = start.char - local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar]) - if trace_kerns then - logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h) - end - end - else -- wrong ... position has different entries - report_process("%s: check this out (old kern stuff)",pref(kind,lookupname)) - -- local a, b = krn[2], krn[6] - -- if a and a ~= 0 then - -- local k = setkern(snext,factor,rlmode,a) - -- if trace_kerns then - -- logprocess("%s: inserting first kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar)) - -- end - -- end - -- if b and b ~= 0 then - -- logwarning("%s: ignoring second kern xoff %s",pref(kind,lookupname),b*factor) - -- end - end - done = true - elseif krn ~= 0 then - local k = setkern(snext,factor,rlmode,krn) - if trace_kerns then - logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar)) - end - done = true - end - break - end - end - return head, start, done - end -end - ---[[ldx-- -

I will implement multiple chain replacements once I run into a font that uses it. It's not that complex to handle.

---ldx]]-- - -local chainmores = { } -local chainprocs = { } - -local function logprocess(...) - if trace_steps then - registermessage(...) - end - report_subchain(...) -end - -local logwarning = report_subchain - -local function logprocess(...) - if trace_steps then - registermessage(...) - end - report_chain(...) -end - -local logwarning = report_chain - --- We could share functions but that would lead to extra function calls with many --- arguments, redundant tests and confusing messages. - -function chainprocs.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname) - logwarning("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname)) - return head, start, false -end - -function chainmores.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname,n) - logprocess("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname)) - return head, start, false -end - --- The reversesub is a special case, which is why we need to store the replacements --- in a bit weird way. There is no lookup and the replacement comes from the lookup --- itself. It is meant mostly for dealing with Urdu. - -function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements) - local char = start.char - local replacement = replacements[char] - if replacement then - if trace_singles then - logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement)) - end - start.char = replacement - return head, start, true - else - return head, start, false - end -end - ---[[ldx-- -

This chain stuff is somewhat tricky since we can have a sequence of actions to be applied: single, alternate, multiple or ligature, where a ligature can be an invalid one in the sense that it replaces several glyphs by a single one that does not necessarily look like the combination (i.e. it is the counterpart of multiple then). For example, the following is valid:

    xxxabcdexxx [single a->A][multiple b->BCD][ligature cde->E] xxxABCDExxx

Therefore we don't really do the replacement here already unless we have the single lookup case. The efficiency of the replacements can be improved by deleting as little as needed, but that would also make the code even messier.
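
(Editorial illustration, not part of the patch and not this module's API: the worked example above can be reproduced with plain Lua strings by applying the three actions in order. All names below are invented for the sketch; the real node-list code is far more involved, since it defers replacements and has to cope with marks and discretionaries.)

    local function apply_chain (text, actions)
        -- apply each replacement action in order to a plain string "buffer"
        for _, action in ipairs (actions) do
            text = action (text)
        end
        return text
    end

    -- the three actions from the example: single a->A, multiple b->BCD, ligature cde->E
    local single   = function (text) return (text:gsub ("a",   "A"  )) end
    local multiple = function (text) return (text:gsub ("b",   "BCD")) end
    local ligature = function (text) return (text:gsub ("cde", "E"  )) end

    print (apply_chain ("xxxabcdexxx", { single, multiple, ligature })) --> xxxABCDExxx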

---ldx]]-- - --- local function delete_till_stop(head,start,stop,ignoremarks) -- keeps start --- local n = 1 --- if start == stop then --- -- done --- elseif ignoremarks then --- repeat -- start x x m x x stop => start m --- local next = start.next --- if not marks[next.char] then --- local components = next.components --- if components then -- probably not needed --- flush_node_list(components) --- end --- head = delete_node(head,next) --- end --- n = n + 1 --- until next == stop --- else -- start x x x stop => start --- repeat --- local next = start.next --- local components = next.components --- if components then -- probably not needed --- flush_node_list(components) --- end --- head = delete_node(head,next) --- n = n + 1 --- until next == stop --- end --- return head, n --- end - ---[[ldx-- -

Here we replace start by a single variant. First we delete the rest of the match.

---ldx]]-- - -function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex) - -- todo: marks ? - local current = start - local subtables = currentlookup.subtables - if #subtables > 1 then - logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," ")) - end - while current do - if current.id == glyph_code then - local currentchar = current.char - local lookupname = subtables[1] -- only 1 - local replacement = lookuphash[lookupname] - if not replacement then - if trace_bugs then - logwarning("%s: no single hits",cref(kind,chainname,chainlookupname,lookupname,chainindex)) - end - else - replacement = replacement[currentchar] - if not replacement or replacement == "" then - if trace_bugs then - logwarning("%s: no single for %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar)) - end - else - if trace_singles then - logprocess("%s: replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement)) - end - current.char = replacement - end - end - return head, start, true - elseif current == stop then - break - else - current = current.next - end - end - return head, start, false -end - -chainmores.gsub_single = chainprocs.gsub_single - ---[[ldx-- -

Here we replace start by a sequence of new glyphs. First we delete the rest of the match.

---ldx]]-- - -function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - -- local head, n = delete_till_stop(head,start,stop) - local startchar = start.char - local subtables = currentlookup.subtables - local lookupname = subtables[1] - local replacements = lookuphash[lookupname] - if not replacements then - if trace_bugs then - logwarning("%s: no multiple hits",cref(kind,chainname,chainlookupname,lookupname)) - end - else - replacements = replacements[startchar] - if not replacements or replacement == "" then - if trace_bugs then - logwarning("%s: no multiple for %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar)) - end - else - if trace_multiples then - logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements)) - end - return multiple_glyphs(head,start,replacements,currentlookup.flags[1]) - end - end - return head, start, false -end - -chainmores.gsub_multiple = chainprocs.gsub_multiple - ---[[ldx-- -

Here we replace start by a new glyph. First we delete the rest of the match.

---ldx]]-- - --- char_1 mark_1 -> char_x mark_1 (ignore marks) --- char_1 mark_1 -> char_x - --- to be checked: do we always have just one glyph? --- we can also have alternates for marks --- marks come last anyway --- are there cases where we need to delete the mark - -function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - local current = start - local subtables = currentlookup.subtables - local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue - while current do - if current.id == glyph_code then -- is this check needed? - local currentchar = current.char - local lookupname = subtables[1] - local alternatives = lookuphash[lookupname] - if not alternatives then - if trace_bugs then - logwarning("%s: no alternative hit",cref(kind,chainname,chainlookupname,lookupname)) - end - else - alternatives = alternatives[currentchar] - if alternatives then - local choice, comment = get_alternative_glyph(current,alternatives,value,trace_alternatives) - if choice then - if trace_alternatives then - logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(char),choice,gref(choice),comment) - end - start.char = choice - else - if trace_alternatives then - logwarning("%s: no variant %a for %s, %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(char),comment) - end - end - elseif trace_bugs then - logwarning("%s: no alternative for %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar),comment) - end - end - return head, start, true - elseif current == stop then - break - else - current = current.next - end - end - return head, start, false -end - -chainmores.gsub_alternate = chainprocs.gsub_alternate - ---[[ldx-- -

When we replace ligatures we use a helper that handles the marks. I might change this function (move code inline and handle the marks by a separate function). We assume rather stupid ligatures (no complex disc nodes).
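
(Editorial sketch, not part of the patched file: assuming glyphs are plain records of the form { char = ..., ismark = ... } and that ligature data is a nested table indexed by component characters with the final replacement stored in a .ligature field (the shape the preparation code further down in this file builds), the mark-skipping component walk could look like this.)

    -- editorial sketch; "ligatures" is assumed to be the subtable already
    -- selected for the first component, i.e. ligaturehash[glyphs[i].char]
    local function match_ligature (glyphs, i, ligatures)
        local last = i
        local n    = i + 1
        while n <= #glyphs do
            local g = glyphs[n]
            if g.ismark then
                n = n + 1                    -- marks are skipped and do not break the match
            else
                local lg = ligatures[g.char] -- descend one component deeper
                if lg then
                    ligatures, last, n = lg, n, n + 1
                else
                    break
                end
            end
        end
        return ligatures.ligature, last      -- replacement (or nil) and last component index
    end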

---ldx]]-- - -function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex) - local startchar = start.char - local subtables = currentlookup.subtables - local lookupname = subtables[1] - local ligatures = lookuphash[lookupname] - if not ligatures then - if trace_bugs then - logwarning("%s: no ligature hits",cref(kind,chainname,chainlookupname,lookupname,chainindex)) - end - else - ligatures = ligatures[startchar] - if not ligatures then - if trace_bugs then - logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar)) - end - else - local s = start.next - local discfound = false - local last = stop - local nofreplacements = 0 - local skipmark = currentlookup.flags[1] - while s do - local id = s.id - if id == disc_code then - s = s.next - discfound = true - else - local schar = s.char - if skipmark and marks[schar] then -- marks - s = s.next - else - local lg = ligatures[schar] - if lg then - ligatures, last, nofreplacements = lg, s, nofreplacements + 1 - if s == stop then - break - else - s = s.next - end - else - break - end - end - end - end - local l2 = ligatures.ligature - if l2 then - if chainindex then - stop = last - end - if trace_ligatures then - if start == stop then - logprocess("%s: replacing character %s by ligature %s case 3",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2)) - else - logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char),gref(l2)) - end - end - head, start = toligature(kind,lookupname,head,start,stop,l2,currentlookup.flags[1],discfound) - return head, start, true, nofreplacements - elseif trace_bugs then - if start == stop then - logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar)) - else - logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char)) - end - end - end - end - return head, start, false, 0 -end - -chainmores.gsub_ligature = chainprocs.gsub_ligature - -function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - local markchar = start.char - if marks[markchar] then - local subtables = currentlookup.subtables - local lookupname = subtables[1] - local markanchors = lookuphash[lookupname] - if markanchors then - markanchors = markanchors[markchar] - end - if markanchors then - local base = start.prev -- [glyph] [start=mark] - if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then - local basechar = base.char - if marks[basechar] then - while true do - base = base.prev - if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then - basechar = base.char - if not marks[basechar] then - break - end - else - if trace_bugs then - logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar)) - end - return head, start, false - end - end - end - local baseanchors = descriptions[basechar].anchors - if baseanchors then - local baseanchors = baseanchors['basechar'] - if baseanchors then - local al = anchorlookups[lookupname] - for anchor,ba in next, baseanchors do - if al[anchor] then - local ma = markanchors[anchor] - if ma then - local dx, dy, bound = 
setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) - if trace_marks then - logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)", - cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) - end - return head, start, true - end - end - end - if trace_bugs then - logwarning("%s, no matching anchors for mark %s and base %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar)) - end - end - end - elseif trace_bugs then - logwarning("%s: prev node is no char",cref(kind,chainname,chainlookupname,lookupname)) - end - elseif trace_bugs then - logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar)) - end - elseif trace_bugs then - logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar)) - end - return head, start, false -end - -function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - local markchar = start.char - if marks[markchar] then - local subtables = currentlookup.subtables - local lookupname = subtables[1] - local markanchors = lookuphash[lookupname] - if markanchors then - markanchors = markanchors[markchar] - end - if markanchors then - local base = start.prev -- [glyph] [optional marks] [start=mark] - if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then - local basechar = base.char - if marks[basechar] then - while true do - base = base.prev - if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then - basechar = base.char - if not marks[basechar] then - break - end - else - if trace_bugs then - logwarning("%s: no base for mark %s",cref(kind,chainname,chainlookupname,lookupname),markchar) - end - return head, start, false - end - end - end - -- todo: like marks a ligatures hash - local index = start[a_ligacomp] - local baseanchors = descriptions[basechar].anchors - if baseanchors then - local baseanchors = baseanchors['baselig'] - if baseanchors then - local al = anchorlookups[lookupname] - for anchor,ba in next, baseanchors do - if al[anchor] then - local ma = markanchors[anchor] - if ma then - ba = ba[index] - if ba then - local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) -- index - if trace_marks then - logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)", - cref(kind,chainname,chainlookupname,lookupname),anchor,a or bound,gref(markchar),gref(basechar),index,dx,dy) - end - return head, start, true - end - end - end - end - if trace_bugs then - logwarning("%s: no matching anchors for mark %s and baselig %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar)) - end - end - end - elseif trace_bugs then - logwarning("feature %s, lookup %s: prev node is no char",kind,lookupname) - end - elseif trace_bugs then - logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar)) - end - elseif trace_bugs then - logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar)) - end - return head, start, false -end - -function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - local markchar = start.char - if marks[markchar] then - -- local alreadydone = markonce and start[a_markmark] - -- if not alreadydone then - -- local markanchors = 
descriptions[markchar].anchors markanchors = markanchors and markanchors.mark - local subtables = currentlookup.subtables - local lookupname = subtables[1] - local markanchors = lookuphash[lookupname] - if markanchors then - markanchors = markanchors[markchar] - end - if markanchors then - local base = start.prev -- [glyph] [basemark] [start=mark] - local slc = start[a_ligacomp] - if slc then -- a rather messy loop ... needs checking with husayni - while base do - local blc = base[a_ligacomp] - if blc and blc ~= slc then - base = base.prev - else - break - end - end - end - if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then -- subtype test can go - local basechar = base.char - local baseanchors = descriptions[basechar].anchors - if baseanchors then - baseanchors = baseanchors['basemark'] - if baseanchors then - local al = anchorlookups[lookupname] - for anchor,ba in next, baseanchors do - if al[anchor] then - local ma = markanchors[anchor] - if ma then - local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true) - if trace_marks then - logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)", - cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) - end - return head, start, true - end - end - end - if trace_bugs then - logwarning("%s: no matching anchors for mark %s and basemark %s",gref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar)) - end - end - end - elseif trace_bugs then - logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname)) - end - elseif trace_bugs then - logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar)) - end - -- elseif trace_marks and trace_details then - -- logprocess("%s, mark %s is already bound (n=%s), ignoring mark2mark",pref(kind,lookupname),gref(markchar),alreadydone) - -- end - elseif trace_bugs then - logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar)) - end - return head, start, false -end - -function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - local alreadydone = cursonce and start[a_cursbase] - if not alreadydone then - local startchar = start.char - local subtables = currentlookup.subtables - local lookupname = subtables[1] - local exitanchors = lookuphash[lookupname] - if exitanchors then - exitanchors = exitanchors[startchar] - end - if exitanchors then - local done = false - if marks[startchar] then - if trace_cursive then - logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar)) - end - else - local nxt = start.next - while not done and nxt and nxt.id == glyph_code and nxt.font == currentfont and nxt.subtype<256 do - local nextchar = nxt.char - if marks[nextchar] then - -- should not happen (maybe warning) - nxt = nxt.next - else - local entryanchors = descriptions[nextchar] - if entryanchors then - entryanchors = entryanchors.anchors - if entryanchors then - entryanchors = entryanchors['centry'] - if entryanchors then - local al = anchorlookups[lookupname] - for anchor, entry in next, entryanchors do - if al[anchor] then - local exit = exitanchors[anchor] - if exit then - local dx, dy, bound = setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar]) - if trace_cursive then - logprocess("%s: moving %s to %s cursive (%p,%p) using 
anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode) - end - done = true - break - end - end - end - end - end - elseif trace_bugs then - -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar)) - onetimemessage(currentfont,startchar,"no entry anchors",report_fonts) - end - break - end - end - end - return head, start, done - else - if trace_cursive and trace_details then - logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone) - end - return head, start, false - end - end - return head, start, false -end - -function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence) - -- untested .. needs checking for the new model - local startchar = start.char - local subtables = currentlookup.subtables - local lookupname = subtables[1] - local kerns = lookuphash[lookupname] - if kerns then - kerns = kerns[startchar] -- needed ? - if kerns then - local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar]) - if trace_kerns then - logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h) - end - end - end - return head, start, false -end - -chainmores.gpos_single = chainprocs.gpos_single -- okay? - --- when machines become faster i will make a shared function - -function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence) - local snext = start.next - if snext then - local startchar = start.char - local subtables = currentlookup.subtables - local lookupname = subtables[1] - local kerns = lookuphash[lookupname] - if kerns then - kerns = kerns[startchar] - if kerns then - local lookuptype = lookuptypes[lookupname] - local prev, done = start, false - local factor = tfmdata.parameters.factor - while snext and snext.id == glyph_code and snext.font == currentfont and snext.subtype<256 do - local nextchar = snext.char - local krn = kerns[nextchar] - if not krn and marks[nextchar] then - prev = snext - snext = snext.next - else - if not krn then - -- skip - elseif type(krn) == "table" then - if lookuptype == "pair" then - local a, b = krn[2], krn[3] - if a and #a > 0 then - local startchar = start.char - local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar]) - if trace_kerns then - logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h) - end - end - if b and #b > 0 then - local startchar = start.char - local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar]) - if trace_kerns then - logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h) - end - end - else - report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname)) - local a, b = krn[2], krn[6] - if a and a ~= 0 then - local k = setkern(snext,factor,rlmode,a) - if trace_kerns then - logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar)) - end - end - if b and b ~= 0 then - logwarning("%s: ignoring second kern xoff %s",cref(kind,chainname,chainlookupname),b*factor) - end - 
end - done = true - elseif krn ~= 0 then - local k = setkern(snext,factor,rlmode,krn) - if trace_kerns then - logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar)) - end - done = true - end - break - end - end - return head, start, done - end - end - end - return head, start, false -end - -chainmores.gpos_pair = chainprocs.gpos_pair -- okay? - --- what pointer to return, spec says stop --- to be discussed ... is bidi changer a space? --- elseif char == zwnj and sequence[n][32] then -- brrr - --- somehow l or f is global --- we don't need to pass the currentcontext, saves a bit --- make a slow variant then can be activated but with more tracing - -local function show_skip(kind,chainname,char,ck,class) - if ck[9] then - logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a, %a => %a",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10]) - else - logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a",cref(kind,chainname),gref(char),class,ck[1],ck[2]) - end -end - -local function normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash) - -- local rule, lookuptype, sequence, f, l, lookups = ck[1], ck[2] ,ck[3], ck[4], ck[5], ck[6] - local flags = sequence.flags - local done = false - local skipmark = flags[1] - local skipligature = flags[2] - local skipbase = flags[3] - local someskip = skipmark or skipligature or skipbase -- could be stored in flags for a fast test (hm, flags could be false !) - local markclass = sequence.markclass -- todo, first we need a proper test - local skipped = false - for k=1,#contexts do - local match = true - local current = start - local last = start - local ck = contexts[k] - local seq = ck[3] - local s = #seq - -- f..l = mid string - if s == 1 then - -- never happens - match = current.id == glyph_code and current.font == currentfont and current.subtype<256 and seq[1][current.char] - else - -- maybe we need a better space check (maybe check for glue or category or combination) - -- we cannot optimize for n=2 because there can be disc nodes - local f, l = ck[4], ck[5] - -- current match - if f == 1 and f == l then -- current only - -- already a hit - -- match = true - else -- before/current/after | before/current | current/after - -- no need to test first hit (to be optimized) - if f == l then -- new, else last out of sync (f is > 1) - -- match = true - else - local n = f + 1 - last = last.next - while n <= l do - if last then - local id = last.id - if id == glyph_code then - if last.font == currentfont and last.subtype<256 then - local char = last.char - local ccd = descriptions[char] - if ccd then - local class = ccd.class - if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then - skipped = true - if trace_skips then - show_skip(kind,chainname,char,ck,class) - end - last = last.next - elseif seq[n][char] then - if n < l then - last = last.next - end - n = n + 1 - else - match = false - break - end - else - match = false - break - end - else - match = false - break - end - elseif id == disc_code then - last = last.next - else - match = false - break - end - else - match = false - break - end - end - end - end - -- before - if match and f > 1 then - local prev = start.prev - if prev then - local n = f-1 - while n >= 1 do - if prev then - local id = prev.id - if id == glyph_code then - if prev.font == currentfont and prev.subtype<256 then -- normal char - local 
char = prev.char - local ccd = descriptions[char] - if ccd then - local class = ccd.class - if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then - skipped = true - if trace_skips then - show_skip(kind,chainname,char,ck,class) - end - elseif seq[n][char] then - n = n -1 - else - match = false - break - end - else - match = false - break - end - else - match = false - break - end - elseif id == disc_code then - -- skip 'm - elseif seq[n][32] then - n = n -1 - else - match = false - break - end - prev = prev.prev - elseif seq[n][32] then -- somewhat special, as zapfino can have many preceding spaces - n = n -1 - else - match = false - break - end - end - elseif f == 2 then - match = seq[1][32] - else - for n=f-1,1 do - if not seq[n][32] then - match = false - break - end - end - end - end - -- after - if match and s > l then - local current = last and last.next - if current then - -- removed optimization for s-l == 1, we have to deal with marks anyway - local n = l + 1 - while n <= s do - if current then - local id = current.id - if id == glyph_code then - if current.font == currentfont and current.subtype<256 then -- normal char - local char = current.char - local ccd = descriptions[char] - if ccd then - local class = ccd.class - if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then - skipped = true - if trace_skips then - show_skip(kind,chainname,char,ck,class) - end - elseif seq[n][char] then - n = n + 1 - else - match = false - break - end - else - match = false - break - end - else - match = false - break - end - elseif id == disc_code then - -- skip 'm - elseif seq[n][32] then -- brrr - n = n + 1 - else - match = false - break - end - current = current.next - elseif seq[n][32] then - n = n + 1 - else - match = false - break - end - end - elseif s-l == 1 then - match = seq[s][32] - else - for n=l+1,s do - if not seq[n][32] then - match = false - break - end - end - end - end - end - if match then - -- ck == currentcontext - if trace_contexts then - local rule, lookuptype, f, l = ck[1], ck[2], ck[4], ck[5] - local char = start.char - if ck[9] then - logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a, %a => %a", - cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10]) - else - logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a", - cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype) - end - end - local chainlookups = ck[6] - if chainlookups then - local nofchainlookups = #chainlookups - -- we can speed this up if needed - if nofchainlookups == 1 then - local chainlookupname = chainlookups[1] - local chainlookup = lookuptable[chainlookupname] - if chainlookup then - local cp = chainprocs[chainlookup.type] - if cp then - local ok - head, start, ok = cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence) - if ok then - done = true - end - else - logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type) - end - else -- shouldn't happen - logprocess("%s is not yet supported",cref(kind,chainname,chainlookupname)) - end - else - local i = 1 - repeat - if skipped then - while true do - local char = start.char - local ccd = descriptions[char] - if ccd then - local class = ccd.class - if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and 
not markclass[char]) then - start = start.next - else - break - end - else - break - end - end - end - local chainlookupname = chainlookups[i] - local chainlookup = lookuptable[chainlookupname] - if not chainlookup then - -- okay, n matches, < n replacements - i = i + 1 - else - local cp = chainmores[chainlookup.type] - if not cp then - -- actually an error - logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type) - i = i + 1 - else - local ok, n - head, start, ok, n = cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence) - -- messy since last can be changed ! - if ok then - done = true - -- skip next one(s) if ligature - i = i + (n or 1) - else - i = i + 1 - end - end - end - if start then - start = start.next - else - -- weird - end - until i > nofchainlookups - end - else - local replacements = ck[7] - if replacements then - head, start, done = chainprocs.reversesub(head,start,last,kind,chainname,ck,lookuphash,replacements) -- sequence - else - done = true -- can be meant to be skipped - if trace_contexts then - logprocess("%s: skipping match",cref(kind,chainname)) - end - end - end - end - end - return head, start, done -end - --- Because we want to keep this elsewhere (an because speed is less an issue) we --- pass the font id so that the verbose variant can access the relevant helper tables. - -local verbose_handle_contextchain = function(font,...) - logwarning("no verbose handler installed, reverting to 'normal'") - otf.setcontextchain() - return normal_handle_contextchain(...) -end - -otf.chainhandlers = { - normal = normal_handle_contextchain, - verbose = verbose_handle_contextchain, -} - -function otf.setcontextchain(method) - if not method or method == "normal" or not otf.chainhandlers[method] then - if handlers.contextchain then -- no need for a message while making the format - logwarning("installing normal contextchain handler") - end - handlers.contextchain = normal_handle_contextchain - else - logwarning("installing contextchain handler %a",method) - local handler = otf.chainhandlers[method] - handlers.contextchain = function(...) - return handler(currentfont,...) -- hm, get rid of ... - end - end - handlers.gsub_context = handlers.contextchain - handlers.gsub_contextchain = handlers.contextchain - handlers.gsub_reversecontextchain = handlers.contextchain - handlers.gpos_contextchain = handlers.contextchain - handlers.gpos_context = handlers.contextchain -end - -otf.setcontextchain() - -local missing = { } -- we only report once - -local function logprocess(...) - if trace_steps then - registermessage(...) - end - report_process(...) 
-end - -local logwarning = report_process - -local function report_missing_cache(typ,lookup) - local f = missing[currentfont] if not f then f = { } missing[currentfont] = f end - local t = f[typ] if not t then t = { } f[typ] = t end - if not t[lookup] then - t[lookup] = true - logwarning("missing cache for lookup %a, type %a, font %a, name %a",lookup,typ,currentfont,tfmdata.properties.fullname) - end -end - -local resolved = { } -- we only resolve a font,script,language pair once - --- todo: pass all these 'locals' in a table - -local lookuphashes = { } - -setmetatableindex(lookuphashes, function(t,font) - local lookuphash = fontdata[font].resources.lookuphash - if not lookuphash or not next(lookuphash) then - lookuphash = false - end - t[font] = lookuphash - return lookuphash -end) - --- fonts.hashes.lookups = lookuphashes - -local autofeatures = fonts.analyzers.features -- was: constants - -local function initialize(sequence,script,language,enabled) - local features = sequence.features - if features then - for kind, scripts in next, features do - local valid = enabled[kind] - if valid then - local languages = scripts[script] or scripts[wildcard] - if languages and (languages[language] or languages[wildcard]) then - return { valid, autofeatures[kind] or false, sequence.chain or 0, kind, sequence } - end - end - end - end - return false -end - -function otf.dataset(tfmdata,font) -- generic variant, overloaded in context - local shared = tfmdata.shared - local properties = tfmdata.properties - local language = properties.language or "dflt" - local script = properties.script or "dflt" - local enabled = shared.features - local res = resolved[font] - if not res then - res = { } - resolved[font] = res - end - local rs = res[script] - if not rs then - rs = { } - res[script] = rs - end - local rl = rs[language] - if not rl then - rl = { - -- indexed but we can also add specific data by key - } - rs[language] = rl - local sequences = tfmdata.resources.sequences --- setmetatableindex(rl, function(t,k) --- if type(k) == "number" then --- local v = enabled and initialize(sequences[k],script,language,enabled) --- t[k] = v --- return v --- end --- end) -for s=1,#sequences do - local v = enabled and initialize(sequences[s],script,language,enabled) - if v then - rl[#rl+1] = v - end -end - end - return rl -end - --- elseif id == glue_code then --- if p[5] then -- chain --- local pc = pp[32] --- if pc then --- start, ok = start, false -- p[1](start,kind,p[2],pc,p[3],p[4]) --- if ok then --- done = true --- end --- if start then start = start.next end --- else --- start = start.next --- end --- else --- start = start.next --- end - --- there will be a new direction parser (pre-parsed etc) - --- less bytecode: 290 -> 254 --- --- attr = attr or false --- --- local a = getattr(start,0) --- if (a == attr and (not attribute or getattr(start,a_state) == attribute)) or (not attribute or getattr(start,a_state) == attribute) then --- -- the action --- end - -local function featuresprocessor(head,font,attr) - - local lookuphash = lookuphashes[font] -- we can also check sequences here - - if not lookuphash then - return head, false - end - - if trace_steps then - checkstep(head) - end - - tfmdata = fontdata[font] - descriptions = tfmdata.descriptions - characters = tfmdata.characters - resources = tfmdata.resources - - marks = resources.marks - anchorlookups = resources.lookup_to_anchor - lookuptable = resources.lookups - lookuptypes = resources.lookuptypes - - currentfont = font - rlmode = 0 - - local sequences = 
resources.sequences - local done = false - local datasets = otf.dataset(tfmdata,font,attr) - - local dirstack = { } -- could move outside function - - -- We could work on sub start-stop ranges instead but I wonder if there is that - -- much speed gain (experiments showed that it made not much sense) and we need - -- to keep track of directions anyway. Also at some point I want to play with - -- font interactions and then we do need the full sweeps. - - -- Keeping track of the headnode is needed for devanagari (I generalized it a bit - -- so that multiple cases are also covered.) - - for s=1,#datasets do - local dataset = datasets[s] - featurevalue = dataset[1] -- todo: pass to function instead of using a global - - local sequence = dataset[5] -- sequences[s] -- also dataset[5] - local rlparmode = 0 - local topstack = 0 - local success = false - local attribute = dataset[2] - local chain = dataset[3] -- sequence.chain or 0 - local typ = sequence.type - local subtables = sequence.subtables - if chain < 0 then - -- this is a limited case, no special treatments like 'init' etc - local handler = handlers[typ] - -- we need to get rid of this slide! probably no longer needed in latest luatex - local start = find_node_tail(head) -- slow (we can store tail because there's always a skip at the end): todo - while start do - local id = start.id - if id == glyph_code then - if start.font == font and start.subtype<256 then - local a = start[0] - if a then - a = a == attr - else - a = true - end - if a then - for i=1,#subtables do - local lookupname = subtables[i] - local lookupcache = lookuphash[lookupname] - if lookupcache then - local lookupmatch = lookupcache[start.char] - if lookupmatch then - head, start, success = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) - if success then - break - end - end - else - report_missing_cache(typ,lookupname) - end - end - if start then start = start.prev end - else - start = start.prev - end - else - start = start.prev - end - else - start = start.prev - end - end - else - local handler = handlers[typ] - local ns = #subtables - local start = head -- local ? - rlmode = 0 -- to be checked ? 
- if ns == 1 then -- happens often - local lookupname = subtables[1] - local lookupcache = lookuphash[lookupname] - if not lookupcache then -- also check for empty cache - report_missing_cache(typ,lookupname) - else - - local function subrun(start) - -- mostly for gsub, gpos would demand a more clever approach - local head = start - local done = false - while start do - local id = start.id - if id == glyph_code and start.font == font and start.subtype <256 then - local a = start[0] - if a then - a = (a == attr) and (not attribute or start[a_state] == attribute) - else - a = not attribute or start[a_state] == attribute - end - if a then - local lookupmatch = lookupcache[start.char] - if lookupmatch then - -- sequence kan weg - local ok - head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) - if ok then - done = true - end - end - if start then start = start.next end - else - start = start.next - end - else - start = start.next - end - end - if done then - success = true - return head - end - end - - local function kerndisc(disc) -- we can assume that prev and next are glyphs - local prev = disc.prev - local next = disc.next - if prev and next then - prev.next = next - -- next.prev = prev - local a = prev[0] - if a then - a = (a == attr) and (not attribute or prev[a_state] == attribute) - else - a = not attribute or prev[a_state] == attribute - end - if a then - local lookupmatch = lookupcache[prev.char] - if lookupmatch then - -- sequence kan weg - local h, d, ok = handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) - if ok then - done = true - success = true - end - end - end - prev.next = disc - -- next.prev = disc - end - return next - end - - while start do - local id = start.id - if id == glyph_code then - if start.font == font and start.subtype<256 then - local a = start[0] - if a then - a = (a == attr) and (not attribute or start[a_state] == attribute) - else - a = not attribute or start[a_state] == attribute - end - if a then - local lookupmatch = lookupcache[start.char] - if lookupmatch then - -- sequence kan weg - local ok - head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) - if ok then - success = true - end - end - if start then start = start.next end - else - start = start.next - end - else - start = start.next - end - elseif id == disc_code then - -- mostly for gsub - if start.subtype == discretionary_code then - local pre = start.pre - if pre then - local new = subrun(pre) - if new then start.pre = new end - end - local post = start.post - if post then - local new = subrun(post) - if new then start.post = new end - end - local replace = start.replace - if replace then - local new = subrun(replace) - if new then start.replace = new end - end -elseif typ == "gpos_single" or typ == "gpos_pair" then - kerndisc(start) - end - start = start.next - elseif id == whatsit_code then -- will be function - local subtype = start.subtype - if subtype == dir_code then - local dir = start.dir - if dir == "+TRT" or dir == "+TLT" then - topstack = topstack + 1 - dirstack[topstack] = dir - elseif dir == "-TRT" or dir == "-TLT" then - topstack = topstack - 1 - end - local newdir = dirstack[topstack] - if newdir == "+TRT" then - rlmode = -1 - elseif newdir == "+TLT" then - rlmode = 1 - else - rlmode = rlparmode - end - if trace_directions then - report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir) - end - elseif 
subtype == localpar_code then - local dir = start.dir - if dir == "TRT" then - rlparmode = -1 - elseif dir == "TLT" then - rlparmode = 1 - else - rlparmode = 0 - end - -- one might wonder if the par dir should be looked at, so we might as well drop the next line - rlmode = rlparmode - if trace_directions then - report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode) - end - end - start = start.next - elseif id == math_code then - start = end_of_math(start).next - else - start = start.next - end - end - end - else - - local function subrun(start) - -- mostly for gsub, gpos would demand a more clever approach - local head = start - local done = false - while start do - local id = start.id - if id == glyph_code and start.id == font and start.subtype <256 then - local a = start[0] - if a then - a = (a == attr) and (not attribute or start[a_state] == attribute) - else - a = not attribute or start[a_state] == attribute - end - if a then - for i=1,ns do - local lookupname = subtables[i] - local lookupcache = lookuphash[lookupname] - if lookupcache then - local lookupmatch = lookupcache[start.char] - if lookupmatch then - -- we could move all code inline but that makes things even more unreadable - local ok - head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) - if ok then - done = true - break - elseif not start then - -- don't ask why ... shouldn't happen - break - end - end - else - report_missing_cache(typ,lookupname) - end - end - if start then start = start.next end - else - start = start.next - end - else - start = start.next - end - end - if done then - success = true - return head - end - end - - local function kerndisc(disc) -- we can assume that prev and next are glyphs - local prev = disc.prev - local next = disc.next - if prev and next then - prev.next = next - -- next.prev = prev - local a = prev[0] - if a then - a = (a == attr) and (not attribute or prev[a_state] == attribute) - else - a = not attribute or prev[a_state] == attribute - end - if a then - for i=1,ns do - local lookupname = subtables[i] - local lookupcache = lookuphash[lookupname] - if lookupcache then - local lookupmatch = lookupcache[prev.char] - if lookupmatch then - -- we could move all code inline but that makes things even more unreadable - local h, d, ok = handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) - if ok then - done = true - break - end - end - else - report_missing_cache(typ,lookupname) - end - end - end - prev.next = disc - -- next.prev = disc - end - return next - end - - while start do - local id = start.id - if id == glyph_code then - if start.font == font and start.subtype<256 then - local a = start[0] - if a then - a = (a == attr) and (not attribute or start[a_state] == attribute) - else - a = not attribute or start[a_state] == attribute - end - if a then - for i=1,ns do - local lookupname = subtables[i] - local lookupcache = lookuphash[lookupname] - if lookupcache then - local lookupmatch = lookupcache[start.char] - if lookupmatch then - -- we could move all code inline but that makes things even more unreadable - local ok - head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) - if ok then - success = true - break - elseif not start then - -- don't ask why ... 
shouldn't happen - break - end - end - else - report_missing_cache(typ,lookupname) - end - end - if start then start = start.next end - else - start = start.next - end - else - start = start.next - end - elseif id == disc_code then - -- mostly for gsub - if start.subtype == discretionary_code then - local pre = start.pre - if pre then - local new = subrun(pre) - if new then start.pre = new end - end - local post = start.post - if post then - local new = subrun(post) - if new then start.post = new end - end - local replace = start.replace - if replace then - local new = subrun(replace) - if new then start.replace = new end - end -elseif typ == "gpos_single" or typ == "gpos_pair" then - kerndisc(start) - end - start = start.next - elseif id == whatsit_code then - local subtype = start.subtype - if subtype == dir_code then - local dir = start.dir - if dir == "+TRT" or dir == "+TLT" then - topstack = topstack + 1 - dirstack[topstack] = dir - elseif dir == "-TRT" or dir == "-TLT" then - topstack = topstack - 1 - end - local newdir = dirstack[topstack] - if newdir == "+TRT" then - rlmode = -1 - elseif newdir == "+TLT" then - rlmode = 1 - else - rlmode = rlparmode - end - if trace_directions then - report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir) - end - elseif subtype == localpar_code then - local dir = start.dir - if dir == "TRT" then - rlparmode = -1 - elseif dir == "TLT" then - rlparmode = 1 - else - rlparmode = 0 - end - rlmode = rlparmode - if trace_directions then - report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode) - end - end - start = start.next - elseif id == math_code then - start = end_of_math(start).next - else - start = start.next - end - end - end - end - if success then - done = true - end - if trace_steps then -- ? - registerstep(head) - end - end - return head, done -end - -local function generic(lookupdata,lookupname,unicode,lookuphash) - local target = lookuphash[lookupname] - if target then - target[unicode] = lookupdata - else - lookuphash[lookupname] = { [unicode] = lookupdata } - end -end - -local action = { - - substitution = generic, - multiple = generic, - alternate = generic, - position = generic, - - ligature = function(lookupdata,lookupname,unicode,lookuphash) - local target = lookuphash[lookupname] - if not target then - target = { } - lookuphash[lookupname] = target - end - for i=1,#lookupdata do - local li = lookupdata[i] - local tu = target[li] - if not tu then - tu = { } - target[li] = tu - end - target = tu - end - target.ligature = unicode - end, - - pair = function(lookupdata,lookupname,unicode,lookuphash) - local target = lookuphash[lookupname] - if not target then - target = { } - lookuphash[lookupname] = target - end - local others = target[unicode] - local paired = lookupdata[1] - if others then - others[paired] = lookupdata - else - others = { [paired] = lookupdata } - target[unicode] = others - end - end, - -} - -local function prepare_lookups(tfmdata) - - local rawdata = tfmdata.shared.rawdata - local resources = rawdata.resources - local lookuphash = resources.lookuphash - local anchor_to_lookup = resources.anchor_to_lookup - local lookup_to_anchor = resources.lookup_to_anchor - local lookuptypes = resources.lookuptypes - local characters = tfmdata.characters - local descriptions = tfmdata.descriptions - - -- we cannot free the entries in the descriptions as sometimes we access - -- then directly (for instance anchors) ... 
selectively freeing does save - -- much memory as it's only a reference to a table and the slot in the - -- description hash is not freed anyway - - for unicode, character in next, characters do -- we cannot loop over descriptions ! - - local description = descriptions[unicode] - - if description then - - local lookups = description.slookups - if lookups then - for lookupname, lookupdata in next, lookups do - action[lookuptypes[lookupname]](lookupdata,lookupname,unicode,lookuphash) - end - end - - local lookups = description.mlookups - if lookups then - for lookupname, lookuplist in next, lookups do - local lookuptype = lookuptypes[lookupname] - for l=1,#lookuplist do - local lookupdata = lookuplist[l] - action[lookuptype](lookupdata,lookupname,unicode,lookuphash) - end - end - end - - local list = description.kerns - if list then - for lookup, krn in next, list do -- ref to glyph, saves lookup - local target = lookuphash[lookup] - if target then - target[unicode] = krn - else - lookuphash[lookup] = { [unicode] = krn } - end - end - end - - local list = description.anchors - if list then - for typ, anchors in next, list do -- types - if typ == "mark" or typ == "cexit" then -- or entry? - for name, anchor in next, anchors do - local lookups = anchor_to_lookup[name] - if lookups then - for lookup, _ in next, lookups do - local target = lookuphash[lookup] - if target then - target[unicode] = anchors - else - lookuphash[lookup] = { [unicode] = anchors } - end - end - end - end - end - end - end - - end - - end - -end - -local function split(replacement,original) - local result = { } - for i=1,#replacement do - result[original[i]] = replacement[i] - end - return result -end - -local valid = { - coverage = { chainsub = true, chainpos = true, contextsub = true }, - reversecoverage = { reversesub = true }, - glyphs = { chainsub = true, chainpos = true }, -} - -local function prepare_contextchains(tfmdata) - local rawdata = tfmdata.shared.rawdata - local resources = rawdata.resources - local lookuphash = resources.lookuphash - local lookups = rawdata.lookups - if lookups then - for lookupname, lookupdata in next, rawdata.lookups do - local lookuptype = lookupdata.type - if lookuptype then - local rules = lookupdata.rules - if rules then - local format = lookupdata.format - local validformat = valid[format] - if not validformat then - report_prepare("unsupported format %a",format) - elseif not validformat[lookuptype] then - -- todo: dejavu-serif has one (but i need to see what use it has) - report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookupname) - else - local contexts = lookuphash[lookupname] - if not contexts then - contexts = { } - lookuphash[lookupname] = contexts - end - local t, nt = { }, 0 - for nofrules=1,#rules do - local rule = rules[nofrules] - local current = rule.current - local before = rule.before - local after = rule.after - local replacements = rule.replacements - local sequence = { } - local nofsequences = 0 - -- Eventually we can store start, stop and sequence in the cached file - -- but then less sharing takes place so best not do that without a lot - -- of profiling so let's forget about it. 
- if before then - for n=1,#before do - nofsequences = nofsequences + 1 - sequence[nofsequences] = before[n] - end - end - local start = nofsequences + 1 - for n=1,#current do - nofsequences = nofsequences + 1 - sequence[nofsequences] = current[n] - end - local stop = nofsequences - if after then - for n=1,#after do - nofsequences = nofsequences + 1 - sequence[nofsequences] = after[n] - end - end - if sequence[1] then - -- Replacements only happen with reverse lookups as they are single only. We - -- could pack them into current (replacement value instead of true) and then - -- use sequence[start] instead but it's somewhat ugly. - nt = nt + 1 - t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups, replacements } - for unic, _ in next, sequence[start] do - local cu = contexts[unic] - if not cu then - contexts[unic] = t - end - end - end - end - end - else - -- no rules - end - else - report_prepare("missing lookuptype for lookupname %a",lookupname) - end - end - end -end - --- we can consider lookuphash == false (initialized but empty) vs lookuphash == table - -local function featuresinitializer(tfmdata,value) - if true then -- value then - -- beware we need to use the topmost properties table - local rawdata = tfmdata.shared.rawdata - local properties = rawdata.properties - if not properties.initialized then - local starttime = trace_preparing and os.clock() - local resources = rawdata.resources - resources.lookuphash = resources.lookuphash or { } - prepare_contextchains(tfmdata) - prepare_lookups(tfmdata) - properties.initialized = true - if trace_preparing then - report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,tfmdata.properties.fullname) - end - end - end -end - -registerotffeature { - name = "features", - description = "features", - default = true, - initializers = { - position = 1, - node = featuresinitializer, - }, - processors = { - node = featuresprocessor, - } -} - --- This can be used for extra handlers, but should be used with care! - -otf.handlers = handlers diff --git a/src/fontloader/luaotfload-fonts-tfm.lua b/src/fontloader/luaotfload-fonts-tfm.lua deleted file mode 100644 index b9bb1bd..0000000 --- a/src/fontloader/luaotfload-fonts-tfm.lua +++ /dev/null @@ -1,38 +0,0 @@ -if not modules then modules = { } end modules ['luatex-fonts-tfm'] = { - version = 1.001, - comment = "companion to luatex-*.tex", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end - -local fonts = fonts -local tfm = { } -fonts.handlers.tfm = tfm -fonts.formats.tfm = "type1" -- we need to have at least a value here - -function fonts.readers.tfm(specification) - local fullname = specification.filename or "" - if fullname == "" then - local forced = specification.forced or "" - if forced ~= "" then - fullname = specification.name .. "." .. 
forced - else - fullname = specification.name - end - end - local foundname = resolvers.findbinfile(fullname, 'tfm') or "" - if foundname == "" then - foundname = resolvers.findbinfile(fullname, 'ofm') or "" - end - if foundname ~= "" then - specification.filename = foundname - specification.format = "ofm" - return font.read_tfm(specification.filename,specification.size) - end -end diff --git a/src/luaotfload-main.lua b/src/luaotfload-main.lua index 7dd6c6e..d3c4552 100644 --- a/src/luaotfload-main.lua +++ b/src/luaotfload-main.lua @@ -129,16 +129,15 @@ end --doc]]-- local make_loader = function (prefix) - return function (name) - require ((prefix or "luaotfload") .."-"..name) - end + return prefix and function (name) require (prefix .. "-" .. name .. ".lua") end + or function (name) require (name) end end -local load_luaotfload_module = make_loader () +local load_luaotfload_module = make_loader "luaotfload" ----- load_luaotfload_module = make_loader "luatex" --=> for Luatex-Plain -local load_fontloader_module = make_loader "luaotfload" --- XXX adapt +local load_fontloader_module = make_loader "fontloader" -load_luaotfload_module "log.lua" --- log messages +load_luaotfload_module "log" --- log messages local log = luaotfload.log local logreport = log.report @@ -309,7 +308,7 @@ tex.attribute[0] = 0 --doc]]-- -load_fontloader_module "fontloader.lua" +load_fontloader_module "fontloader" ---load_fontloader_module "font-odv.lua" --- <= Devanagari support from Context if fonts then @@ -320,7 +319,7 @@ if fonts then logreport ("log", 5, "main", [["I am using the merged fontloader here.]]) logreport ("log", 5, "main", [[ If you run into problems or experience unexpected]]) logreport ("log", 5, "main", [[ behaviour, and if you have ConTeXt installed you can try]]) - logreport ("log", 5, "main", [[ to delete the file 'luaotfload-fontloader.lua' as I might]]) + logreport ("log", 5, "main", [[ to delete the file 'fontloader-fontloader.lua' as I might]]) logreport ("log", 5, "main", [[ then use the possibly updated libraries. The merged]]) logreport ("log", 5, "main", [[ version is not supported as it is a frozen instance.]]) logreport ("log", 5, "main", [[ Problems can be reported to the ConTeXt mailing list."]]) @@ -331,38 +330,38 @@ else--- the loading sequence is known to change, so this might have to --- be updated with future updates! --- do not modify it though unless there is a change to the merged --- package! 
- load_fontloader_module "l-lua.lua" - load_fontloader_module "l-lpeg.lua" - load_fontloader_module "l-function.lua" - load_fontloader_module "l-string.lua" - load_fontloader_module "l-table.lua" - load_fontloader_module "l-io.lua" - load_fontloader_module "l-file.lua" - load_fontloader_module "l-boolean.lua" - load_fontloader_module "l-math.lua" - load_fontloader_module "util-str.lua" - load_fontloader_module "luatex-basics-gen.lua" - load_fontloader_module "data-con.lua" - load_fontloader_module "luatex-basics-nod.lua" - load_fontloader_module "font-ini.lua" - load_fontloader_module "font-con.lua" - load_fontloader_module "luatex-fonts-enc.lua" - load_fontloader_module "font-cid.lua" - load_fontloader_module "font-map.lua" - load_fontloader_module "luatex-fonts-syn.lua" - load_fontloader_module "luatex-fonts-tfm.lua" - load_fontloader_module "font-oti.lua" - load_fontloader_module "font-otf.lua" - load_fontloader_module "font-otb.lua" - load_fontloader_module "luatex-fonts-inj.lua" --> since 2014-01-07, replaces node-inj.lua - load_fontloader_module "font-ota.lua" - load_fontloader_module "luatex-fonts-otn.lua" --> since 2014-01-07, replaces font-otn.lua - load_fontloader_module "font-otp.lua" --> since 2013-04-23 - load_fontloader_module "luatex-fonts-lua.lua" - load_fontloader_module "font-def.lua" - load_fontloader_module "luatex-fonts-def.lua" - load_fontloader_module "luatex-fonts-ext.lua" - load_fontloader_module "luatex-fonts-cbk.lua" + load_fontloader_module "l-lua" + load_fontloader_module "l-lpeg" + load_fontloader_module "l-function" + load_fontloader_module "l-string" + load_fontloader_module "l-table" + load_fontloader_module "l-io" + load_fontloader_module "l-file" + load_fontloader_module "l-boolean" + load_fontloader_module "l-math" + load_fontloader_module "util-str" + load_fontloader_module "luatex-basics-gen" + load_fontloader_module "data-con" + load_fontloader_module "luatex-basics-nod" + load_fontloader_module "font-ini" + load_fontloader_module "font-con" + load_fontloader_module "luatex-fonts-enc" + load_fontloader_module "font-cid" + load_fontloader_module "font-map" + load_fontloader_module "luatex-fonts-syn" + load_fontloader_module "luatex-fonts-tfm" + load_fontloader_module "font-oti" + load_fontloader_module "font-otf" + load_fontloader_module "font-otb" + load_fontloader_module "luatex-fonts-inj" --> since 2014-01-07, replaces node-inj.lua + load_fontloader_module "font-ota" + load_fontloader_module "luatex-fonts-otn" --> since 2014-01-07, replaces font-otn.lua + load_fontloader_module "font-otp" --> since 2013-04-23 + load_fontloader_module "luatex-fonts-lua" + load_fontloader_module "font-def" + load_fontloader_module "luatex-fonts-def" + load_fontloader_module "luatex-fonts-ext" + load_fontloader_module "luatex-fonts-cbk" end --- non-merge fallback scope --[[doc-- @@ -415,7 +414,7 @@ add_to_callback("hpack_filter", add_to_callback("find_vf_file", find_vf_file, "luaotfload.find_vf_file") -load_luaotfload_module "override.lua" --- load glyphlist on demand +load_luaotfload_module "override" --- load glyphlist on demand --[[doc-- @@ -423,16 +422,16 @@ load_luaotfload_module "override.lua" --- load glyphlist on demand --doc]]-- -load_luaotfload_module "parsers.lua" --- fonts.conf and syntax -load_luaotfload_module "configuration.lua" --- configuration options +load_luaotfload_module "parsers" --- fonts.conf and syntax +load_luaotfload_module "configuration" --- configuration options if not config.actions.apply_defaults () then logreport ("log", 0, "load", 
"Configuration unsuccessful.") end -load_luaotfload_module "loaders.lua" --- Type1 font wrappers -load_luaotfload_module "database.lua" --- Font management. -load_luaotfload_module "colors.lua" --- Per-font colors. +load_luaotfload_module "loaders" --- Type1 font wrappers +load_luaotfload_module "database" --- Font management. +load_luaotfload_module "colors" --- Per-font colors. if not config.actions.reconfigure () then logreport ("log", 0, "load", "Post-configuration hooks failed.") @@ -732,9 +731,9 @@ reset_callback "define_font" local definer = config.luaotfload.run.definer add_to_callback ("define_font", definers[definer], "luaotfload.define_font", 1) -load_luaotfload_module "features.lua" --- font request and feature handling -load_luaotfload_module "letterspace.lua" --- extra character kerning -load_luaotfload_module "auxiliary.lua" --- additional high-level functionality +load_luaotfload_module "features" --- font request and feature handling +load_luaotfload_module "letterspace" --- extra character kerning +load_luaotfload_module "auxiliary" --- additional high-level functionality luaotfload.aux.start_rewrite_fontname () --- to be migrated to fontspec -- cgit v1.2.3 From a74ab8725066a2b439920bc3ce928be69abe88a2 Mon Sep 17 00:00:00 2001 From: Philipp Gesang Date: Mon, 8 Dec 2014 21:10:04 +0100 Subject: [scripts] adapt status script to reflect current fontloader file prefix --- scripts/mkstatus | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/scripts/mkstatus b/scripts/mkstatus index 3665aff..9d04ef9 100755 --- a/scripts/mkstatus +++ b/scripts/mkstatus @@ -37,25 +37,25 @@ local filelist = "./build/luaotfload-status.lua" --- result local names = { --- only the runtime files and scripts { "src", "luaotfload-auxiliary.lua", }, - { "src/fontloader", "luaotfload-basics-gen.lua", }, - { "src/fontloader", "luaotfload-basics-nod.lua", }, + { "src/fontloader", "fontloader-basics-gen.lua", }, + { "src/fontloader", "fontloader-basics-nod.lua", }, { "build", "luaotfload-characters.lua", }, { "src", "luaotfload-colors.lua", }, { "src", "luaotfload-database.lua", }, { "src", "luaotfload-diagnostics.lua", }, { "src", "luaotfload-features.lua", }, - { "src/fontloader", "luaotfload-fonts-cbk.lua", }, - { "src/fontloader", "luaotfload-fonts-def.lua", }, - { "src/fontloader", "luaotfload-fonts-enc.lua", }, - { "src/fontloader", "luaotfload-fonts-ext.lua", }, - { "src/fontloader", "luaotfload-fonts-lua.lua", }, - { "src/fontloader", "luaotfload-fonts-tfm.lua", }, + { "src/fontloader", "fontloader-fonts-cbk.lua", }, + { "src/fontloader", "fontloader-fonts-def.lua", }, + { "src/fontloader", "fontloader-fonts-enc.lua", }, + { "src/fontloader", "fontloader-fonts-ext.lua", }, + { "src/fontloader", "fontloader-fonts-lua.lua", }, + { "src/fontloader", "fontloader-fonts-tfm.lua", }, { "build", "luaotfload-glyphlist.lua", }, { "src", "luaotfload-letterspace.lua", }, { "src", "luaotfload-loaders.lua", }, { "src", "luaotfload-log.lua", }, { "src", "luaotfload-main.lua", }, - { "src/fontloader", "luaotfload-fontloader.lua", }, + { "src/fontloader", "fontloader-fontloader.lua", }, { "src", "luaotfload-override.lua", }, { "src", "luaotfload-parsers.lua", }, { "src", "luaotfload-tool.lua", }, -- cgit v1.2.3 From 44743a73b1862f6acb5e2039e71abb616853697e Mon Sep 17 00:00:00 2001 From: Philipp Gesang Date: Mon, 8 Dec 2014 21:13:21 +0100 Subject: [*] update news --- NEWS | 1 + 1 file changed, 1 insertion(+) diff --git a/NEWS b/NEWS index 95a3640..4926d35 100644 --- a/NEWS +++ b/NEWS @@ 
-5,6 +5,7 @@ Change History * Add ``sign`` target to makefile for automated package signing * Add ``--dumpconf`` option to luaotfload-tool for generating configuration files + * Move fontloader files to subtree src/fontloader 2014/07/13, luaotfload v2.5 * Remove legacy code. -- cgit v1.2.3 From 3cf2dcca26bee734bc2ba867ff5709af712c8df0 Mon Sep 17 00:00:00 2001 From: Philipp Gesang Date: Mon, 8 Dec 2014 22:34:37 +0100 Subject: [build] include import script in makefile --- Makefile | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 17f583f..b3fce99 100644 --- a/Makefile +++ b/Makefile @@ -20,6 +20,7 @@ CONFDEMO = $(MISCDIR)/luaotfload.conf.example GLYPHSCRIPT = $(SCRIPTSRCDIR)/mkglyphlist CHARSCRIPT = $(SCRIPTSRCDIR)/mkcharacters STATUSSCRIPT = $(SCRIPTSRCDIR)/mkstatus +IMPORTSCRIPT = $(SCRIPTSRCDIR)/mkimport GLYPHSOURCE = $(BUILDDIR)/glyphlist.txt @@ -88,6 +89,7 @@ LUA = texlua DO_GLYPHS = $(LUA) $(GLYPHSCRIPT) > /dev/null DO_CHARS = $(LUA) $(CHARSCRIPT) > /dev/null DO_STATUS = $(LUA) $(STATUSSCRIPT) > /dev/null +DO_IMPORT = $(LUA) $(IMPORTSCRIPT) > /dev/null define check-lua-files @echo validating syntax @@ -179,7 +181,7 @@ $(TDS_ZIP): $(DOCS) $(ALL_STATUS) check sign: $(CTAN_ZIPSIG) -.PHONY: install manifest clean mrproper show showtargets check +.PHONY: install manifest clean mrproper show showtargets check import news install: $(ALL_STATUS) @echo "Installing in '$(TEXMFROOT)'." -- cgit v1.2.3 From 56d99fef8a1ec03daa1921aab018ebf104c64c99 Mon Sep 17 00:00:00 2001 From: Philipp Gesang Date: Tue, 9 Dec 2014 00:33:39 +0100 Subject: [scripts] add early draft of import helper --- scripts/mkimport | 321 +++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 321 insertions(+) create mode 100644 scripts/mkimport diff --git a/scripts/mkimport b/scripts/mkimport new file mode 100644 index 0000000..f0e7410 --- /dev/null +++ b/scripts/mkimport @@ -0,0 +1,321 @@ +#!/usr/bin/env texlua +------------------------------------------------------------------------------- +-- FILE: mkimport.lua +-- USAGE: ./mkimport.lua +-- DESCRIPTION: check luaotfload imports against Context +-- REQUIREMENTS: luatex, the lualibs package, Context MkIV +-- AUTHOR: Philipp Gesang (Phg), +-- VERSION: 42 +-- CREATED: 2014-12-08 22:36:15+0100 +------------------------------------------------------------------------------- +-- + +------------------------------------------------------------------------------- +--- PURPOSE +--- +--- - Facilitate detecting changes in the fontloader source. +--- - Assist in updating source code and (partially) automate importing. +--- - Account for files in the plain fontloader distribution, alert in case of +--- additions or deletions. 
+--- +------------------------------------------------------------------------------- + +kpse.set_program_name "luatex" + +local lfs = require "lfs" +local md5 = require "md5" + +require "lualibs" + +local ioloaddata = io.loaddata +local iowrite = io.write +local md5sumhexa = md5.sumhexa +local stringformat = string.format + +------------------------------------------------------------------------------- +-- config +------------------------------------------------------------------------------- + +local context_root = "/home/phg/context/tex/texmf-context" +local our_prefix = "fontloader" +local fontloader_subdir = "src/fontloader" + +local paths = { + context = "tex/context/base", + fontloader = "tex/generic/context/luatex", +} + +local prefixes = { + context = nil, + fontloader = "luatex", +} + +------------------------------------------------------------------------------- +-- helpers +------------------------------------------------------------------------------- + +local die = function (...) + io.stderr:write "[fatal error]: " + io.stderr:write (stringformat (...)) + io.stderr:write "\naborting.\n" + os.exit (1) +end + +local emphasis = function (txt) + return stringformat("\x1b[1m%s\x1b[0m", txt) +end + +local msg = function (...) + iowrite (stringformat (...)) + iowrite "\n" +end + +local good_tag = stringformat("[\x1b[1;30;%dmgood\x1b[0m] · ", 42) +local bad_tag = stringformat("[\x1b[1;30;%dmBAD\x1b[0m] · ", 41) +local alert_tag = stringformat("[\x1b[1;%dmalert\x1b[0m] · " , 36) + +local good = function (...) + local msg = (stringformat (...)) + iowrite (good_tag) + iowrite (msg) + iowrite "\n" +end + +local bad = function (...) + local msg = (stringformat (...)) + iowrite (bad_tag) + iowrite (msg) + iowrite "\n" +end + +local attention = function (...) + local msg = (stringformat (...)) + iowrite (alert_tag) + iowrite (msg) + iowrite "\n" +end + +------------------------------------------------------------------------------- +-- definitions +------------------------------------------------------------------------------- + +--- Accounting of upstream files. There are different categories: +--- +--- · *essential*: Files required at runtime. +--- · *merged*: Files merged into the fontloader package. +--- · *ignored*: Lua files not merged, but part of the format. +--- · *tex*: TeX code, i.e. format and examples. +--- · *lualibs*: Files merged, but also provided by the Lualibs package. 
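-- Worked examples (illustration only; the resolutions follow from the
-- category prefixes above and the derive_* helpers defined further down):
--
--   fontloader entry { name = "fonts-tfm", ours = nil, kind = "merged" }
--     upstream:  tex/generic/context/luatex/luatex-fonts-tfm.lua
--     local:     src/fontloader/fontloader-fonts-tfm.lua
--
--   fontloader entry { name = "fonts-merged", ours = "fontloader", kind = "essential" }
--     upstream:  luatex-fonts-merged.lua
--     local:     fontloader-fontloader.lua   (the merged runtime loader)
--
--   context entry { name = "font-otf", ours = "font-otf", kind = "merged" }
--     upstream:  tex/context/base/font-otf.lua   (context files carry no prefix)
--     local:     fontloader-font-otf.lua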
+ +local imports = { + fontloader = { + { name = "basics-gen" , ours = nil , kind = "essential" }, + { name = "basics-nod" , ours = nil , kind = "merged" }, + { name = "basics" , ours = nil , kind = "tex" }, + { name = "fonts-cbk" , ours = nil , kind = "merged" }, + { name = "fonts-def" , ours = nil , kind = "merged" }, + { name = "fonts-demo-vf-1" , ours = nil , kind = "ignored" }, + { name = "fonts-enc" , ours = nil , kind = "merged" }, + { name = "fonts-ext" , ours = nil , kind = "merged" }, + { name = "fonts-inj" , ours = nil , kind = "merged" }, + { name = "fonts-lua" , ours = nil , kind = "merged" }, + { name = "fonts-merged" , ours = "fontloader" , kind = "essential" }, + { name = "fonts-ota" , ours = nil , kind = "merged" }, + { name = "fonts-otn" , ours = nil , kind = "merged" }, + { name = "fonts" , ours = nil , kind = "merged" }, + { name = "fonts" , ours = nil , kind = "tex" }, + { name = "fonts-syn" , ours = nil , kind = "ignored" }, + { name = "fonts-tfm" , ours = nil , kind = "merged" }, + { name = "languages" , ours = nil , kind = "ignored" }, + { name = "languages" , ours = nil , kind = "tex" }, + { name = "math" , ours = nil , kind = "ignored" }, + { name = "math" , ours = nil , kind = "tex" }, + { name = "mplib" , ours = nil , kind = "ignored" }, + { name = "mplib" , ours = nil , kind = "tex" }, + { name = "plain" , ours = nil , kind = "tex" }, + { name = "preprocessor" , ours = nil , kind = "ignored" }, + { name = "preprocessor" , ours = nil , kind = "tex" }, + { name = "preprocessor-test" , ours = nil , kind = "tex" }, + { name = "swiglib" , ours = nil , kind = "ignored" }, + { name = "swiglib" , ours = nil , kind = "tex" }, + { name = "swiglib-test" , ours = nil , kind = "ignored" }, + { name = "swiglib-test" , ours = nil , kind = "tex" }, + { name = "test" , ours = nil , kind = "tex" }, + }, --[[ [fontloader] ]] + context = { --=> all merged + { name = "data-con" , ours = "data-con" , kind = "merged" }, + { name = "font-afk" , ours = "font-afk" , kind = "merged" }, + { name = "font-afm" , ours = "font-afm" , kind = "merged" }, + { name = "font-cid" , ours = "font-cid" , kind = "merged" }, + { name = "font-con" , ours = "font-con" , kind = "merged" }, + { name = "font-def" , ours = "font-def" , kind = "merged" }, + { name = "font-ini" , ours = "font-ini" , kind = "merged" }, + { name = "font-map" , ours = "font-map" , kind = "merged" }, + { name = "font-otb" , ours = "font-otb" , kind = "merged" }, + { name = "font-otf" , ours = "font-otf" , kind = "merged" }, + { name = "font-oti" , ours = "font-oti" , kind = "merged" }, + { name = "font-otp" , ours = "font-otp" , kind = "merged" }, + { name = "font-tfm" , ours = "font-tfm" , kind = "merged" }, + { name = "l-boolean" , ours = "l-boolean" , kind = "lualibs" }, + { name = "l-file" , ours = "l-file" , kind = "lualibs" }, + { name = "l-function" , ours = "l-function" , kind = "lualibs" }, + { name = "l-io" , ours = "l-io" , kind = "lualibs" }, + { name = "l-lpeg" , ours = "l-lpeg" , kind = "lualibs" }, + { name = "l-lua" , ours = "l-lua" , kind = "lualibs" }, + { name = "l-math" , ours = "l-math" , kind = "lualibs" }, + { name = "l-string" , ours = "l-string" , kind = "lualibs" }, + { name = "l-table" , ours = "l-table" , kind = "lualibs" }, + { name = "util-str" , ours = "util-str" , kind = "lualibs" }, + }, --[[ [context] ]] +} --[[ [imports] ]] + +local hash_file = function (fname) + if not lfs.isfile (fname) then + die ("cannot find %s.", fname) + end + local raw = ioloaddata (fname) + if not raw then + die ("cannot 
read from %s.", fname) + end + return md5sumhexa (raw) +end + +local derive_category_path = function (cat) + local subpath = paths[cat] or die ("category " .. cat .. " unknown") + local location = file.join (context_root, subpath) + if not lfs.isdir (location) then + die ("invalid base path defined for category " + .. cat .. " at " .. location) + end + return location +end + +local derive_fullname = function (cat, name, kind) + local tmp = prefixes[cat] + tmp = tmp and tmp .. "-" .. name or name + return tmp .. (kind == "tex" and ".tex" or ".lua") +end + +local derive_ourname = function (name) + return our_prefix .. "-" .. name .. ".lua" +end + +local is_readable = function (f) + local fh = io.open (f, "r") + if fh then + fh:close() + return true + end + return false +end + +local summarize_news = function (status) + local ni = #status.import + local nc = #status.create + local ng = #status.good + local nm = #status.missing + + msg "-----------------------------------------------------------------" + msg ("Summary: Inspected %d files.", ni + nc + ng + nm) + msg "-----------------------------------------------------------------" + if ng > 0 then good ("%d are up to date", ng) end + if ni > 0 then attention ("%d changed" , ni) end + if nc > 0 then attention ("%d new" , nc) end + if nm > 0 then bad ("%d missing" , nm) end + msg "-----------------------------------------------------------------" + + if nm == 0 and nc == 0 and ni == 0 then + return 0 + end + + return -1 +end + +local news = function () + local status = { + import = { }, + good = { }, + create = { }, + missing = { }, + } + + for cat, entries in next, imports do + local location = derive_category_path (cat) + local nfiles = #entries + + for i = 1, nfiles do + local def = entries[i] + local name = def.name + local ours = def.ours + local kind = def.kind + local fullname = derive_fullname (cat, name, kind) + local fullpath = file.join (location, fullname) + local ourname = derive_ourname (ours or name) + local ourpath = file.join (fontloader_subdir, ourname) -- relative + local imported = false + + if not is_readable (fullpath) then + bad ("source for file %s not found at %s", + emphasis (ourname), + emphasis (fullpath)) + status.missing[#status.missing + 1] = ourname + else + --- Source file exists and is readable. + if not lfs.isdir (fontloader_subdir) then + die ("path for fontloader tree (" + .. fontloader_subdir .. ") is not a directory") + end + if is_readable (ourpath) then imported = true end + local src_hash = hash_file (fullpath) + local dst_hash = imported and hash_file (ourpath) + local same = src_hash == dst_hash -- same! 
+ + if same then + good ("file %s unchanged", emphasis (ourname)) + status.good[#status.good + 1] = ourname + elseif not dst_hash then + attention ("new file %s requires import from %s", + emphasis (ourname), + emphasis (fullpath)) + status.create[#status.create + 1] = ourname + else --- src and dst exist but differ + attention ("file %s requires import", emphasis (ourname)) + status.import[#status.import + 1] = ourname + end + end + + end + end + + return summarize_news (status) +end + +local job_kind = table.mirrored { + news = news, + import = function () end, + tell = function () end, +} + +------------------------------------------------------------------------------- +-- functionality +------------------------------------------------------------------------------- + +--- job_kind -> bool +local check_job = function (j) + return job_kind[j] +end + +------------------------------------------------------------------------------- +-- entry point +------------------------------------------------------------------------------- + +local main = function () + local job = arg[1] or "news" + local runner = check_job (job) + if not runner then die ("invalid job type “" .. job .. "”.") end + return runner(arg) +end + +os.exit (main ()) + +--- vim:ft=lua:ts=2:et:sw=2 -- cgit v1.2.3 From 7080c45e3d7fc651606f86fea7cbd670b4f0ad4f Mon Sep 17 00:00:00 2001 From: Philipp Gesang Date: Tue, 9 Dec 2014 07:53:06 +0100 Subject: [import] implement file lookup --- scripts/mkimport | 44 ++++++++++++++++++++++++++++++++++++++++---- 1 file changed, 40 insertions(+), 4 deletions(-) diff --git a/scripts/mkimport b/scripts/mkimport index f0e7410..4f6875d 100644 --- a/scripts/mkimport +++ b/scripts/mkimport @@ -108,6 +108,7 @@ end --- · *lualibs*: Files merged, but also provided by the Lualibs package. local imports = { + fontloader = { { name = "basics-gen" , ours = nil , kind = "essential" }, { name = "basics-nod" , ours = nil , kind = "merged" }, @@ -142,6 +143,7 @@ local imports = { { name = "swiglib-test" , ours = nil , kind = "tex" }, { name = "test" , ours = nil , kind = "tex" }, }, --[[ [fontloader] ]] + context = { --=> all merged { name = "data-con" , ours = "data-con" , kind = "merged" }, { name = "font-afk" , ours = "font-afk" , kind = "merged" }, @@ -196,8 +198,9 @@ local derive_fullname = function (cat, name, kind) return tmp .. (kind == "tex" and ".tex" or ".lua") end -local derive_ourname = function (name) - return our_prefix .. "-" .. name .. ".lua" +local derive_ourname = function (name, kind) + local suffix = kind == "tex" and ".tex" or ".lua" + return our_prefix .. "-" .. name .. 
suffix end local is_readable = function (f) @@ -288,11 +291,44 @@ local news = function () end return summarize_news (status) -end +end --[[ [local news = function ()] ]] + +local get_file_definition = function (name, ourname, kind) + kind = kind or "lua" + for cat, defs in next, imports do + local fullname = derive_fullname (cat, name, kind) + local ndefs = #defs + for i = 1, ndefs do + local def = defs[i] + local dname = def.name + local dours = def.ours or def.name + local dkind = def.kind + + --- test properties + if derive_ourname (dours, dkind) == ourname then return def end + if derive_fullname (cat, dname, dkind) == fullname then return def end + if dours == ourname then return def end + if dname == fullname then return def end + end + end + --- search unsuccessful +end --[[ [local get_file_definition = function (name, ourname, kind)] ]] + +local import_file = function (name, kind) + local ourname = derive_ourname (name) + local def = get_file_definition (name, ourname, kind) + if not def then die ("unable to find a definition matching " .. name) end +end --[[ [local import_file = function (name, kind)] ]] + +local import = function (arg) + if #arg > 1 then + return import_file (arg[2]) + end +end --[[ [local import = function (arg)] ]] local job_kind = table.mirrored { news = news, - import = function () end, + import = import, tell = function () end, } -- cgit v1.2.3 From 7f02fde8764b58fadf93ea9d778ad05a42d28abb Mon Sep 17 00:00:00 2001 From: Philipp Gesang Date: Tue, 9 Dec 2014 22:00:50 +0100 Subject: [import] add single file importing routine --- scripts/mkimport | 32 ++++++++++++++++++++++---------- 1 file changed, 22 insertions(+), 10 deletions(-) diff --git a/scripts/mkimport b/scripts/mkimport index 4f6875d..2ac1549 100644 --- a/scripts/mkimport +++ b/scripts/mkimport @@ -55,7 +55,7 @@ local prefixes = { ------------------------------------------------------------------------------- local die = function (...) - io.stderr:write "[fatal error]: " + io.stderr:write "[\x1b[1;30;41mfatal error\x1b[0m]: " io.stderr:write (stringformat (...)) io.stderr:write "\naborting.\n" os.exit (1) @@ -305,19 +305,32 @@ local get_file_definition = function (name, ourname, kind) local dkind = def.kind --- test properties - if derive_ourname (dours, dkind) == ourname then return def end - if derive_fullname (cat, dname, dkind) == fullname then return def end - if dours == ourname then return def end - if dname == fullname then return def end + if derive_ourname (dours, dkind) == ourname then return def, cat end + if derive_fullname (cat, dname, dkind) == fullname then return def, cat end + if dours == ourname then return def, cat end + if dname == fullname then return def, cat end end end --- search unsuccessful end --[[ [local get_file_definition = function (name, ourname, kind)] ]] -local import_file = function (name, kind) - local ourname = derive_ourname (name) - local def = get_file_definition (name, ourname, kind) +local import_file = function (name, kind, def, cat) + local expected_ourname = derive_ourname (name) + if not def or not cat then + def, cat = get_file_definition (name, expected_ourname, kind) + end + if not def then die ("unable to find a definition matching " .. name) end + if not cat then die ("missing category for file " .. name .. 
" -- WTF‽") end + + local dname = def.name + local dours = def.ours + local dkind = def.kind + local srcdir = derive_category_path (cat) + local src = file.join (srcdir, derive_fullname (cat, dname, kind)) + local dst = file.join (fontloader_subdir, expected_ourname) + file.copy (src, dst) + return (hash_file (src) == hash_file (dst)) and 0 or 1 end --[[ [local import_file = function (name, kind)] ]] local import = function (arg) @@ -338,7 +351,7 @@ local job_kind = table.mirrored { --- job_kind -> bool local check_job = function (j) - return job_kind[j] + return job_kind[j] or die ("invalid job type “" .. job .. "”.") end ------------------------------------------------------------------------------- @@ -348,7 +361,6 @@ end local main = function () local job = arg[1] or "news" local runner = check_job (job) - if not runner then die ("invalid job type “" .. job .. "”.") end return runner(arg) end -- cgit v1.2.3 From 7d03847cddef515e6cbbe8afc3e9b663371e57cd Mon Sep 17 00:00:00 2001 From: Philipp Gesang Date: Tue, 9 Dec 2014 22:34:54 +0100 Subject: [import] implement automated batch import Already being used for the next update commit. --- scripts/mkimport | 91 +++++++++++++++++++++++++++++++++++++++++++++++++------- 1 file changed, 80 insertions(+), 11 deletions(-) diff --git a/scripts/mkimport b/scripts/mkimport index 2ac1549..8e22690 100644 --- a/scripts/mkimport +++ b/scripts/mkimport @@ -70,9 +70,10 @@ local msg = function (...) iowrite "\n" end -local good_tag = stringformat("[\x1b[1;30;%dmgood\x1b[0m] · ", 42) -local bad_tag = stringformat("[\x1b[1;30;%dmBAD\x1b[0m] · ", 41) -local alert_tag = stringformat("[\x1b[1;%dmalert\x1b[0m] · " , 36) +local good_tag = stringformat("[\x1b[1;30;%dmgood\x1b[0m] · ", 42) +local bad_tag = stringformat("[\x1b[1;30;%dmBAD\x1b[0m] · ", 41) +local alert_tag = stringformat("[\x1b[1;%dmalert\x1b[0m] · " , 36) +local status_tag = stringformat("[\x1b[0;%dmstatus\x1b[0m] · " , 36) local good = function (...) local msg = (stringformat (...)) @@ -95,6 +96,13 @@ local attention = function (...) iowrite "\n" end +local status = function (...) 
+ local msg = (stringformat (...)) + iowrite (status_tag) + iowrite (msg) + iowrite "\n" +end + ------------------------------------------------------------------------------- -- definitions ------------------------------------------------------------------------------- @@ -314,6 +322,34 @@ local get_file_definition = function (name, ourname, kind) --- search unsuccessful end --[[ [local get_file_definition = function (name, ourname, kind)] ]] +local import_imported = 0 +local import_skipped = 1 +local import_failed = 2 +local import_created = 3 + +local import_status = { + [import_imported] = "imported", + [import_skipped ] = "skipped", + [import_failed ] = "failed", + [import_created ] = "created", +} + +local summarize_status = function (counters) + local imported = counters[import_imported] or 0 + local skipped = counters[import_skipped ] or 0 + local created = counters[import_created ] or 0 + local failed = counters[import_failed ] or 0 + local sum = imported + skipped + created + failed + if sum < 1 then die ("garbage total of imported files: %s", sum) end + status ("-----------------------------------------------------------------") + status (" RESULT: %d files processed", sum) + status ("-----------------------------------------------------------------") + if created > 0 then status ("created: %d (%d %%)", created , created * 100 / sum) end + if imported > 0 then status ("imported: %d (%d %%)", imported, imported * 100 / sum) end + if skipped > 0 then status ("skipped: %d (%d %%)", skipped , skipped * 100 / sum) end + status ("-----------------------------------------------------------------") +end + local import_file = function (name, kind, def, cat) local expected_ourname = derive_ourname (name) if not def or not cat then @@ -323,20 +359,53 @@ local import_file = function (name, kind, def, cat) if not def then die ("unable to find a definition matching " .. name) end if not cat then die ("missing category for file " .. name .. " -- WTF‽") end - local dname = def.name - local dours = def.ours - local dkind = def.kind - local srcdir = derive_category_path (cat) - local src = file.join (srcdir, derive_fullname (cat, dname, kind)) - local dst = file.join (fontloader_subdir, expected_ourname) + local dname = def.name + local dours = def.ours + local dkind = def.kind + local srcdir = derive_category_path (cat) + local fullname = derive_fullname (cat, dname, kind) + local ourname = derive_ourname (dname, kind) + local src = file.join (srcdir, fullname) + local dst = file.join (fontloader_subdir, ourname) + local new = not lfs.isfile (dst) + if not new and hash_file (src) == hash_file (dst) then + status ("file %s is unchanged, skipping", fullname) + return import_skipped + end + status ("importing file %s", fullname) file.copy (src, dst) - return (hash_file (src) == hash_file (dst)) and 0 or 1 + if hash_file (src) == hash_file (dst) then + if new then return import_created end + return import_imported end + return import_failed end --[[ [local import_file = function (name, kind)] ]] local import = function (arg) if #arg > 1 then - return import_file (arg[2]) + local name = arg[2] or die ("invalid filename " .. tostring (arg[2])) + local stat = import_file (name) + if stat == import_failed then + die ("failed to import file " .. 
name) + end + status ("import status for file %s: %s", name, import_status[stat]) + end + --- Multiple files + local statcount = { } -- import status codes -> size_t + for cat, defs in next, imports do + local ndefs = #defs + for i = 1, ndefs do + local def = defs[i] + local stat = import_file (def.name, def.kind, def, cat) + if stat == import_failed then + die (stringformat ("import failed at file %d of %d (%s)", + i, ndefs, def.name)) + end + statcount[stat] = statcount[stat] or 0 + statcount[stat] = statcount[stat] + 1 + end end + summarize_status (statcount) + return 0 end --[[ [local import = function (arg)] ]] local job_kind = table.mirrored { -- cgit v1.2.3 From 9f614b56bf48ebdd59e81e9e3e874b8f25bb174f Mon Sep 17 00:00:00 2001 From: Philipp Gesang Date: Tue, 9 Dec 2014 22:41:09 +0100 Subject: [import] prefer enumerations instead of strings --- scripts/mkimport | 120 +++++++++++++++++++++++++++++-------------------------- 1 file changed, 63 insertions(+), 57 deletions(-) diff --git a/scripts/mkimport b/scripts/mkimport index 8e22690..1ee695b 100644 --- a/scripts/mkimport +++ b/scripts/mkimport @@ -115,67 +115,73 @@ end --- · *tex*: TeX code, i.e. format and examples. --- · *lualibs*: Files merged, but also provided by the Lualibs package. +local kind_essential = 0 +local kind_merged = 1 +local kind_tex = 2 +local kind_ignored = 3 +local kind_lualibs = 4 + local imports = { fontloader = { - { name = "basics-gen" , ours = nil , kind = "essential" }, - { name = "basics-nod" , ours = nil , kind = "merged" }, - { name = "basics" , ours = nil , kind = "tex" }, - { name = "fonts-cbk" , ours = nil , kind = "merged" }, - { name = "fonts-def" , ours = nil , kind = "merged" }, - { name = "fonts-demo-vf-1" , ours = nil , kind = "ignored" }, - { name = "fonts-enc" , ours = nil , kind = "merged" }, - { name = "fonts-ext" , ours = nil , kind = "merged" }, - { name = "fonts-inj" , ours = nil , kind = "merged" }, - { name = "fonts-lua" , ours = nil , kind = "merged" }, - { name = "fonts-merged" , ours = "fontloader" , kind = "essential" }, - { name = "fonts-ota" , ours = nil , kind = "merged" }, - { name = "fonts-otn" , ours = nil , kind = "merged" }, - { name = "fonts" , ours = nil , kind = "merged" }, - { name = "fonts" , ours = nil , kind = "tex" }, - { name = "fonts-syn" , ours = nil , kind = "ignored" }, - { name = "fonts-tfm" , ours = nil , kind = "merged" }, - { name = "languages" , ours = nil , kind = "ignored" }, - { name = "languages" , ours = nil , kind = "tex" }, - { name = "math" , ours = nil , kind = "ignored" }, - { name = "math" , ours = nil , kind = "tex" }, - { name = "mplib" , ours = nil , kind = "ignored" }, - { name = "mplib" , ours = nil , kind = "tex" }, - { name = "plain" , ours = nil , kind = "tex" }, - { name = "preprocessor" , ours = nil , kind = "ignored" }, - { name = "preprocessor" , ours = nil , kind = "tex" }, - { name = "preprocessor-test" , ours = nil , kind = "tex" }, - { name = "swiglib" , ours = nil , kind = "ignored" }, - { name = "swiglib" , ours = nil , kind = "tex" }, - { name = "swiglib-test" , ours = nil , kind = "ignored" }, - { name = "swiglib-test" , ours = nil , kind = "tex" }, - { name = "test" , ours = nil , kind = "tex" }, + { name = "basics-gen" , ours = nil , kind = kind_essential }, + { name = "basics-nod" , ours = nil , kind = kind_merged }, + { name = "basics" , ours = nil , kind = kind_tex }, + { name = "fonts-cbk" , ours = nil , kind = kind_merged }, + { name = "fonts-def" , ours = nil , kind = kind_merged }, + { name = "fonts-demo-vf-1" , ours 
= nil , kind = kind_ignored }, + { name = "fonts-enc" , ours = nil , kind = kind_merged }, + { name = "fonts-ext" , ours = nil , kind = kind_merged }, + { name = "fonts-inj" , ours = nil , kind = kind_merged }, + { name = "fonts-lua" , ours = nil , kind = kind_merged }, + { name = "fonts-merged" , ours = "fontloader" , kind = kind_essential }, + { name = "fonts-ota" , ours = nil , kind = kind_merged }, + { name = "fonts-otn" , ours = nil , kind = kind_merged }, + { name = "fonts" , ours = nil , kind = kind_merged }, + { name = "fonts" , ours = nil , kind = kind_tex }, + { name = "fonts-syn" , ours = nil , kind = kind_ignored }, + { name = "fonts-tfm" , ours = nil , kind = kind_merged }, + { name = "languages" , ours = nil , kind = kind_ignored }, + { name = "languages" , ours = nil , kind = kind_tex }, + { name = "math" , ours = nil , kind = kind_ignored }, + { name = "math" , ours = nil , kind = kind_tex }, + { name = "mplib" , ours = nil , kind = kind_ignored }, + { name = "mplib" , ours = nil , kind = kind_tex }, + { name = "plain" , ours = nil , kind = kind_tex }, + { name = "preprocessor" , ours = nil , kind = kind_ignored }, + { name = "preprocessor" , ours = nil , kind = kind_tex }, + { name = "preprocessor-test" , ours = nil , kind = kind_tex }, + { name = "swiglib" , ours = nil , kind = kind_ignored }, + { name = "swiglib" , ours = nil , kind = kind_tex }, + { name = "swiglib-test" , ours = nil , kind = kind_ignored }, + { name = "swiglib-test" , ours = nil , kind = kind_tex }, + { name = "test" , ours = nil , kind = kind_tex }, }, --[[ [fontloader] ]] context = { --=> all merged - { name = "data-con" , ours = "data-con" , kind = "merged" }, - { name = "font-afk" , ours = "font-afk" , kind = "merged" }, - { name = "font-afm" , ours = "font-afm" , kind = "merged" }, - { name = "font-cid" , ours = "font-cid" , kind = "merged" }, - { name = "font-con" , ours = "font-con" , kind = "merged" }, - { name = "font-def" , ours = "font-def" , kind = "merged" }, - { name = "font-ini" , ours = "font-ini" , kind = "merged" }, - { name = "font-map" , ours = "font-map" , kind = "merged" }, - { name = "font-otb" , ours = "font-otb" , kind = "merged" }, - { name = "font-otf" , ours = "font-otf" , kind = "merged" }, - { name = "font-oti" , ours = "font-oti" , kind = "merged" }, - { name = "font-otp" , ours = "font-otp" , kind = "merged" }, - { name = "font-tfm" , ours = "font-tfm" , kind = "merged" }, - { name = "l-boolean" , ours = "l-boolean" , kind = "lualibs" }, - { name = "l-file" , ours = "l-file" , kind = "lualibs" }, - { name = "l-function" , ours = "l-function" , kind = "lualibs" }, - { name = "l-io" , ours = "l-io" , kind = "lualibs" }, - { name = "l-lpeg" , ours = "l-lpeg" , kind = "lualibs" }, - { name = "l-lua" , ours = "l-lua" , kind = "lualibs" }, - { name = "l-math" , ours = "l-math" , kind = "lualibs" }, - { name = "l-string" , ours = "l-string" , kind = "lualibs" }, - { name = "l-table" , ours = "l-table" , kind = "lualibs" }, - { name = "util-str" , ours = "util-str" , kind = "lualibs" }, + { name = "data-con" , ours = "data-con" , kind = kind_merged }, + { name = "font-afk" , ours = "font-afk" , kind = kind_merged }, + { name = "font-afm" , ours = "font-afm" , kind = kind_merged }, + { name = "font-cid" , ours = "font-cid" , kind = kind_merged }, + { name = "font-con" , ours = "font-con" , kind = kind_merged }, + { name = "font-def" , ours = "font-def" , kind = kind_merged }, + { name = "font-ini" , ours = "font-ini" , kind = kind_merged }, + { name = "font-map" , ours = 
"font-map" , kind = kind_merged }, + { name = "font-otb" , ours = "font-otb" , kind = kind_merged }, + { name = "font-otf" , ours = "font-otf" , kind = kind_merged }, + { name = "font-oti" , ours = "font-oti" , kind = kind_merged }, + { name = "font-otp" , ours = "font-otp" , kind = kind_merged }, + { name = "font-tfm" , ours = "font-tfm" , kind = kind_merged }, + { name = "l-boolean" , ours = "l-boolean" , kind = kind_lualibs }, + { name = "l-file" , ours = "l-file" , kind = kind_lualibs }, + { name = "l-function" , ours = "l-function" , kind = kind_lualibs }, + { name = "l-io" , ours = "l-io" , kind = kind_lualibs }, + { name = "l-lpeg" , ours = "l-lpeg" , kind = kind_lualibs }, + { name = "l-lua" , ours = "l-lua" , kind = kind_lualibs }, + { name = "l-math" , ours = "l-math" , kind = kind_lualibs }, + { name = "l-string" , ours = "l-string" , kind = kind_lualibs }, + { name = "l-table" , ours = "l-table" , kind = kind_lualibs }, + { name = "util-str" , ours = "util-str" , kind = kind_lualibs }, }, --[[ [context] ]] } --[[ [imports] ]] @@ -203,11 +209,11 @@ end local derive_fullname = function (cat, name, kind) local tmp = prefixes[cat] tmp = tmp and tmp .. "-" .. name or name - return tmp .. (kind == "tex" and ".tex" or ".lua") + return tmp .. (kind == kind_tex and ".tex" or ".lua") end local derive_ourname = function (name, kind) - local suffix = kind == "tex" and ".tex" or ".lua" + local suffix = kind == kind_tex and ".tex" or ".lua" return our_prefix .. "-" .. name .. suffix end -- cgit v1.2.3 From c6d79f40c1fd9ba9401b0f4edc688f29144048df Mon Sep 17 00:00:00 2001 From: Philipp Gesang Date: Tue, 9 Dec 2014 22:54:52 +0100 Subject: [import] classify essential and non-essential files into appropriate subdirectories Simplifies writing make rules ;) --- scripts/mkimport | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/scripts/mkimport b/scripts/mkimport index 1ee695b..f211c2f 100644 --- a/scripts/mkimport +++ b/scripts/mkimport @@ -214,7 +214,8 @@ end local derive_ourname = function (name, kind) local suffix = kind == kind_tex and ".tex" or ".lua" - return our_prefix .. "-" .. name .. suffix + local subdir = kind == kind_essential and "runtime" or "misc" + return subdir, our_prefix .. "-" .. name .. 
suffix end local is_readable = function (f) @@ -267,8 +268,8 @@ local news = function () local kind = def.kind local fullname = derive_fullname (cat, name, kind) local fullpath = file.join (location, fullname) - local ourname = derive_ourname (ours or name) - local ourpath = file.join (fontloader_subdir, ourname) -- relative + local subdir, ourname = derive_ourname (ours or name) + local ourpath = file.join (fontloader_subdir, subdir, ourname) -- relative local imported = false if not is_readable (fullpath) then @@ -319,7 +320,8 @@ local get_file_definition = function (name, ourname, kind) local dkind = def.kind --- test properties - if derive_ourname (dours, dkind) == ourname then return def, cat end + local subdir, derived = derive_ourname (dours, dkind) + if derived == ourname then return def, cat end if derive_fullname (cat, dname, dkind) == fullname then return def, cat end if dours == ourname then return def, cat end if dname == fullname then return def, cat end @@ -370,14 +372,19 @@ local import_file = function (name, kind, def, cat) local dkind = def.kind local srcdir = derive_category_path (cat) local fullname = derive_fullname (cat, dname, kind) - local ourname = derive_ourname (dname, kind) + local subdir, ourname = derive_ourname (dname, kind) + local ourpath = file.join (fontloader_subdir, subdir) local src = file.join (srcdir, fullname) - local dst = file.join (fontloader_subdir, ourname) + local dst = file.join (ourpath, ourname) local new = not lfs.isfile (dst) if not new and hash_file (src) == hash_file (dst) then status ("file %s is unchanged, skipping", fullname) return import_skipped end + if not (lfs.isdir (ourpath) or not lfs.mkdirs (ourpath)) then + die ("failed to create directory %s for file %s", + ourpath, ourname) + end status ("importing file %s", fullname) file.copy (src, dst) if hash_file (src) == hash_file (dst) then -- cgit v1.2.3 From 3247d7476f65bcc7164764fc373c171c67e813e7 Mon Sep 17 00:00:00 2001 From: Philipp Gesang Date: Tue, 9 Dec 2014 22:56:29 +0100 Subject: [build] update import path in makefile As a consequence, imported files not needed at runtime will no longer be packaged. --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index b3fce99..ab5c464 100644 --- a/Makefile +++ b/Makefile @@ -5,7 +5,7 @@ NAME = luaotfload DOCSRCDIR = ./doc SCRIPTSRCDIR = ./scripts SRCSRCDIR = ./src -FONTLOADERDIR = $(SRCSRCDIR)/fontloader +FONTLOADERDIR = $(SRCSRCDIR)/fontloader/runtime BUILDDIR = ./build MISCDIR = ./misc -- cgit v1.2.3 From 0c443eae5ad75d999e0c41d42d5ab3530024a611 Mon Sep 17 00:00:00 2001 From: Philipp Gesang Date: Tue, 9 Dec 2014 22:57:46 +0100 Subject: [import] fix error message --- scripts/mkimport | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/mkimport b/scripts/mkimport index f211c2f..8622d5e 100644 --- a/scripts/mkimport +++ b/scripts/mkimport @@ -433,7 +433,7 @@ local job_kind = table.mirrored { --- job_kind -> bool local check_job = function (j) - return job_kind[j] or die ("invalid job type “" .. job .. "”.") + return job_kind[j] or die ("invalid job type “%s”.", j) end ------------------------------------------------------------------------------- -- cgit v1.2.3 From ff9c47695c00c96ade844f4b2644fca5e1f96be1 Mon Sep 17 00:00:00 2001 From: Philipp Gesang Date: Tue, 9 Dec 2014 23:03:24 +0100 Subject: [import] fix destination path building in import procedure MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit D’oh! 
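Worked illustration (example resolutions only, following the ``derive_ourname``
helper as patched in the preceding commits): with the fix below, ``import_file``
derives the destination name from ``def.ours`` and forwards the ``kind``, so

    derive_ourname ("fontloader", kind_essential) --> "runtime", "fontloader-fontloader.lua"
    derive_ourname ("basics",     kind_tex)       --> "misc",    "fontloader-basics.tex"

whereas deriving from ``def.name`` had sent the merged loader to
``fontloader-fonts-merged.lua`` instead of the intended ``fontloader-fontloader.lua``.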
--- scripts/mkimport | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/mkimport b/scripts/mkimport index 8622d5e..fe9f4b4 100644 --- a/scripts/mkimport +++ b/scripts/mkimport @@ -268,7 +268,7 @@ local news = function () local kind = def.kind local fullname = derive_fullname (cat, name, kind) local fullpath = file.join (location, fullname) - local subdir, ourname = derive_ourname (ours or name) + local subdir, ourname = derive_ourname (ours or name, kind) local ourpath = file.join (fontloader_subdir, subdir, ourname) -- relative local imported = false @@ -368,11 +368,11 @@ local import_file = function (name, kind, def, cat) if not cat then die ("missing category for file " .. name .. " -- WTF‽") end local dname = def.name - local dours = def.ours + local dours = def.ours or dname local dkind = def.kind local srcdir = derive_category_path (cat) local fullname = derive_fullname (cat, dname, kind) - local subdir, ourname = derive_ourname (dname, kind) + local subdir, ourname = derive_ourname (dours, kind) local ourpath = file.join (fontloader_subdir, subdir) local src = file.join (srcdir, fullname) local dst = file.join (ourpath, ourname) -- cgit v1.2.3 From c8808efcf8bd69b6038627f92298818d9028c088 Mon Sep 17 00:00:00 2001 From: Philipp Gesang Date: Tue, 9 Dec 2014 23:07:13 +0100 Subject: [import] unify appearance --- scripts/mkimport | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/scripts/mkimport b/scripts/mkimport index fe9f4b4..1b4dc39 100644 --- a/scripts/mkimport +++ b/scripts/mkimport @@ -70,6 +70,12 @@ local msg = function (...) iowrite "\n" end +local separator_string = string.rep ("-", 79) +local separator = function () + iowrite (separator_string) + iowrite "\n" +end + local good_tag = stringformat("[\x1b[1;30;%dmgood\x1b[0m] · ", 42) local bad_tag = stringformat("[\x1b[1;30;%dmBAD\x1b[0m] · ", 41) local alert_tag = stringformat("[\x1b[1;%dmalert\x1b[0m] · " , 36) @@ -233,14 +239,14 @@ local summarize_news = function (status) local ng = #status.good local nm = #status.missing - msg "-----------------------------------------------------------------" + separator () msg ("Summary: Inspected %d files.", ni + nc + ng + nm) - msg "-----------------------------------------------------------------" + separator () if ng > 0 then good ("%d are up to date", ng) end if ni > 0 then attention ("%d changed" , ni) end if nc > 0 then attention ("%d new" , nc) end if nm > 0 then bad ("%d missing" , nm) end - msg "-----------------------------------------------------------------" + separator () if nm == 0 and nc == 0 and ni == 0 then return 0 @@ -349,13 +355,13 @@ local summarize_status = function (counters) local failed = counters[import_failed ] or 0 local sum = imported + skipped + created + failed if sum < 1 then die ("garbage total of imported files: %s", sum) end - status ("-----------------------------------------------------------------") + separator () status (" RESULT: %d files processed", sum) - status ("-----------------------------------------------------------------") + separator () if created > 0 then status ("created: %d (%d %%)", created , created * 100 / sum) end if imported > 0 then status ("imported: %d (%d %%)", imported, imported * 100 / sum) end if skipped > 0 then status ("skipped: %d (%d %%)", skipped , skipped * 100 / sum) end - status ("-----------------------------------------------------------------") + separator () end local import_file = function (name, kind, def, cat) -- cgit v1.2.3 From 
c076033eae06ee3f30c02bfa4d529001379eff6c Mon Sep 17 00:00:00 2001 From: Philipp Gesang Date: Tue, 9 Dec 2014 23:07:55 +0100 Subject: [fontloader] reorganize under the new import scheme MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit For better orientation, the fontloader tree now contains two subdirectories into which files have been reorganized: The two files required at runtime for the fontloader and luaotfload-tool are: × ``fontloader-basics-gen.lua`` × ``fontloader-fontloader.lua`` They are now kept in the ``src/fontloader/runtime/`` subdirectory. All other files from upstream are now located in ``src/fontloader/misc``. This includes a number of files that have not yet been part of Luaotfload. Currently, the *misc* set of files is not packaged along with Luaotfload. This may change in the future when there is an option to switch the merged fontloader for its constituent files, or even for upstream. --- src/fontloader/fontloader-basics-gen.lua | 368 - src/fontloader/fontloader-basics-nod.lua | 178 - src/fontloader/fontloader-fontloader.lua | 14628 ------------------- src/fontloader/fontloader-fonts-cbk.lua | 68 - src/fontloader/fontloader-fonts-def.lua | 97 - src/fontloader/fontloader-fonts-enc.lua | 28 - src/fontloader/fontloader-fonts-ext.lua | 272 - src/fontloader/fontloader-fonts-inj.lua | 526 - src/fontloader/fontloader-fonts-lua.lua | 33 - src/fontloader/fontloader-fonts-otn.lua | 2848 ---- src/fontloader/fontloader-fonts-tfm.lua | 38 - src/fontloader/misc/fontloader-basics-nod.lua | 244 + src/fontloader/misc/fontloader-basics.tex | 23 + src/fontloader/misc/fontloader-data-con.lua | 138 + src/fontloader/misc/fontloader-font-afk.lua | 200 + src/fontloader/misc/fontloader-font-afm.lua | 1042 ++ src/fontloader/misc/fontloader-font-cid.lua | 177 + src/fontloader/misc/fontloader-font-con.lua | 1404 ++ src/fontloader/misc/fontloader-font-def.lua | 452 + src/fontloader/misc/fontloader-font-ini.lua | 32 + src/fontloader/misc/fontloader-font-map.lua | 533 + src/fontloader/misc/fontloader-font-otb.lua | 707 + src/fontloader/misc/fontloader-font-otf.lua | 2592 ++++ src/fontloader/misc/fontloader-font-oti.lua | 91 + src/fontloader/misc/fontloader-font-otp.lua | 906 ++ src/fontloader/misc/fontloader-font-tfm.lua | 158 + src/fontloader/misc/fontloader-fonts-cbk.lua | 95 + src/fontloader/misc/fontloader-fonts-def.lua | 97 + src/fontloader/misc/fontloader-fonts-demo-vf-1.lua | 38 + src/fontloader/misc/fontloader-fonts-enc.lua | 29 + src/fontloader/misc/fontloader-fonts-ext.lua | 272 + src/fontloader/misc/fontloader-fonts-inj.lua | 603 + src/fontloader/misc/fontloader-fonts-lua.lua | 33 + src/fontloader/misc/fontloader-fonts-ota.lua | 459 + src/fontloader/misc/fontloader-fonts-otn.lua | 2888 ++++ src/fontloader/misc/fontloader-fonts-syn.lua | 106 + src/fontloader/misc/fontloader-fonts-tfm.lua | 38 + src/fontloader/misc/fontloader-fonts.lua | 275 + src/fontloader/misc/fontloader-fonts.tex | 140 + src/fontloader/misc/fontloader-l-boolean.lua | 69 + src/fontloader/misc/fontloader-l-file.lua | 691 + src/fontloader/misc/fontloader-l-function.lua | 11 + src/fontloader/misc/fontloader-l-io.lua | 363 + src/fontloader/misc/fontloader-l-lpeg.lua | 1107 ++ src/fontloader/misc/fontloader-l-lua.lua | 167 + src/fontloader/misc/fontloader-l-math.lua | 34 + src/fontloader/misc/fontloader-l-string.lua | 212 + src/fontloader/misc/fontloader-l-table.lua | 1173 ++ src/fontloader/misc/fontloader-languages.lua | 45 + src/fontloader/misc/fontloader-languages.tex | 17 + 
src/fontloader/misc/fontloader-math.lua | 53 + src/fontloader/misc/fontloader-math.tex | 1874 +++ src/fontloader/misc/fontloader-mplib.lua | 491 + src/fontloader/misc/fontloader-mplib.tex | 124 + src/fontloader/misc/fontloader-plain.tex | 27 + .../misc/fontloader-preprocessor-test.tex | 30 + src/fontloader/misc/fontloader-preprocessor.lua | 163 + src/fontloader/misc/fontloader-preprocessor.tex | 14 + src/fontloader/misc/fontloader-swiglib-test.lua | 25 + src/fontloader/misc/fontloader-swiglib-test.tex | 11 + src/fontloader/misc/fontloader-swiglib.lua | 62 + src/fontloader/misc/fontloader-swiglib.tex | 20 + src/fontloader/misc/fontloader-test.tex | 112 + src/fontloader/misc/fontloader-util-str.lua | 1117 ++ src/fontloader/runtime/fontloader-basics-gen.lua | 373 + src/fontloader/runtime/fontloader-fontloader.lua | 14628 +++++++++++++++++++ 66 files changed, 36785 insertions(+), 19084 deletions(-) delete mode 100644 src/fontloader/fontloader-basics-gen.lua delete mode 100644 src/fontloader/fontloader-basics-nod.lua delete mode 100644 src/fontloader/fontloader-fontloader.lua delete mode 100644 src/fontloader/fontloader-fonts-cbk.lua delete mode 100644 src/fontloader/fontloader-fonts-def.lua delete mode 100644 src/fontloader/fontloader-fonts-enc.lua delete mode 100644 src/fontloader/fontloader-fonts-ext.lua delete mode 100644 src/fontloader/fontloader-fonts-inj.lua delete mode 100644 src/fontloader/fontloader-fonts-lua.lua delete mode 100644 src/fontloader/fontloader-fonts-otn.lua delete mode 100644 src/fontloader/fontloader-fonts-tfm.lua create mode 100644 src/fontloader/misc/fontloader-basics-nod.lua create mode 100644 src/fontloader/misc/fontloader-basics.tex create mode 100644 src/fontloader/misc/fontloader-data-con.lua create mode 100644 src/fontloader/misc/fontloader-font-afk.lua create mode 100644 src/fontloader/misc/fontloader-font-afm.lua create mode 100644 src/fontloader/misc/fontloader-font-cid.lua create mode 100644 src/fontloader/misc/fontloader-font-con.lua create mode 100644 src/fontloader/misc/fontloader-font-def.lua create mode 100644 src/fontloader/misc/fontloader-font-ini.lua create mode 100644 src/fontloader/misc/fontloader-font-map.lua create mode 100644 src/fontloader/misc/fontloader-font-otb.lua create mode 100644 src/fontloader/misc/fontloader-font-otf.lua create mode 100644 src/fontloader/misc/fontloader-font-oti.lua create mode 100644 src/fontloader/misc/fontloader-font-otp.lua create mode 100644 src/fontloader/misc/fontloader-font-tfm.lua create mode 100644 src/fontloader/misc/fontloader-fonts-cbk.lua create mode 100644 src/fontloader/misc/fontloader-fonts-def.lua create mode 100644 src/fontloader/misc/fontloader-fonts-demo-vf-1.lua create mode 100644 src/fontloader/misc/fontloader-fonts-enc.lua create mode 100644 src/fontloader/misc/fontloader-fonts-ext.lua create mode 100644 src/fontloader/misc/fontloader-fonts-inj.lua create mode 100644 src/fontloader/misc/fontloader-fonts-lua.lua create mode 100644 src/fontloader/misc/fontloader-fonts-ota.lua create mode 100644 src/fontloader/misc/fontloader-fonts-otn.lua create mode 100644 src/fontloader/misc/fontloader-fonts-syn.lua create mode 100644 src/fontloader/misc/fontloader-fonts-tfm.lua create mode 100644 src/fontloader/misc/fontloader-fonts.lua create mode 100644 src/fontloader/misc/fontloader-fonts.tex create mode 100644 src/fontloader/misc/fontloader-l-boolean.lua create mode 100644 src/fontloader/misc/fontloader-l-file.lua create mode 100644 src/fontloader/misc/fontloader-l-function.lua create mode 100644 
src/fontloader/misc/fontloader-l-io.lua create mode 100644 src/fontloader/misc/fontloader-l-lpeg.lua create mode 100644 src/fontloader/misc/fontloader-l-lua.lua create mode 100644 src/fontloader/misc/fontloader-l-math.lua create mode 100644 src/fontloader/misc/fontloader-l-string.lua create mode 100644 src/fontloader/misc/fontloader-l-table.lua create mode 100644 src/fontloader/misc/fontloader-languages.lua create mode 100644 src/fontloader/misc/fontloader-languages.tex create mode 100644 src/fontloader/misc/fontloader-math.lua create mode 100644 src/fontloader/misc/fontloader-math.tex create mode 100644 src/fontloader/misc/fontloader-mplib.lua create mode 100644 src/fontloader/misc/fontloader-mplib.tex create mode 100644 src/fontloader/misc/fontloader-plain.tex create mode 100644 src/fontloader/misc/fontloader-preprocessor-test.tex create mode 100644 src/fontloader/misc/fontloader-preprocessor.lua create mode 100644 src/fontloader/misc/fontloader-preprocessor.tex create mode 100644 src/fontloader/misc/fontloader-swiglib-test.lua create mode 100644 src/fontloader/misc/fontloader-swiglib-test.tex create mode 100644 src/fontloader/misc/fontloader-swiglib.lua create mode 100644 src/fontloader/misc/fontloader-swiglib.tex create mode 100644 src/fontloader/misc/fontloader-test.tex create mode 100644 src/fontloader/misc/fontloader-util-str.lua create mode 100644 src/fontloader/runtime/fontloader-basics-gen.lua create mode 100644 src/fontloader/runtime/fontloader-fontloader.lua diff --git a/src/fontloader/fontloader-basics-gen.lua b/src/fontloader/fontloader-basics-gen.lua deleted file mode 100644 index c19a49a..0000000 --- a/src/fontloader/fontloader-basics-gen.lua +++ /dev/null @@ -1,368 +0,0 @@ -if not modules then modules = { } end modules ['luat-basics-gen'] = { - version = 1.100, - comment = "companion to luatex-*.tex", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end - -local dummyfunction = function() -end - -local dummyreporter = function(c) - return function(...) - (texio.reporter or texio.write_nl)(c .. " : " .. 
string.formatters(...)) - end -end - -statistics = { - register = dummyfunction, - starttiming = dummyfunction, - stoptiming = dummyfunction, - elapsedtime = nil, -} - -directives = { - register = dummyfunction, - enable = dummyfunction, - disable = dummyfunction, -} - -trackers = { - register = dummyfunction, - enable = dummyfunction, - disable = dummyfunction, -} - -experiments = { - register = dummyfunction, - enable = dummyfunction, - disable = dummyfunction, -} - -storage = { -- probably no longer needed - register = dummyfunction, - shared = { }, -} - -logs = { - new = dummyreporter, - reporter = dummyreporter, - messenger = dummyreporter, - report = dummyfunction, -} - -callbacks = { - register = function(n,f) return callback.register(n,f) end, - -} - -utilities = { - storage = { - allocate = function(t) return t or { } end, - mark = function(t) return t or { } end, - }, -} - -characters = characters or { - data = { } -} - --- we need to cheat a bit here - -texconfig.kpse_init = true - -resolvers = resolvers or { } -- no fancy file helpers used - -local remapper = { - otf = "opentype fonts", - ttf = "truetype fonts", - ttc = "truetype fonts", - dfont = "truetype fonts", -- "truetype dictionary", - cid = "cid maps", - cidmap = "cid maps", - fea = "font feature files", - pfa = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this! - pfb = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this! - afm = "afm", -} - -function resolvers.findfile(name,fileformat) - name = string.gsub(name,"\\","/") - if not fileformat or fileformat == "" then - fileformat = file.suffix(name) - if fileformat == "" then - fileformat = "tex" - end - end - fileformat = string.lower(fileformat) - fileformat = remapper[fileformat] or fileformat - local found = kpse.find_file(name,fileformat) - if not found or found == "" then - found = kpse.find_file(name,"other text files") - end - return found -end - --- function resolvers.findbinfile(name,fileformat) --- if not fileformat or fileformat == "" then --- fileformat = file.suffix(name) --- end --- return resolvers.findfile(name,(fileformat and remapper[fileformat]) or fileformat) --- end - -resolvers.findbinfile = resolvers.findfile - -function resolvers.loadbinfile(filename,filetype) - local data = io.loaddata(filename) - return true, data, #data -end - -function resolvers.resolve(s) - return s -end - -function resolvers.unresolve(s) - return s -end - --- Caches ... I will make a real stupid version some day when I'm in the --- mood. After all, the generic code does not need the more advanced --- ConTeXt features. Cached data is not shared between ConTeXt and other --- usage as I don't want any dependency at all. Also, ConTeXt might have --- different needs and tricks added. 
- ---~ containers.usecache = true - -caches = { } - -local writable = nil -local readables = { } -local usingjit = jit - -if not caches.namespace or caches.namespace == "" or caches.namespace == "context" then - caches.namespace = 'generic' -end - -do - - -- standard context tree setup - - local cachepaths = kpse.expand_var('$TEXMFCACHE') or "" - - -- quite like tex live or so (the weird $TEXMFCACHE test seems to be needed on miktex) - - if cachepaths == "" or cachepaths == "$TEXMFCACHE" then - cachepaths = kpse.expand_var('$TEXMFVAR') or "" - end - - -- this also happened to be used (the weird $TEXMFVAR test seems to be needed on miktex) - - if cachepaths == "" or cachepaths == "$TEXMFVAR" then - cachepaths = kpse.expand_var('$VARTEXMF') or "" - end - - -- and this is a last resort (hm, we could use TEMP or TEMPDIR) - - if cachepaths == "" then - local fallbacks = { "TMPDIR", "TEMPDIR", "TMP", "TEMP", "HOME", "HOMEPATH" } - for i=1,#fallbacks do - cachepaths = os.getenv(fallbacks[i]) or "" - if cachepath ~= "" and lfs.isdir(cachepath) then - break - end - end - end - - if cachepaths == "" then - cachepaths = "." - end - - cachepaths = string.split(cachepaths,os.type == "windows" and ";" or ":") - - for i=1,#cachepaths do - local cachepath = cachepaths[i] - if not lfs.isdir(cachepath) then - lfs.mkdirs(cachepath) -- needed for texlive and latex - if lfs.isdir(cachepath) then - texio.write(string.format("(created cache path: %s)",cachepath)) - end - end - if file.is_writable(cachepath) then - writable = file.join(cachepath,"luatex-cache") - lfs.mkdir(writable) - writable = file.join(writable,caches.namespace) - lfs.mkdir(writable) - break - end - end - - for i=1,#cachepaths do - if file.is_readable(cachepaths[i]) then - readables[#readables+1] = file.join(cachepaths[i],"luatex-cache",caches.namespace) - end - end - - if not writable then - texio.write_nl("quiting: fix your writable cache path") - os.exit() - elseif #readables == 0 then - texio.write_nl("quiting: fix your readable cache path") - os.exit() - elseif #readables == 1 and readables[1] == writable then - texio.write(string.format("(using cache: %s)",writable)) - else - texio.write(string.format("(using write cache: %s)",writable)) - texio.write(string.format("(using read cache: %s)",table.concat(readables, " "))) - end - -end - -function caches.getwritablepath(category,subcategory) - local path = file.join(writable,category) - lfs.mkdir(path) - path = file.join(path,subcategory) - lfs.mkdir(path) - return path -end - -function caches.getreadablepaths(category,subcategory) - local t = { } - for i=1,#readables do - t[i] = file.join(readables[i],category,subcategory) - end - return t -end - -local function makefullname(path,name) - if path and path ~= "" then - return file.addsuffix(file.join(path,name),"lua"), file.addsuffix(file.join(path,name),usingjit and "lub" or "luc") - end -end - -function caches.is_writable(path,name) - local fullname = makefullname(path,name) - return fullname and file.is_writable(fullname) -end - -function caches.loaddata(paths,name) - for i=1,#paths do - local data = false - local luaname, lucname = makefullname(paths[i],name) - if lucname and not lfs.isfile(lucname) and type(caches.compile) == "function" then - -- in case we used luatex and luajittex mixed ... 
lub or luc file - texio.write(string.format("(compiling luc: %s)",lucname)) - data = loadfile(luaname) - if data then - data = data() - end - if data then - caches.compile(data,luaname,lucname) - return data - end - end - if lucname and lfs.isfile(lucname) then -- maybe also check for size - texio.write(string.format("(load luc: %s)",lucname)) - data = loadfile(lucname) - if data then - data = data() - end - if data then - return data - else - texio.write(string.format("(loading failed: %s)",lucname)) - end - end - if luaname and lfs.isfile(luaname) then - texio.write(string.format("(load lua: %s)",luaname)) - data = loadfile(luaname) - if data then - data = data() - end - if data then - return data - end - end - end -end - -function caches.savedata(path,name,data) - local luaname, lucname = makefullname(path,name) - if luaname then - texio.write(string.format("(save: %s)",luaname)) - table.tofile(luaname,data,true) - if lucname and type(caches.compile) == "function" then - os.remove(lucname) -- better be safe - texio.write(string.format("(save: %s)",lucname)) - caches.compile(data,luaname,lucname) - end - end -end - --- According to KH os.execute is not permitted in plain/latex so there is --- no reason to use the normal context way. So the method here is slightly --- different from the one we have in context. We also use different suffixes --- as we don't want any clashes (sharing cache files is not that handy as --- context moves on faster.) --- --- Beware: serialization might fail on large files (so maybe we should pcall --- this) in which case one should limit the method to luac and enable support --- for execution. - --- function caches.compile(data,luaname,lucname) --- local d = io.loaddata(luaname) --- if not d or d == "" then --- d = table.serialize(data,true) -- slow --- end --- if d and d ~= "" then --- local f = io.open(lucname,'w') --- if f then --- local s = loadstring(d) --- if s then --- f:write(string.dump(s,true)) --- end --- f:close() --- end --- end --- end - -function caches.compile(data,luaname,lucname) - local d = io.loaddata(luaname) - if not d or d == "" then - d = table.serialize(data,true) -- slow - end - if d and d ~= "" then - local f = io.open(lucname,'wb') - if f then - local s = loadstring(d) - if s then - f:write(string.dump(s,true)) - end - f:close() - end - end -end - --- - -function table.setmetatableindex(t,f) - setmetatable(t,{ __index = f }) -end - --- helper for plain: - -arguments = { } - -if arg then - for i=1,#arg do - local k, v = string.match(arg[i],"^%-%-([^=]+)=?(.-)$") - if k and v then - arguments[k] = v - end - end -end diff --git a/src/fontloader/fontloader-basics-nod.lua b/src/fontloader/fontloader-basics-nod.lua deleted file mode 100644 index 373dab5..0000000 --- a/src/fontloader/fontloader-basics-nod.lua +++ /dev/null @@ -1,178 +0,0 @@ -if not modules then modules = { } end modules ['luatex-fonts-nod'] = { - version = 1.001, - comment = "companion to luatex-fonts.lua", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end - --- Don't depend on code here as it is only needed to complement the --- font handler code. - --- Attributes: - -if tex.attribute[0] ~= 0 then - - texio.write_nl("log","!") - texio.write_nl("log","! Attribute 0 is reserved for ConTeXt's font feature management and has to be") - texio.write_nl("log","! set to zero. 
Also, some attributes in the range 1-255 are used for special") - texio.write_nl("log","! purposes so setting them at the TeX end might break the font handler.") - texio.write_nl("log","!") - - tex.attribute[0] = 0 -- else no features - -end - -attributes = attributes or { } -attributes.unsetvalue = -0x7FFFFFFF - -local numbers, last = { }, 127 - -attributes.private = attributes.private or function(name) - local number = numbers[name] - if not number then - if last < 255 then - last = last + 1 - end - number = last - numbers[name] = number - end - return number -end - --- Nodes: - -nodes = { } -nodes.pool = { } -nodes.handlers = { } - -local nodecodes = { } for k,v in next, node.types () do nodecodes[string.gsub(v,"_","")] = k end -local whatcodes = { } for k,v in next, node.whatsits() do whatcodes[string.gsub(v,"_","")] = k end -local glyphcodes = { [0] = "character", "glyph", "ligature", "ghost", "left", "right" } -local disccodes = { [0] = "discretionary", "explicit", "automatic", "regular", "first", "second" } - -nodes.nodecodes = nodecodes -nodes.whatcodes = whatcodes -nodes.whatsitcodes = whatcodes -nodes.glyphcodes = glyphcodes -nodes.disccodes = disccodes - -local free_node = node.free -local remove_node = node.remove -local new_node = node.new -local traverse_id = node.traverse_id - -nodes.handlers.protectglyphs = node.protect_glyphs -nodes.handlers.unprotectglyphs = node.unprotect_glyphs - -local math_code = nodecodes.math -local end_of_math = node.end_of_math - -function node.end_of_math(n) - if n.id == math_code and n.subtype == 1 then - return n - else - return end_of_math(n) - end -end - -function nodes.remove(head, current, free_too) - local t = current - head, current = remove_node(head,current) - if t then - if free_too then - free_node(t) - t = nil - else - t.next, t.prev = nil, nil - end - end - return head, current, t -end - -function nodes.delete(head,current) - return nodes.remove(head,current,true) -end - -function nodes.pool.kern(k) - local n = new_node("kern",1) - n.kern = k - return n -end - --- experimental - -local getfield = node.getfield or function(n,tag) return n[tag] end -local setfield = node.setfield or function(n,tag,value) n[tag] = value end - -nodes.getfield = getfield -nodes.setfield = setfield - -nodes.getattr = getfield -nodes.setattr = setfield - -if node.getid then nodes.getid = node.getid else function nodes.getid (n) return getfield(n,"id") end end -if node.getsubtype then nodes.getsubtype = node.getsubtype else function nodes.getsubtype(n) return getfield(n,"subtype") end end -if node.getnext then nodes.getnext = node.getnext else function nodes.getnext (n) return getfield(n,"next") end end -if node.getprev then nodes.getprev = node.getprev else function nodes.getprev (n) return getfield(n,"prev") end end -if node.getchar then nodes.getchar = node.getchar else function nodes.getchar (n) return getfield(n,"char") end end -if node.getfont then nodes.getfont = node.getfont else function nodes.getfont (n) return getfield(n,"font") end end -if node.getlist then nodes.getlist = node.getlist else function nodes.getlist (n) return getfield(n,"list") end end - -function nodes.tonut (n) return n end -function nodes.tonode(n) return n end - --- being lazy ... just copy a bunch ... 
not all needed in generic but we assume --- nodes to be kind of private anyway - -nodes.tostring = node.tostring or tostring -nodes.copy = node.copy -nodes.copy_list = node.copy_list -nodes.delete = node.delete -nodes.dimensions = node.dimensions -nodes.end_of_math = node.end_of_math -nodes.flush_list = node.flush_list -nodes.flush_node = node.flush_node -nodes.free = node.free -nodes.insert_after = node.insert_after -nodes.insert_before = node.insert_before -nodes.hpack = node.hpack -nodes.new = node.new -nodes.tail = node.tail -nodes.traverse = node.traverse -nodes.traverse_id = node.traverse_id -nodes.slide = node.slide -nodes.vpack = node.vpack - -nodes.first_glyph = node.first_glyph -nodes.first_character = node.first_character -nodes.has_glyph = node.has_glyph or node.first_glyph - -nodes.current_attr = node.current_attr -nodes.do_ligature_n = node.do_ligature_n -nodes.has_field = node.has_field -nodes.last_node = node.last_node -nodes.usedlist = node.usedlist -nodes.protrusion_skippable = node.protrusion_skippable -nodes.write = node.write - -nodes.has_attribute = node.has_attribute -nodes.set_attribute = node.set_attribute -nodes.unset_attribute = node.unset_attribute - -nodes.protect_glyphs = node.protect_glyphs -nodes.unprotect_glyphs = node.unprotect_glyphs -nodes.kerning = node.kerning -nodes.ligaturing = node.ligaturing -nodes.mlist_to_hlist = node.mlist_to_hlist - --- in generic code, at least for some time, we stay nodes, while in context --- we can go nuts (e.g. experimental); this split permits us us keep code --- used elsewhere stable but at the same time play around in context - -nodes.nuts = nodes diff --git a/src/fontloader/fontloader-fontloader.lua b/src/fontloader/fontloader-fontloader.lua deleted file mode 100644 index e9c6638..0000000 --- a/src/fontloader/fontloader-fontloader.lua +++ /dev/null @@ -1,14628 +0,0 @@ --- merged file : luatex-fonts-merged.lua --- parent file : luatex-fonts.lua --- merge date : 12/06/14 14:20:08 - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['l-lua']={ - version=1.001, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local major,minor=string.match(_VERSION,"^[^%d]+(%d+)%.(%d+).*$") -_MAJORVERSION=tonumber(major) or 5 -_MINORVERSION=tonumber(minor) or 1 -_LUAVERSION=_MAJORVERSION+_MINORVERSION/10 -if not lpeg then - lpeg=require("lpeg") -end -if loadstring then - local loadnormal=load - function load(first,...) - if type(first)=="string" then - return loadstring(first,...) - else - return loadnormal(first,...) - end - end -else - loadstring=load -end -if not ipairs then - local function iterate(a,i) - i=i+1 - local v=a[i] - if v~=nil then - return i,v - end - end - function ipairs(a) - return iterate,a,0 - end -end -if not pairs then - function pairs(t) - return next,t - end -end -if not table.unpack then - table.unpack=_G.unpack -elseif not unpack then - _G.unpack=table.unpack -end -if not package.loaders then - package.loaders=package.searchers -end -local print,select,tostring=print,select,tostring -local inspectors={} -function setinspector(inspector) - inspectors[#inspectors+1]=inspector -end -function inspect(...) - for s=1,select("#",...) do - local value=select(s,...) 
- local done=false - for i=1,#inspectors do - done=inspectors[i](value) - if done then - break - end - end - if not done then - print(tostring(value)) - end - end -end -local dummy=function() end -function optionalrequire(...) - local ok,result=xpcall(require,dummy,...) - if ok then - return result - end -end -if lua then - lua.mask=load([[τεχ = 1]]) and "utf" or "ascii" -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['l-lpeg']={ - version=1.001, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -lpeg=require("lpeg") -if not lpeg.print then function lpeg.print(...) print(lpeg.pcode(...)) end end -local type,next,tostring=type,next,tostring -local byte,char,gmatch,format=string.byte,string.char,string.gmatch,string.format -local floor=math.floor -local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt -local lpegtype,lpegmatch,lpegprint=lpeg.type,lpeg.match,lpeg.print -if setinspector then - setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end) -end -lpeg.patterns=lpeg.patterns or {} -local patterns=lpeg.patterns -local anything=P(1) -local endofstring=P(-1) -local alwaysmatched=P(true) -patterns.anything=anything -patterns.endofstring=endofstring -patterns.beginofstring=alwaysmatched -patterns.alwaysmatched=alwaysmatched -local sign=S('+-') -local zero=P('0') -local digit=R('09') -local octdigit=R("07") -local lowercase=R("az") -local uppercase=R("AZ") -local underscore=P("_") -local hexdigit=digit+lowercase+uppercase -local cr,lf,crlf=P("\r"),P("\n"),P("\r\n") -local newline=P("\r")*(P("\n")+P(true))+P("\n") -local escaped=P("\\")*anything -local squote=P("'") -local dquote=P('"') -local space=P(" ") -local period=P(".") -local comma=P(",") -local utfbom_32_be=P('\000\000\254\255') -local utfbom_32_le=P('\255\254\000\000') -local utfbom_16_be=P('\254\255') -local utfbom_16_le=P('\255\254') -local utfbom_8=P('\239\187\191') -local utfbom=utfbom_32_be+utfbom_32_le+utfbom_16_be+utfbom_16_le+utfbom_8 -local utftype=utfbom_32_be*Cc("utf-32-be")+utfbom_32_le*Cc("utf-32-le")+utfbom_16_be*Cc("utf-16-be")+utfbom_16_le*Cc("utf-16-le")+utfbom_8*Cc("utf-8")+alwaysmatched*Cc("utf-8") -local utfstricttype=utfbom_32_be*Cc("utf-32-be")+utfbom_32_le*Cc("utf-32-le")+utfbom_16_be*Cc("utf-16-be")+utfbom_16_le*Cc("utf-16-le")+utfbom_8*Cc("utf-8") -local utfoffset=utfbom_32_be*Cc(4)+utfbom_32_le*Cc(4)+utfbom_16_be*Cc(2)+utfbom_16_le*Cc(2)+utfbom_8*Cc(3)+Cc(0) -local utf8next=R("\128\191") -patterns.utfbom_32_be=utfbom_32_be -patterns.utfbom_32_le=utfbom_32_le -patterns.utfbom_16_be=utfbom_16_be -patterns.utfbom_16_le=utfbom_16_le -patterns.utfbom_8=utfbom_8 -patterns.utf_16_be_nl=P("\000\r\000\n")+P("\000\r")+P("\000\n") -patterns.utf_16_le_nl=P("\r\000\n\000")+P("\r\000")+P("\n\000") -patterns.utf_32_be_nl=P("\000\000\000\r\000\000\000\n")+P("\000\000\000\r")+P("\000\000\000\n") -patterns.utf_32_le_nl=P("\r\000\000\000\n\000\000\000")+P("\r\000\000\000")+P("\n\000\000\000") -patterns.utf8one=R("\000\127") -patterns.utf8two=R("\194\223")*utf8next -patterns.utf8three=R("\224\239")*utf8next*utf8next -patterns.utf8four=R("\240\244")*utf8next*utf8next*utf8next -patterns.utfbom=utfbom -patterns.utftype=utftype -patterns.utfstricttype=utfstricttype -patterns.utfoffset=utfoffset -local 
utf8char=patterns.utf8one+patterns.utf8two+patterns.utf8three+patterns.utf8four -local validutf8char=utf8char^0*endofstring*Cc(true)+Cc(false) -local utf8character=P(1)*R("\128\191")^0 -patterns.utf8=utf8char -patterns.utf8char=utf8char -patterns.utf8character=utf8character -patterns.validutf8=validutf8char -patterns.validutf8char=validutf8char -local eol=S("\n\r") -local spacer=S(" \t\f\v") -local whitespace=eol+spacer -local nonspacer=1-spacer -local nonwhitespace=1-whitespace -patterns.eol=eol -patterns.spacer=spacer -patterns.whitespace=whitespace -patterns.nonspacer=nonspacer -patterns.nonwhitespace=nonwhitespace -local stripper=spacer^0*C((spacer^0*nonspacer^1)^0) -local fullstripper=whitespace^0*C((whitespace^0*nonwhitespace^1)^0) -local collapser=Cs(spacer^0/""*nonspacer^0*((spacer^0/" "*nonspacer^1)^0)) -local b_collapser=Cs(whitespace^0/""*(nonwhitespace^1+whitespace^1/" ")^0) -local e_collapser=Cs((whitespace^1*P(-1)/""+nonwhitespace^1+whitespace^1/" ")^0) -local m_collapser=Cs((nonwhitespace^1+whitespace^1/" ")^0) -local b_stripper=Cs(spacer^0/""*(nonspacer^1+spacer^1/" ")^0) -local e_stripper=Cs((spacer^1*P(-1)/""+nonspacer^1+spacer^1/" ")^0) -local m_stripper=Cs((nonspacer^1+spacer^1/" ")^0) -patterns.stripper=stripper -patterns.fullstripper=fullstripper -patterns.collapser=collapser -patterns.b_collapser=b_collapser -patterns.m_collapser=m_collapser -patterns.e_collapser=e_collapser -patterns.b_stripper=b_stripper -patterns.m_stripper=m_stripper -patterns.e_stripper=e_stripper -patterns.lowercase=lowercase -patterns.uppercase=uppercase -patterns.letter=patterns.lowercase+patterns.uppercase -patterns.space=space -patterns.tab=P("\t") -patterns.spaceortab=patterns.space+patterns.tab -patterns.newline=newline -patterns.emptyline=newline^1 -patterns.equal=P("=") -patterns.comma=comma -patterns.commaspacer=comma*spacer^0 -patterns.period=period -patterns.colon=P(":") -patterns.semicolon=P(";") -patterns.underscore=underscore -patterns.escaped=escaped -patterns.squote=squote -patterns.dquote=dquote -patterns.nosquote=(escaped+(1-squote))^0 -patterns.nodquote=(escaped+(1-dquote))^0 -patterns.unsingle=(squote/"")*patterns.nosquote*(squote/"") -patterns.undouble=(dquote/"")*patterns.nodquote*(dquote/"") -patterns.unquoted=patterns.undouble+patterns.unsingle -patterns.unspacer=((patterns.spacer^1)/"")^0 -patterns.singlequoted=squote*patterns.nosquote*squote -patterns.doublequoted=dquote*patterns.nodquote*dquote -patterns.quoted=patterns.doublequoted+patterns.singlequoted -patterns.digit=digit -patterns.octdigit=octdigit -patterns.hexdigit=hexdigit -patterns.sign=sign -patterns.cardinal=digit^1 -patterns.integer=sign^-1*digit^1 -patterns.unsigned=digit^0*period*digit^1 -patterns.float=sign^-1*patterns.unsigned -patterns.cunsigned=digit^0*comma*digit^1 -patterns.cpunsigned=digit^0*(period+comma)*digit^1 -patterns.cfloat=sign^-1*patterns.cunsigned -patterns.cpfloat=sign^-1*patterns.cpunsigned -patterns.number=patterns.float+patterns.integer -patterns.cnumber=patterns.cfloat+patterns.integer -patterns.cpnumber=patterns.cpfloat+patterns.integer -patterns.oct=zero*octdigit^1 -patterns.octal=patterns.oct -patterns.HEX=zero*P("X")*(digit+uppercase)^1 -patterns.hex=zero*P("x")*(digit+lowercase)^1 -patterns.hexadecimal=zero*S("xX")*hexdigit^1 -patterns.hexafloat=sign^-1*zero*S("xX")*(hexdigit^0*period*hexdigit^1+hexdigit^1*period*hexdigit^0+hexdigit^1)*(S("pP")*sign^-1*hexdigit^1)^-1 -patterns.decafloat=sign^-1*(digit^0*period*digit^1+digit^1*period*digit^0+digit^1)*S("eE")*sign^-1*digit^1 
-patterns.propername=(uppercase+lowercase+underscore)*(uppercase+lowercase+underscore+digit)^0*endofstring -patterns.somecontent=(anything-newline-space)^1 -patterns.beginline=#(1-newline) -patterns.longtostring=Cs(whitespace^0/""*((patterns.quoted+nonwhitespace^1+whitespace^1/""*(P(-1)+Cc(" ")))^0)) -local function anywhere(pattern) - return P { P(pattern)+1*V(1) } -end -lpeg.anywhere=anywhere -function lpeg.instringchecker(p) - p=anywhere(p) - return function(str) - return lpegmatch(p,str) and true or false - end -end -function lpeg.splitter(pattern,action) - return (((1-P(pattern))^1)/action+1)^0 -end -function lpeg.tsplitter(pattern,action) - return Ct((((1-P(pattern))^1)/action+1)^0) -end -local splitters_s,splitters_m,splitters_t={},{},{} -local function splitat(separator,single) - local splitter=(single and splitters_s[separator]) or splitters_m[separator] - if not splitter then - separator=P(separator) - local other=C((1-separator)^0) - if single then - local any=anything - splitter=other*(separator*C(any^0)+"") - splitters_s[separator]=splitter - else - splitter=other*(separator*other)^0 - splitters_m[separator]=splitter - end - end - return splitter -end -local function tsplitat(separator) - local splitter=splitters_t[separator] - if not splitter then - splitter=Ct(splitat(separator)) - splitters_t[separator]=splitter - end - return splitter -end -lpeg.splitat=splitat -lpeg.tsplitat=tsplitat -function string.splitup(str,separator) - if not separator then - separator="," - end - return lpegmatch(splitters_m[separator] or splitat(separator),str) -end -local cache={} -function lpeg.split(separator,str) - local c=cache[separator] - if not c then - c=tsplitat(separator) - cache[separator]=c - end - return lpegmatch(c,str) -end -function string.split(str,separator) - if separator then - local c=cache[separator] - if not c then - c=tsplitat(separator) - cache[separator]=c - end - return lpegmatch(c,str) - else - return { str } - end -end -local spacing=patterns.spacer^0*newline -local empty=spacing*Cc("") -local nonempty=Cs((1-spacing)^1)*spacing^-1 -local content=(empty+nonempty)^1 -patterns.textline=content -local linesplitter=tsplitat(newline) -patterns.linesplitter=linesplitter -function string.splitlines(str) - return lpegmatch(linesplitter,str) -end -local cache={} -function lpeg.checkedsplit(separator,str) - local c=cache[separator] - if not c then - separator=P(separator) - local other=C((1-separator)^1) - c=Ct(separator^0*other*(separator^1*other)^0) - cache[separator]=c - end - return lpegmatch(c,str) -end -function string.checkedsplit(str,separator) - local c=cache[separator] - if not c then - separator=P(separator) - local other=C((1-separator)^1) - c=Ct(separator^0*other*(separator^1*other)^0) - cache[separator]=c - end - return lpegmatch(c,str) -end -local function f2(s) local c1,c2=byte(s,1,2) return c1*64+c2-12416 end -local function f3(s) local c1,c2,c3=byte(s,1,3) return (c1*64+c2)*64+c3-925824 end -local function f4(s) local c1,c2,c3,c4=byte(s,1,4) return ((c1*64+c2)*64+c3)*64+c4-63447168 end -local utf8byte=patterns.utf8one/byte+patterns.utf8two/f2+patterns.utf8three/f3+patterns.utf8four/f4 -patterns.utf8byte=utf8byte -local cache={} -function lpeg.stripper(str) - if type(str)=="string" then - local s=cache[str] - if not s then - s=Cs(((S(str)^1)/""+1)^0) - cache[str]=s - end - return s - else - return Cs(((str^1)/""+1)^0) - end -end -local cache={} -function lpeg.keeper(str) - if type(str)=="string" then - local s=cache[str] - if not s then - 
s=Cs((((1-S(str))^1)/""+1)^0) - cache[str]=s - end - return s - else - return Cs((((1-str)^1)/""+1)^0) - end -end -function lpeg.frontstripper(str) - return (P(str)+P(true))*Cs(anything^0) -end -function lpeg.endstripper(str) - return Cs((1-P(str)*endofstring)^0) -end -function lpeg.replacer(one,two,makefunction,isutf) - local pattern - local u=isutf and utf8char or 1 - if type(one)=="table" then - local no=#one - local p=P(false) - if no==0 then - for k,v in next,one do - p=p+P(k)/v - end - pattern=Cs((p+u)^0) - elseif no==1 then - local o=one[1] - one,two=P(o[1]),o[2] - pattern=Cs((one/two+u)^0) - else - for i=1,no do - local o=one[i] - p=p+P(o[1])/o[2] - end - pattern=Cs((p+u)^0) - end - else - pattern=Cs((P(one)/(two or "")+u)^0) - end - if makefunction then - return function(str) - return lpegmatch(pattern,str) - end - else - return pattern - end -end -function lpeg.finder(lst,makefunction,isutf) - local pattern - if type(lst)=="table" then - pattern=P(false) - if #lst==0 then - for k,v in next,lst do - pattern=pattern+P(k) - end - else - for i=1,#lst do - pattern=pattern+P(lst[i]) - end - end - else - pattern=P(lst) - end - if isutf then - pattern=((utf8char or 1)-pattern)^0*pattern - else - pattern=(1-pattern)^0*pattern - end - if makefunction then - return function(str) - return lpegmatch(pattern,str) - end - else - return pattern - end -end -local splitters_f,splitters_s={},{} -function lpeg.firstofsplit(separator) - local splitter=splitters_f[separator] - if not splitter then - local pattern=P(separator) - splitter=C((1-pattern)^0) - splitters_f[separator]=splitter - end - return splitter -end -function lpeg.secondofsplit(separator) - local splitter=splitters_s[separator] - if not splitter then - local pattern=P(separator) - splitter=(1-pattern)^0*pattern*C(anything^0) - splitters_s[separator]=splitter - end - return splitter -end -local splitters_s,splitters_p={},{} -function lpeg.beforesuffix(separator) - local splitter=splitters_s[separator] - if not splitter then - local pattern=P(separator) - splitter=C((1-pattern)^0)*pattern*endofstring - splitters_s[separator]=splitter - end - return splitter -end -function lpeg.afterprefix(separator) - local splitter=splitters_p[separator] - if not splitter then - local pattern=P(separator) - splitter=pattern*C(anything^0) - splitters_p[separator]=splitter - end - return splitter -end -function lpeg.balancer(left,right) - left,right=P(left),P(right) - return P { left*((1-left-right)+V(1))^0*right } -end -local nany=utf8char/"" -function lpeg.counter(pattern) - pattern=Cs((P(pattern)/" "+nany)^0) - return function(str) - return #lpegmatch(pattern,str) - end -end -utf=utf or (unicode and unicode.utf8) or {} -local utfcharacters=utf and utf.characters or string.utfcharacters -local utfgmatch=utf and utf.gmatch -local utfchar=utf and utf.char -lpeg.UP=lpeg.P -if utfcharacters then - function lpeg.US(str) - local p=P(false) - for uc in utfcharacters(str) do - p=p+P(uc) - end - return p - end -elseif utfgmatch then - function lpeg.US(str) - local p=P(false) - for uc in utfgmatch(str,".") do - p=p+P(uc) - end - return p - end -else - function lpeg.US(str) - local p=P(false) - local f=function(uc) - p=p+P(uc) - end - lpegmatch((utf8char/f)^0,str) - return p - end -end -local range=utf8byte*utf8byte+Cc(false) -function lpeg.UR(str,more) - local first,last - if type(str)=="number" then - first=str - last=more or first - else - first,last=lpegmatch(range,str) - if not last then - return P(str) - end - end - if first==last then - return P(str) - 
elseif utfchar and (last-first<8) then - local p=P(false) - for i=first,last do - p=p+P(utfchar(i)) - end - return p - else - local f=function(b) - return b>=first and b<=last - end - return utf8byte/f - end -end -function lpeg.is_lpeg(p) - return p and lpegtype(p)=="pattern" -end -function lpeg.oneof(list,...) - if type(list)~="table" then - list={ list,... } - end - local p=P(list[1]) - for l=2,#list do - p=p+P(list[l]) - end - return p -end -local sort=table.sort -local function copyindexed(old) - local new={} - for i=1,#old do - new[i]=old - end - return new -end -local function sortedkeys(tab) - local keys,s={},0 - for key,_ in next,tab do - s=s+1 - keys[s]=key - end - sort(keys) - return keys -end -function lpeg.append(list,pp,delayed,checked) - local p=pp - if #list>0 then - local keys=copyindexed(list) - sort(keys) - for i=#keys,1,-1 do - local k=keys[i] - if p then - p=P(k)+p - else - p=P(k) - end - end - elseif delayed then - local keys=sortedkeys(list) - if p then - for i=1,#keys,1 do - local k=keys[i] - local v=list[k] - p=P(k)/list+p - end - else - for i=1,#keys do - local k=keys[i] - local v=list[k] - if p then - p=P(k)+p - else - p=P(k) - end - end - if p then - p=p/list - end - end - elseif checked then - local keys=sortedkeys(list) - for i=1,#keys do - local k=keys[i] - local v=list[k] - if p then - if k==v then - p=P(k)+p - else - p=P(k)/v+p - end - else - if k==v then - p=P(k) - else - p=P(k)/v - end - end - end - else - local keys=sortedkeys(list) - for i=1,#keys do - local k=keys[i] - local v=list[k] - if p then - p=P(k)/v+p - else - p=P(k)/v - end - end - end - return p -end -local function make(t,hash) - local p=P(false) - local keys=sortedkeys(t) - for i=1,#keys do - local k=keys[i] - local v=t[k] - local h=hash[v] - if h then - if next(v) then - p=p+P(k)*(make(v,hash)+P(true)) - else - p=p+P(k)*P(true) - end - else - if next(v) then - p=p+P(k)*make(v,hash) - else - p=p+P(k) - end - end - end - return p -end -function lpeg.utfchartabletopattern(list) - local tree={} - local hash={} - local n=#list - if n==0 then - for s in next,list do - local t=tree - for c in gmatch(s,".") do - local tc=t[c] - if not tc then - tc={} - t[c]=tc - end - t=tc - end - hash[t]=s - end - else - for i=1,n do - local t=tree - local s=list[i] - for c in gmatch(s,".") do - local tc=t[c] - if not tc then - tc={} - t[c]=tc - end - t=tc - end - hash[t]=s - end - end - return make(tree,hash) -end -patterns.containseol=lpeg.finder(eol) -local function nextstep(n,step,result) - local m=n%step - local d=floor(n/step) - if d>0 then - local v=V(tostring(step)) - local s=result.start - for i=1,d do - if s then - s=v*s - else - s=v - end - end - result.start=s - end - if step>1 and result.start then - local v=V(tostring(step/2)) - result[tostring(step)]=v*v - end - if step>0 then - return nextstep(m,step/2,result) - else - return result - end -end -function lpeg.times(pattern,n) - return P(nextstep(n,2^16,{ "start",["1"]=pattern })) -end -local trailingzeros=zero^0*-digit -local case_1=period*trailingzeros/"" -local case_2=period*(digit-trailingzeros)^1*(trailingzeros/"") -local number=digit^1*(case_1+case_2) -local stripper=Cs((number+1)^0) -lpeg.patterns.stripzeros=stripper -local byte_to_HEX={} -local byte_to_hex={} -local byte_to_dec={} -local hex_to_byte={} -for i=0,255 do - local H=format("%02X",i) - local h=format("%02x",i) - local d=format("%03i",i) - local c=char(i) - byte_to_HEX[c]=H - byte_to_hex[c]=h - byte_to_dec[c]=d - hex_to_byte[h]=c - hex_to_byte[H]=c -end -local 
hextobyte=P(2)/hex_to_byte -local bytetoHEX=P(1)/byte_to_HEX -local bytetohex=P(1)/byte_to_hex -local bytetodec=P(1)/byte_to_dec -local hextobytes=Cs(hextobyte^0) -local bytestoHEX=Cs(bytetoHEX^0) -local bytestohex=Cs(bytetohex^0) -local bytestodec=Cs(bytetodec^0) -patterns.hextobyte=hextobyte -patterns.bytetoHEX=bytetoHEX -patterns.bytetohex=bytetohex -patterns.bytetodec=bytetodec -patterns.hextobytes=hextobytes -patterns.bytestoHEX=bytestoHEX -patterns.bytestohex=bytestohex -patterns.bytestodec=bytestodec -function string.toHEX(s) - if not s or s=="" then - return s - else - return lpegmatch(bytestoHEX,s) - end -end -function string.tohex(s) - if not s or s=="" then - return s - else - return lpegmatch(bytestohex,s) - end -end -function string.todec(s) - if not s or s=="" then - return s - else - return lpegmatch(bytestodec,s) - end -end -function string.tobytes(s) - if not s or s=="" then - return s - else - return lpegmatch(hextobytes,s) - end -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['l-functions']={ - version=1.001, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -functions=functions or {} -function functions.dummy() end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['l-string']={ - version=1.001, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local string=string -local sub,gmatch,format,char,byte,rep,lower=string.sub,string.gmatch,string.format,string.char,string.byte,string.rep,string.lower -local lpegmatch,patterns=lpeg.match,lpeg.patterns -local P,S,C,Ct,Cc,Cs=lpeg.P,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cc,lpeg.Cs -local unquoted=patterns.squote*C(patterns.nosquote)*patterns.squote+patterns.dquote*C(patterns.nodquote)*patterns.dquote -function string.unquoted(str) - return lpegmatch(unquoted,str) or str -end -function string.quoted(str) - return format("%q",str) -end -function string.count(str,pattern) - local n=0 - for _ in gmatch(str,pattern) do - n=n+1 - end - return n -end -function string.limit(str,n,sentinel) - if #str>n then - sentinel=sentinel or "..." 
- return sub(str,1,(n-#sentinel))..sentinel - else - return str - end -end -local stripper=patterns.stripper -local fullstripper=patterns.fullstripper -local collapser=patterns.collapser -local longtostring=patterns.longtostring -function string.strip(str) - return lpegmatch(stripper,str) or "" -end -function string.fullstrip(str) - return lpegmatch(fullstripper,str) or "" -end -function string.collapsespaces(str) - return lpegmatch(collapser,str) or "" -end -function string.longtostring(str) - return lpegmatch(longtostring,str) or "" -end -local pattern=P(" ")^0*P(-1) -function string.is_empty(str) - if str=="" then - return true - else - return lpegmatch(pattern,str) and true or false - end -end -local anything=patterns.anything -local allescapes=Cc("%")*S(".-+%?()[]*") -local someescapes=Cc("%")*S(".-+%()[]") -local matchescapes=Cc(".")*S("*?") -local pattern_a=Cs ((allescapes+anything )^0 ) -local pattern_b=Cs ((someescapes+matchescapes+anything )^0 ) -local pattern_c=Cs (Cc("^")*(someescapes+matchescapes+anything )^0*Cc("$") ) -function string.escapedpattern(str,simple) - return lpegmatch(simple and pattern_b or pattern_a,str) -end -function string.topattern(str,lowercase,strict) - if str=="" or type(str)~="string" then - return ".*" - elseif strict then - str=lpegmatch(pattern_c,str) - else - str=lpegmatch(pattern_b,str) - end - if lowercase then - return lower(str) - else - return str - end -end -function string.valid(str,default) - return (type(str)=="string" and str~="" and str) or default or nil -end -string.itself=function(s) return s end -local pattern=Ct(C(1)^0) -function string.totable(str) - return lpegmatch(pattern,str) -end -local replacer=lpeg.replacer("@","%%") -function string.tformat(fmt,...) - return format(lpegmatch(replacer,fmt),...) 
-end -string.quote=string.quoted -string.unquote=string.unquoted - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['l-table']={ - version=1.001, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local type,next,tostring,tonumber,ipairs,select=type,next,tostring,tonumber,ipairs,select -local table,string=table,string -local concat,sort,insert,remove=table.concat,table.sort,table.insert,table.remove -local format,lower,dump=string.format,string.lower,string.dump -local getmetatable,setmetatable=getmetatable,setmetatable -local getinfo=debug.getinfo -local lpegmatch,patterns=lpeg.match,lpeg.patterns -local floor=math.floor -local stripper=patterns.stripper -function table.strip(tab) - local lst,l={},0 - for i=1,#tab do - local s=lpegmatch(stripper,tab[i]) or "" - if s=="" then - else - l=l+1 - lst[l]=s - end - end - return lst -end -function table.keys(t) - if t then - local keys,k={},0 - for key,_ in next,t do - k=k+1 - keys[k]=key - end - return keys - else - return {} - end -end -local function compare(a,b) - local ta,tb=type(a),type(b) - if ta==tb then - return a0 then - local n=0 - for _,v in next,t do - n=n+1 - end - if n==#t then - local tt,nt={},0 - for i=1,#t do - local v=t[i] - local tv=type(v) - if tv=="number" then - nt=nt+1 - if hexify then - tt[nt]=format("0x%X",v) - else - tt[nt]=tostring(v) - end - elseif tv=="string" then - nt=nt+1 - tt[nt]=format("%q",v) - elseif tv=="boolean" then - nt=nt+1 - tt[nt]=v and "true" or "false" - else - tt=nil - break - end - end - return tt - end - end - return nil -end -local propername=patterns.propername -local function dummy() end -local function do_serialize(root,name,depth,level,indexed) - if level>0 then - depth=depth.." 
" - if indexed then - handle(format("%s{",depth)) - else - local tn=type(name) - if tn=="number" then - if hexify then - handle(format("%s[0x%X]={",depth,name)) - else - handle(format("%s[%s]={",depth,name)) - end - elseif tn=="string" then - if noquotes and not reserved[name] and lpegmatch(propername,name) then - handle(format("%s%s={",depth,name)) - else - handle(format("%s[%q]={",depth,name)) - end - elseif tn=="boolean" then - handle(format("%s[%s]={",depth,name and "true" or "false")) - else - handle(format("%s{",depth)) - end - end - end - if root and next(root) then - local first,last=nil,0 - if compact then - last=#root - for k=1,last do - if root[k]==nil then - last=k-1 - break - end - end - if last>0 then - first=1 - end - end - local sk=sortedkeys(root) - for i=1,#sk do - local k=sk[i] - local v=root[k] - local tv,tk=type(v),type(k) - if compact and first and tk=="number" and k>=first and k<=last then - if tv=="number" then - if hexify then - handle(format("%s 0x%X,",depth,v)) - else - handle(format("%s %s,",depth,v)) - end - elseif tv=="string" then - if reduce and tonumber(v) then - handle(format("%s %s,",depth,v)) - else - handle(format("%s %q,",depth,v)) - end - elseif tv=="table" then - if not next(v) then - handle(format("%s {},",depth)) - elseif inline then - local st=simple_table(v) - if st then - handle(format("%s { %s },",depth,concat(st,", "))) - else - do_serialize(v,k,depth,level+1,true) - end - else - do_serialize(v,k,depth,level+1,true) - end - elseif tv=="boolean" then - handle(format("%s %s,",depth,v and "true" or "false")) - elseif tv=="function" then - if functions then - handle(format('%s load(%q),',depth,dump(v))) - else - handle(format('%s "function",',depth)) - end - else - handle(format("%s %q,",depth,tostring(v))) - end - elseif k=="__p__" then - if false then - handle(format("%s __p__=nil,",depth)) - end - elseif tv=="number" then - if tk=="number" then - if hexify then - handle(format("%s [0x%X]=0x%X,",depth,k,v)) - else - handle(format("%s [%s]=%s,",depth,k,v)) - end - elseif tk=="boolean" then - if hexify then - handle(format("%s [%s]=0x%X,",depth,k and "true" or "false",v)) - else - handle(format("%s [%s]=%s,",depth,k and "true" or "false",v)) - end - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - if hexify then - handle(format("%s %s=0x%X,",depth,k,v)) - else - handle(format("%s %s=%s,",depth,k,v)) - end - else - if hexify then - handle(format("%s [%q]=0x%X,",depth,k,v)) - else - handle(format("%s [%q]=%s,",depth,k,v)) - end - end - elseif tv=="string" then - if reduce and tonumber(v) then - if tk=="number" then - if hexify then - handle(format("%s [0x%X]=%s,",depth,k,v)) - else - handle(format("%s [%s]=%s,",depth,k,v)) - end - elseif tk=="boolean" then - handle(format("%s [%s]=%s,",depth,k and "true" or "false",v)) - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - handle(format("%s %s=%s,",depth,k,v)) - else - handle(format("%s [%q]=%s,",depth,k,v)) - end - else - if tk=="number" then - if hexify then - handle(format("%s [0x%X]=%q,",depth,k,v)) - else - handle(format("%s [%s]=%q,",depth,k,v)) - end - elseif tk=="boolean" then - handle(format("%s [%s]=%q,",depth,k and "true" or "false",v)) - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - handle(format("%s %s=%q,",depth,k,v)) - else - handle(format("%s [%q]=%q,",depth,k,v)) - end - end - elseif tv=="table" then - if not next(v) then - if tk=="number" then - if hexify then - handle(format("%s [0x%X]={},",depth,k)) - else - 
handle(format("%s [%s]={},",depth,k)) - end - elseif tk=="boolean" then - handle(format("%s [%s]={},",depth,k and "true" or "false")) - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - handle(format("%s %s={},",depth,k)) - else - handle(format("%s [%q]={},",depth,k)) - end - elseif inline then - local st=simple_table(v) - if st then - if tk=="number" then - if hexify then - handle(format("%s [0x%X]={ %s },",depth,k,concat(st,", "))) - else - handle(format("%s [%s]={ %s },",depth,k,concat(st,", "))) - end - elseif tk=="boolean" then - handle(format("%s [%s]={ %s },",depth,k and "true" or "false",concat(st,", "))) - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - handle(format("%s %s={ %s },",depth,k,concat(st,", "))) - else - handle(format("%s [%q]={ %s },",depth,k,concat(st,", "))) - end - else - do_serialize(v,k,depth,level+1) - end - else - do_serialize(v,k,depth,level+1) - end - elseif tv=="boolean" then - if tk=="number" then - if hexify then - handle(format("%s [0x%X]=%s,",depth,k,v and "true" or "false")) - else - handle(format("%s [%s]=%s,",depth,k,v and "true" or "false")) - end - elseif tk=="boolean" then - handle(format("%s [%s]=%s,",depth,tostring(k),v and "true" or "false")) - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - handle(format("%s %s=%s,",depth,k,v and "true" or "false")) - else - handle(format("%s [%q]=%s,",depth,k,v and "true" or "false")) - end - elseif tv=="function" then - if functions then - local f=getinfo(v).what=="C" and dump(dummy) or dump(v) - if tk=="number" then - if hexify then - handle(format("%s [0x%X]=load(%q),",depth,k,f)) - else - handle(format("%s [%s]=load(%q),",depth,k,f)) - end - elseif tk=="boolean" then - handle(format("%s [%s]=load(%q),",depth,k and "true" or "false",f)) - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - handle(format("%s %s=load(%q),",depth,k,f)) - else - handle(format("%s [%q]=load(%q),",depth,k,f)) - end - end - else - if tk=="number" then - if hexify then - handle(format("%s [0x%X]=%q,",depth,k,tostring(v))) - else - handle(format("%s [%s]=%q,",depth,k,tostring(v))) - end - elseif tk=="boolean" then - handle(format("%s [%s]=%q,",depth,k and "true" or "false",tostring(v))) - elseif noquotes and not reserved[k] and lpegmatch(propername,k) then - handle(format("%s %s=%q,",depth,k,tostring(v))) - else - handle(format("%s [%q]=%q,",depth,k,tostring(v))) - end - end - end - end - if level>0 then - handle(format("%s},",depth)) - end -end -local function serialize(_handle,root,name,specification) - local tname=type(name) - if type(specification)=="table" then - noquotes=specification.noquotes - hexify=specification.hexify - handle=_handle or specification.handle or print - reduce=specification.reduce or false - functions=specification.functions - compact=specification.compact - inline=specification.inline and compact - if functions==nil then - functions=true - end - if compact==nil then - compact=true - end - if inline==nil then - inline=compact - end - else - noquotes=false - hexify=false - handle=_handle or print - reduce=false - compact=true - inline=true - functions=true - end - if tname=="string" then - if name=="return" then - handle("return {") - else - handle(name.."={") - end - elseif tname=="number" then - if hexify then - handle(format("[0x%X]={",name)) - else - handle("["..name.."]={") - end - elseif tname=="boolean" then - if name then - handle("return {") - else - handle("{") - end - else - handle("t={") - end - if root then - 
if getmetatable(root) then - local dummy=root._w_h_a_t_e_v_e_r_ - root._w_h_a_t_e_v_e_r_=nil - end - if next(root) then - do_serialize(root,name,"",0) - end - end - handle("}") -end -function table.serialize(root,name,specification) - local t,n={},0 - local function flush(s) - n=n+1 - t[n]=s - end - serialize(flush,root,name,specification) - return concat(t,"\n") -end -table.tohandle=serialize -local maxtab=2*1024 -function table.tofile(filename,root,name,specification) - local f=io.open(filename,'w') - if f then - if maxtab>1 then - local t,n={},0 - local function flush(s) - n=n+1 - t[n]=s - if n>maxtab then - f:write(concat(t,"\n"),"\n") - t,n={},0 - end - end - serialize(flush,root,name,specification) - f:write(concat(t,"\n"),"\n") - else - local function flush(s) - f:write(s,"\n") - end - serialize(flush,root,name,specification) - end - f:close() - io.flush() - end -end -local function flattened(t,f,depth) - if f==nil then - f={} - depth=0xFFFF - elseif tonumber(f) then - depth=f - f={} - elseif not depth then - depth=0xFFFF - end - for k,v in next,t do - if type(k)~="number" then - if depth>0 and type(v)=="table" then - flattened(v,f,depth-1) - else - f[#f+1]=v - end - end - end - for k=1,#t do - local v=t[k] - if depth>0 and type(v)=="table" then - flattened(v,f,depth-1) - else - f[#f+1]=v - end - end - return f -end -table.flattened=flattened -local function unnest(t,f) - if not f then - f={} - end - for i=1,#t do - local v=t[i] - if type(v)=="table" then - if type(v[1])=="table" then - unnest(v,f) - else - f[#f+1]=v - end - else - f[#f+1]=v - end - end - return f -end -function table.unnest(t) - return unnest(t) -end -local function are_equal(a,b,n,m) - if a and b and #a==#b then - n=n or 1 - m=m or #a - for i=n,m do - local ai,bi=a[i],b[i] - if ai==bi then - elseif type(ai)=="table" and type(bi)=="table" then - if not are_equal(ai,bi) then - return false - end - else - return false - end - end - return true - else - return false - end -end -local function identical(a,b) - for ka,va in next,a do - local vb=b[ka] - if va==vb then - elseif type(va)=="table" and type(vb)=="table" then - if not identical(va,vb) then - return false - end - else - return false - end - end - return true -end -table.identical=identical -table.are_equal=are_equal -local function sparse(old,nest,keeptables) - local new={} - for k,v in next,old do - if not (v=="" or v==false) then - if nest and type(v)=="table" then - v=sparse(v,nest) - if keeptables or next(v) then - new[k]=v - end - else - new[k]=v - end - end - end - return new -end -table.sparse=sparse -function table.compact(t) - return sparse(t,true,true) -end -function table.contains(t,v) - if t then - for i=1,#t do - if t[i]==v then - return i - end - end - end - return false -end -function table.count(t) - local n=0 - for k,v in next,t do - n=n+1 - end - return n -end -function table.swapped(t,s) - local n={} - if s then - for k,v in next,s do - n[k]=v - end - end - for k,v in next,t do - n[v]=k - end - return n -end -function table.mirrored(t) - local n={} - for k,v in next,t do - n[v]=k - n[k]=v - end - return n -end -function table.reversed(t) - if t then - local tt,tn={},#t - if tn>0 then - local ttn=0 - for i=tn,1,-1 do - ttn=ttn+1 - tt[ttn]=t[i] - end - end - return tt - end -end -function table.reverse(t) - if t then - local n=#t - for i=1,floor(n/2) do - local j=n-i+1 - t[i],t[j]=t[j],t[i] - end - return t - end -end -function table.sequenced(t,sep,simple) - if not t then - return "" - end - local n=#t - local s={} - if n>0 then - for i=1,n 
do - s[i]=tostring(t[i]) - end - else - n=0 - for k,v in sortedhash(t) do - if simple then - if v==true then - n=n+1 - s[n]=k - elseif v and v~="" then - n=n+1 - s[n]=k.."="..tostring(v) - end - else - n=n+1 - s[n]=k.."="..tostring(v) - end - end - end - return concat(s,sep or " | ") -end -function table.print(t,...) - if type(t)~="table" then - print(tostring(t)) - else - serialize(print,t,...) - end -end -if setinspector then - setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end) -end -function table.sub(t,i,j) - return { unpack(t,i,j) } -end -function table.is_empty(t) - return not t or not next(t) -end -function table.has_one_entry(t) - return t and not next(t,next(t)) -end -function table.loweredkeys(t) - local l={} - for k,v in next,t do - l[lower(k)]=v - end - return l -end -function table.unique(old) - local hash={} - local new={} - local n=0 - for i=1,#old do - local oi=old[i] - if not hash[oi] then - n=n+1 - new[n]=oi - hash[oi]=true - end - end - return new -end -function table.sorted(t,...) - sort(t,...) - return t -end -function table.values(t,s) - if t then - local values,keys,v={},{},0 - for key,value in next,t do - if not keys[value] then - v=v+1 - values[v]=value - keys[k]=key - end - end - if s then - sort(values) - end - return values - else - return {} - end -end -function table.filtered(t,pattern,sort,cmp) - if t and type(pattern)=="string" then - if sort then - local s - if cmp then - s=sortedhashkeys(t,function(a,b) return cmp(t,a,b) end) - else - s=sortedkeys(t) - end - local n=0 - local m=#s - local function kv(s) - while n16*1024*1024 then - step=16*1024*1024 - else - step=floor(size/(1024*1024))*1024*1024/8 - end - local data={} - while true do - local r=f:read(step) - if not r then - return concat(data) - else - data[#data+1]=r - end - end - end -end -io.readall=readall -function io.loaddata(filename,textmode) - local f=io.open(filename,(textmode and 'r') or 'rb') - if f then - local data=readall(f) - f:close() - if #data>0 then - return data - end - end -end -function io.savedata(filename,data,joiner) - local f=io.open(filename,"wb") - if f then - if type(data)=="table" then - f:write(concat(data,joiner or "")) - elseif type(data)=="function" then - data(f) - else - f:write(data or "") - end - f:close() - io.flush() - return true - else - return false - end -end -function io.loadlines(filename,n) - local f=io.open(filename,'r') - if not f then - elseif n then - local lines={} - for i=1,n do - local line=f:read("*lines") - if line then - lines[#lines+1]=line - else - break - end - end - f:close() - lines=concat(lines,"\n") - if #lines>0 then - return lines - end - else - local line=f:read("*line") or "" - f:close() - if #line>0 then - return line - end - end -end -function io.loadchunk(filename,n) - local f=io.open(filename,'rb') - if f then - local data=f:read(n or 1024) - f:close() - if #data>0 then - return data - end - end -end -function io.exists(filename) - local f=io.open(filename) - if f==nil then - return false - else - f:close() - return true - end -end -function io.size(filename) - local f=io.open(filename) - if f==nil then - return 0 - else - local s=f:seek("end") - f:close() - return s - end -end -function io.noflines(f) - if type(f)=="string" then - local f=io.open(filename) - if f then - local n=f and io.noflines(f) or 0 - f:close() - return n - else - return 0 - end - else - local n=0 - for _ in f:lines() do - n=n+1 - end - f:seek('set',0) - return n - end -end -local nextchar={ - [ 4]=function(f) - 
return f:read(1,1,1,1) - end, - [ 2]=function(f) - return f:read(1,1) - end, - [ 1]=function(f) - return f:read(1) - end, - [-2]=function(f) - local a,b=f:read(1,1) - return b,a - end, - [-4]=function(f) - local a,b,c,d=f:read(1,1,1,1) - return d,c,b,a - end -} -function io.characters(f,n) - if f then - return nextchar[n or 1],f - end -end -local nextbyte={ - [4]=function(f) - local a,b,c,d=f:read(1,1,1,1) - if d then - return byte(a),byte(b),byte(c),byte(d) - end - end, - [3]=function(f) - local a,b,c=f:read(1,1,1) - if b then - return byte(a),byte(b),byte(c) - end - end, - [2]=function(f) - local a,b=f:read(1,1) - if b then - return byte(a),byte(b) - end - end, - [1]=function (f) - local a=f:read(1) - if a then - return byte(a) - end - end, - [-2]=function (f) - local a,b=f:read(1,1) - if b then - return byte(b),byte(a) - end - end, - [-3]=function(f) - local a,b,c=f:read(1,1,1) - if b then - return byte(c),byte(b),byte(a) - end - end, - [-4]=function(f) - local a,b,c,d=f:read(1,1,1,1) - if d then - return byte(d),byte(c),byte(b),byte(a) - end - end -} -function io.bytes(f,n) - if f then - return nextbyte[n or 1],f - else - return nil,nil - end -end -function io.ask(question,default,options) - while true do - io.write(question) - if options then - io.write(format(" [%s]",concat(options,"|"))) - end - if default then - io.write(format(" [%s]",default)) - end - io.write(format(" ")) - io.flush() - local answer=io.read() - answer=gsub(answer,"^%s*(.*)%s*$","%1") - if answer=="" and default then - return default - elseif not options then - return answer - else - for k=1,#options do - if options[k]==answer then - return answer - end - end - local pattern="^"..answer - for k=1,#options do - local v=options[k] - if find(v,pattern) then - return v - end - end - end - end -end -local function readnumber(f,n,m) - if m then - f:seek("set",n) - n=m - end - if n==1 then - return byte(f:read(1)) - elseif n==2 then - local a,b=byte(f:read(2),1,2) - return 256*a+b - elseif n==3 then - local a,b,c=byte(f:read(3),1,3) - return 256*256*a+256*b+c - elseif n==4 then - local a,b,c,d=byte(f:read(4),1,4) - return 256*256*256*a+256*256*b+256*c+d - elseif n==8 then - local a,b=readnumber(f,4),readnumber(f,4) - return 256*a+b - elseif n==12 then - local a,b,c=readnumber(f,4),readnumber(f,4),readnumber(f,4) - return 256*256*a+256*b+c - elseif n==-2 then - local b,a=byte(f:read(2),1,2) - return 256*a+b - elseif n==-3 then - local c,b,a=byte(f:read(3),1,3) - return 256*256*a+256*b+c - elseif n==-4 then - local d,c,b,a=byte(f:read(4),1,4) - return 256*256*256*a+256*256*b+256*c+d - elseif n==-8 then - local h,g,f,e,d,c,b,a=byte(f:read(8),1,8) - return 256*256*256*256*256*256*256*a+256*256*256*256*256*256*b+256*256*256*256*256*c+256*256*256*256*d+256*256*256*e+256*256*f+256*g+h - else - return 0 - end -end -io.readnumber=readnumber -function io.readstring(f,n,m) - if m then - f:seek("set",n) - n=m - end - local str=gsub(f:read(n),"\000","") - return str -end -if not io.i_limiter then function io.i_limiter() end end -if not io.o_limiter then function io.o_limiter() end end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['l-file']={ - version=1.001, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -file=file or {} -local file=file -if not lfs then - lfs=optionalrequire("lfs") -end -if not lfs then - lfs={ 
- getcurrentdir=function() - return "." - end, - attributes=function() - return nil - end, - isfile=function(name) - local f=io.open(name,'rb') - if f then - f:close() - return true - end - end, - isdir=function(name) - print("you need to load lfs") - return false - end - } -elseif not lfs.isfile then - local attributes=lfs.attributes - function lfs.isdir(name) - return attributes(name,"mode")=="directory" - end - function lfs.isfile(name) - return attributes(name,"mode")=="file" - end -end -local insert,concat=table.insert,table.concat -local match,find,gmatch=string.match,string.find,string.gmatch -local lpegmatch=lpeg.match -local getcurrentdir,attributes=lfs.currentdir,lfs.attributes -local checkedsplit=string.checkedsplit -local P,R,S,C,Cs,Cp,Cc,Ct=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Cs,lpeg.Cp,lpeg.Cc,lpeg.Ct -local colon=P(":") -local period=P(".") -local periods=P("..") -local fwslash=P("/") -local bwslash=P("\\") -local slashes=S("\\/") -local noperiod=1-period -local noslashes=1-slashes -local name=noperiod^1 -local suffix=period/""*(1-period-slashes)^1*-1 -local pattern=C((1-(slashes^1*noslashes^1*-1))^1)*P(1) -local function pathpart(name,default) - return name and lpegmatch(pattern,name) or default or "" -end -local pattern=(noslashes^0*slashes)^1*C(noslashes^1)*-1 -local function basename(name) - return name and lpegmatch(pattern,name) or name -end -local pattern=(noslashes^0*slashes^1)^0*Cs((1-suffix)^1)*suffix^0 -local function nameonly(name) - return name and lpegmatch(pattern,name) or name -end -local pattern=(noslashes^0*slashes)^0*(noperiod^1*period)^1*C(noperiod^1)*-1 -local function suffixonly(name) - return name and lpegmatch(pattern,name) or "" -end -local pattern=(noslashes^0*slashes)^0*noperiod^1*((period*C(noperiod^1))^1)*-1+Cc("") -local function suffixesonly(name) - if name then - return lpegmatch(pattern,name) - else - return "" - end -end -file.pathpart=pathpart -file.basename=basename -file.nameonly=nameonly -file.suffixonly=suffixonly -file.suffix=suffixonly -file.suffixesonly=suffixesonly -file.suffixes=suffixesonly -file.dirname=pathpart -file.extname=suffixonly -local drive=C(R("az","AZ"))*colon -local path=C((noslashes^0*slashes)^0) -local suffix=period*C(P(1-period)^0*P(-1)) -local base=C((1-suffix)^0) -local rest=C(P(1)^0) -drive=drive+Cc("") -path=path+Cc("") -base=base+Cc("") -suffix=suffix+Cc("") -local pattern_a=drive*path*base*suffix -local pattern_b=path*base*suffix -local pattern_c=C(drive*path)*C(base*suffix) -local pattern_d=path*rest -function file.splitname(str,splitdrive) - if not str then - elseif splitdrive then - return lpegmatch(pattern_a,str) - else - return lpegmatch(pattern_b,str) - end -end -function file.splitbase(str) - if str then - return lpegmatch(pattern_d,str) - else - return "",str - end -end -function file.nametotable(str,splitdrive) - if str then - local path,drive,subpath,name,base,suffix=lpegmatch(pattern_c,str) - if splitdrive then - return { - path=path, - drive=drive, - subpath=subpath, - name=name, - base=base, - suffix=suffix, - } - else - return { - path=path, - name=name, - base=base, - suffix=suffix, - } - end - end -end -local pattern=Cs(((period*(1-period-slashes)^1*-1)/""+1)^1) -function file.removesuffix(name) - return name and lpegmatch(pattern,name) -end -local suffix=period/""*(1-period-slashes)^1*-1 -local pattern=Cs((noslashes^0*slashes^1)^0*((1-suffix)^1))*Cs(suffix) -function file.addsuffix(filename,suffix,criterium) - if not filename or not suffix or suffix=="" then - return filename - elseif 
criterium==true then - return filename.."."..suffix - elseif not criterium then - local n,s=lpegmatch(pattern,filename) - if not s or s=="" then - return filename.."."..suffix - else - return filename - end - else - local n,s=lpegmatch(pattern,filename) - if s and s~="" then - local t=type(criterium) - if t=="table" then - for i=1,#criterium do - if s==criterium[i] then - return filename - end - end - elseif t=="string" then - if s==criterium then - return filename - end - end - end - return (n or filename).."."..suffix - end -end -local suffix=period*(1-period-slashes)^1*-1 -local pattern=Cs((1-suffix)^0) -function file.replacesuffix(name,suffix) - if name and suffix and suffix~="" then - return lpegmatch(pattern,name).."."..suffix - else - return name - end -end -local reslasher=lpeg.replacer(P("\\"),"/") -function file.reslash(str) - return str and lpegmatch(reslasher,str) -end -function file.is_writable(name) - if not name then - elseif lfs.isdir(name) then - name=name.."/m_t_x_t_e_s_t.tmp" - local f=io.open(name,"wb") - if f then - f:close() - os.remove(name) - return true - end - elseif lfs.isfile(name) then - local f=io.open(name,"ab") - if f then - f:close() - return true - end - else - local f=io.open(name,"ab") - if f then - f:close() - os.remove(name) - return true - end - end - return false -end -local readable=P("r")*Cc(true) -function file.is_readable(name) - if name then - local a=attributes(name) - return a and lpegmatch(readable,a.permissions) or false - else - return false - end -end -file.isreadable=file.is_readable -file.iswritable=file.is_writable -function file.size(name) - if name then - local a=attributes(name) - return a and a.size or 0 - else - return 0 - end -end -function file.splitpath(str,separator) - return str and checkedsplit(lpegmatch(reslasher,str),separator or io.pathseparator) -end -function file.joinpath(tab,separator) - return tab and concat(tab,separator or io.pathseparator) -end -local someslash=S("\\/") -local stripper=Cs(P(fwslash)^0/""*reslasher) -local isnetwork=someslash*someslash*(1-someslash)+(1-fwslash-colon)^1*colon -local isroot=fwslash^1*-1 -local hasroot=fwslash^1 -local reslasher=lpeg.replacer(S("\\/"),"/") -local deslasher=lpeg.replacer(S("\\/")^1,"/") -function file.join(one,two,three,...) - if not two then - return one=="" and one or lpegmatch(stripper,one) - end - if one=="" then - return lpegmatch(stripper,three and concat({ two,three,... },"/") or two) - end - if lpegmatch(isnetwork,one) then - local one=lpegmatch(reslasher,one) - local two=lpegmatch(deslasher,three and concat({ two,three,... },"/") or two) - if lpegmatch(hasroot,two) then - return one..two - else - return one.."/"..two - end - elseif lpegmatch(isroot,one) then - local two=lpegmatch(deslasher,three and concat({ two,three,... },"/") or two) - if lpegmatch(hasroot,two) then - return two - else - return "/"..two - end - else - return lpegmatch(deslasher,concat({ one,two,three,... },"/")) - end -end -local drivespec=R("az","AZ")^1*colon -local anchors=fwslash+drivespec -local untouched=periods+(1-period)^1*P(-1) -local mswindrive=Cs(drivespec*(bwslash/"/"+fwslash)^0) -local mswinuncpath=(bwslash+fwslash)*(bwslash+fwslash)*Cc("//") -local splitstarter=(mswindrive+mswinuncpath+Cc(false))*Ct(lpeg.splitat(S("/\\")^1)) -local absolute=fwslash -function file.collapsepath(str,anchor) - if not str then - return - end - if anchor==true and not lpegmatch(anchors,str) then - str=getcurrentdir().."/"..str - end - if str=="" or str=="." then - return "." 
- elseif lpegmatch(untouched,str) then - return lpegmatch(reslasher,str) - end - local starter,oldelements=lpegmatch(splitstarter,str) - local newelements={} - local i=#oldelements - while i>0 do - local element=oldelements[i] - if element=='.' then - elseif element=='..' then - local n=i-1 - while n>0 do - local element=oldelements[n] - if element~='..' and element~='.' then - oldelements[n]='.' - break - else - n=n-1 - end - end - if n<1 then - insert(newelements,1,'..') - end - elseif element~="" then - insert(newelements,1,element) - end - i=i-1 - end - if #newelements==0 then - return starter or "." - elseif starter then - return starter..concat(newelements,'/') - elseif lpegmatch(absolute,str) then - return "/"..concat(newelements,'/') - else - newelements=concat(newelements,'/') - if anchor=="." and find(str,"^%./") then - return "./"..newelements - else - return newelements - end - end -end -local tricky=S("/\\")*P(-1) -local attributes=lfs.attributes -function lfs.isdir(name) - if lpegmatch(tricky,name) then - return attributes(name,"mode")=="directory" - else - return attributes(name.."/.","mode")=="directory" - end -end -function lfs.isfile(name) - return attributes(name,"mode")=="file" -end -local validchars=R("az","09","AZ","--","..") -local pattern_a=lpeg.replacer(1-validchars) -local pattern_a=Cs((validchars+P(1)/"-")^1) -local whatever=P("-")^0/"" -local pattern_b=Cs(whatever*(1-whatever*-1)^1) -function file.robustname(str,strict) - if str then - str=lpegmatch(pattern_a,str) or str - if strict then - return lpegmatch(pattern_b,str) or str - else - return str - end - end -end -file.readdata=io.loaddata -file.savedata=io.savedata -function file.copy(oldname,newname) - if oldname and newname then - local data=io.loaddata(oldname) - if data and data~="" then - file.savedata(newname,data) - end - end -end -local letter=R("az","AZ")+S("_-+") -local separator=P("://") -local qualified=period^0*fwslash+letter*colon+letter^1*separator+letter^1*fwslash -local rootbased=fwslash+letter*colon -lpeg.patterns.qualified=qualified -lpeg.patterns.rootbased=rootbased -function file.is_qualified_path(filename) - return filename and lpegmatch(qualified,filename)~=nil -end -function file.is_rootbased_path(filename) - return filename and lpegmatch(rootbased,filename)~=nil -end -function file.strip(name,dir) - if name then - local b,a=match(name,"^(.-)"..dir.."(.*)$") - return a~="" and a or name - end -end -function lfs.mkdirs(path) - local full="" - for sub in gmatch(path,"(/*[^\\/]+)") do - full=full..sub - lfs.mkdir(full) - end -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['l-boolean']={ - version=1.001, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local type,tonumber=type,tonumber -boolean=boolean or {} -local boolean=boolean -function boolean.tonumber(b) - if b then return 1 else return 0 end -end -function toboolean(str,tolerant) - if str==nil then - return false - elseif str==false then - return false - elseif str==true then - return true - elseif str=="true" then - return true - elseif str=="false" then - return false - elseif not tolerant then - return false - elseif str==0 then - return false - elseif (tonumber(str) or 0)>0 then - return true - else - return str=="yes" or str=="on" or str=="t" - end -end -string.toboolean=toboolean -function 
string.booleanstring(str) - if str=="0" then - return false - elseif str=="1" then - return true - elseif str=="" then - return false - elseif str=="false" then - return false - elseif str=="true" then - return true - elseif (tonumber(str) or 0)>0 then - return true - else - return str=="yes" or str=="on" or str=="t" - end -end -function string.is_boolean(str,default,strict) - if type(str)=="string" then - if str=="true" or str=="yes" or str=="on" or str=="t" or (not strict and str=="1") then - return true - elseif str=="false" or str=="no" or str=="off" or str=="f" or (not strict and str=="0") then - return false - end - end - return default -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['l-math']={ - version=1.001, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local floor,sin,cos,tan=math.floor,math.sin,math.cos,math.tan -if not math.round then - function math.round(x) return floor(x+0.5) end -end -if not math.div then - function math.div(n,m) return floor(n/m) end -end -if not math.mod then - function math.mod(n,m) return n%m end -end -local pipi=2*math.pi/360 -if not math.sind then - function math.sind(d) return sin(d*pipi) end - function math.cosd(d) return cos(d*pipi) end - function math.tand(d) return tan(d*pipi) end -end -if not math.odd then - function math.odd (n) return n%2~=0 end - function math.even(n) return n%2==0 end -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['util-str']={ - version=1.001, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -utilities=utilities or {} -utilities.strings=utilities.strings or {} -local strings=utilities.strings -local format,gsub,rep,sub=string.format,string.gsub,string.rep,string.sub -local load,dump=load,string.dump -local tonumber,type,tostring=tonumber,type,tostring -local unpack,concat=table.unpack,table.concat -local P,V,C,S,R,Ct,Cs,Cp,Carg,Cc=lpeg.P,lpeg.V,lpeg.C,lpeg.S,lpeg.R,lpeg.Ct,lpeg.Cs,lpeg.Cp,lpeg.Carg,lpeg.Cc -local patterns,lpegmatch=lpeg.patterns,lpeg.match -local utfchar,utfbyte=utf.char,utf.byte -local loadstripped=nil -if _LUAVERSION<5.2 then - loadstripped=function(str,shortcuts) - return load(str) - end -else - loadstripped=function(str,shortcuts) - if shortcuts then - return load(dump(load(str),true),nil,nil,shortcuts) - else - return load(dump(load(str),true)) - end - end -end -if not number then number={} end -local stripper=patterns.stripzeros -local function points(n) - n=tonumber(n) - return (not n or n==0) and "0pt" or lpegmatch(stripper,format("%.5fpt",n/65536)) -end -local function basepoints(n) - n=tonumber(n) - return (not n or n==0) and "0bp" or lpegmatch(stripper,format("%.5fbp",n*(7200/7227)/65536)) -end -number.points=points -number.basepoints=basepoints -local rubish=patterns.spaceortab^0*patterns.newline -local anyrubish=patterns.spaceortab+patterns.newline -local anything=patterns.anything -local stripped=(patterns.spaceortab^1/"")*patterns.newline -local leading=rubish^0/"" -local trailing=(anyrubish^1*patterns.endofstring)/"" -local redundant=rubish^3/"\n" -local pattern=Cs(leading*(trailing+redundant+stripped+anything)^0) -function 
strings.collapsecrlf(str) - return lpegmatch(pattern,str) -end -local repeaters={} -function strings.newrepeater(str,offset) - offset=offset or 0 - local s=repeaters[str] - if not s then - s={} - repeaters[str]=s - end - local t=s[offset] - if t then - return t - end - t={} - setmetatable(t,{ __index=function(t,k) - if not k then - return "" - end - local n=k+offset - local s=n>0 and rep(str,n) or "" - t[k]=s - return s - end }) - s[offset]=t - return t -end -local extra,tab,start=0,0,4,0 -local nspaces=strings.newrepeater(" ") -string.nspaces=nspaces -local pattern=Carg(1)/function(t) - extra,tab,start=0,t or 7,1 - end*Cs(( - Cp()*patterns.tab/function(position) - local current=(position-start+1)+extra - local spaces=tab-(current-1)%tab - if spaces>0 then - extra=extra+spaces-1 - return nspaces[spaces] - else - return "" - end - end+patterns.newline*Cp()/function(position) - extra,start=0,position - end+patterns.anything - )^1) -function strings.tabtospace(str,tab) - return lpegmatch(pattern,str,1,tab or 7) -end -local newline=patterns.newline -local endofstring=patterns.endofstring -local whitespace=patterns.whitespace -local spacer=patterns.spacer -local space=spacer^0 -local nospace=space/"" -local endofline=nospace*newline -local stripend=(whitespace^1*endofstring)/"" -local normalline=(nospace*((1-space*(newline+endofstring))^1)*nospace) -local stripempty=endofline^1/"" -local normalempty=endofline^1 -local singleempty=endofline*(endofline^0/"") -local doubleempty=endofline*endofline^-1*(endofline^0/"") -local stripstart=stripempty^0 -local p_prune_normal=Cs (stripstart*(stripend+normalline+normalempty )^0 ) -local p_prune_collapse=Cs (stripstart*(stripend+normalline+doubleempty )^0 ) -local p_prune_noempty=Cs (stripstart*(stripend+normalline+singleempty )^0 ) -local p_retain_normal=Cs ((normalline+normalempty )^0 ) -local p_retain_collapse=Cs ((normalline+doubleempty )^0 ) -local p_retain_noempty=Cs ((normalline+singleempty )^0 ) -local striplinepatterns={ - ["prune"]=p_prune_normal, - ["prune and collapse"]=p_prune_collapse, - ["prune and no empty"]=p_prune_noempty, - ["retain"]=p_retain_normal, - ["retain and collapse"]=p_retain_collapse, - ["retain and no empty"]=p_retain_noempty, - ["collapse"]=patterns.collapser, -} -strings.striplinepatterns=striplinepatterns -function strings.striplines(str,how) - return str and lpegmatch(how and striplinepatterns[how] or p_prune_collapse,str) or str -end -strings.striplong=strings.striplines -function strings.nice(str) - str=gsub(str,"[:%-+_]+"," ") - return str -end -local n=0 -local sequenced=table.sequenced -function string.autodouble(s,sep) - if s==nil then - return '""' - end - local t=type(s) - if t=="number" then - return tostring(s) - end - if t=="table" then - return ('"'..sequenced(s,sep or ",")..'"') - end - return ('"'..tostring(s)..'"') -end -function string.autosingle(s,sep) - if s==nil then - return "''" - end - local t=type(s) - if t=="number" then - return tostring(s) - end - if t=="table" then - return ("'"..sequenced(s,sep or ",").."'") - end - return ("'"..tostring(s).."'") -end -local tracedchars={} -string.tracedchars=tracedchars -strings.tracers=tracedchars -function string.tracedchar(b) - if type(b)=="number" then - return tracedchars[b] or (utfchar(b).." (U+"..format('%05X',b)..")") - else - local c=utfbyte(b) - return tracedchars[c] or (b.." 
(U+"..format('%05X',c)..")") - end -end -function number.signed(i) - if i>0 then - return "+",i - else - return "-",-i - end -end -local zero=P("0")^1/"" -local plus=P("+")/"" -local minus=P("-") -local separator=S(".") -local digit=R("09") -local trailing=zero^1*#S("eE") -local exponent=(S("eE")*(plus+Cs((minus*zero^0*P(-1))/"")+minus)*zero^0*(P(-1)*Cc("0")+P(1)^1)) -local pattern_a=Cs(minus^0*digit^1*(separator/""*trailing+separator*(trailing+digit)^0)*exponent) -local pattern_b=Cs((exponent+P(1))^0) -function number.sparseexponent(f,n) - if not n then - n=f - f="%e" - end - local tn=type(n) - if tn=="string" then - local m=tonumber(n) - if m then - return lpegmatch((f=="%e" or f=="%E") and pattern_a or pattern_b,format(f,m)) - end - elseif tn=="number" then - return lpegmatch((f=="%e" or f=="%E") and pattern_a or pattern_b,format(f,n)) - end - return tostring(n) -end -local template=[[ -%s -%s -return function(%s) return %s end -]] -local preamble,environment="",{} -if _LUAVERSION<5.2 then - preamble=[[ -local lpeg=lpeg -local type=type -local tostring=tostring -local tonumber=tonumber -local format=string.format -local concat=table.concat -local signed=number.signed -local points=number.points -local basepoints= number.basepoints -local utfchar=utf.char -local utfbyte=utf.byte -local lpegmatch=lpeg.match -local nspaces=string.nspaces -local tracedchar=string.tracedchar -local autosingle=string.autosingle -local autodouble=string.autodouble -local sequenced=table.sequenced -local formattednumber=number.formatted -local sparseexponent=number.sparseexponent - ]] -else - environment={ - global=global or _G, - lpeg=lpeg, - type=type, - tostring=tostring, - tonumber=tonumber, - format=string.format, - concat=table.concat, - signed=number.signed, - points=number.points, - basepoints=number.basepoints, - utfchar=utf.char, - utfbyte=utf.byte, - lpegmatch=lpeg.match, - nspaces=string.nspaces, - tracedchar=string.tracedchar, - autosingle=string.autosingle, - autodouble=string.autodouble, - sequenced=table.sequenced, - formattednumber=number.formatted, - sparseexponent=number.sparseexponent, - } -end -local arguments={ "a1" } -setmetatable(arguments,{ __index=function(t,k) - local v=t[k-1]..",a"..k - t[k]=v - return v - end -}) -local prefix_any=C((S("+- .")+R("09"))^0) -local prefix_tab=P("{")*C((1-P("}"))^0)*P("}")+C((1-R("az","AZ","09","%%"))^0) -local format_s=function(f) - n=n+1 - if f and f~="" then - return format("format('%%%ss',a%s)",f,n) - else - return format("(a%s or '')",n) - end -end -local format_S=function(f) - n=n+1 - if f and f~="" then - return format("format('%%%ss',tostring(a%s))",f,n) - else - return format("tostring(a%s)",n) - end -end -local format_q=function() - n=n+1 - return format("(a%s and format('%%q',a%s) or '')",n,n) -end -local format_Q=function() - n=n+1 - return format("format('%%q',tostring(a%s))",n) -end -local format_i=function(f) - n=n+1 - if f and f~="" then - return format("format('%%%si',a%s)",f,n) - else - return format("format('%%i',a%s)",n) - end -end -local format_d=format_i -local format_I=function(f) - n=n+1 - return format("format('%%s%%%si',signed(a%s))",f,n) -end -local format_f=function(f) - n=n+1 - return format("format('%%%sf',a%s)",f,n) -end -local format_F=function(f) - n=n+1 - if not f or f=="" then - return format("(((a%s > -0.0000000005 and a%s < 0.0000000005) and '0') or format((a%s %% 1 == 0) and '%%i' or '%%.9f',a%s))",n,n,n,n) - else - return format("format((a%s %% 1 == 0) and '%%i' or '%%%sf',a%s)",n,f,n) - end -end -local 
format_g=function(f) - n=n+1 - return format("format('%%%sg',a%s)",f,n) -end -local format_G=function(f) - n=n+1 - return format("format('%%%sG',a%s)",f,n) -end -local format_e=function(f) - n=n+1 - return format("format('%%%se',a%s)",f,n) -end -local format_E=function(f) - n=n+1 - return format("format('%%%sE',a%s)",f,n) -end -local format_j=function(f) - n=n+1 - return format("sparseexponent('%%%se',a%s)",f,n) -end -local format_J=function(f) - n=n+1 - return format("sparseexponent('%%%sE',a%s)",f,n) -end -local format_x=function(f) - n=n+1 - return format("format('%%%sx',a%s)",f,n) -end -local format_X=function(f) - n=n+1 - return format("format('%%%sX',a%s)",f,n) -end -local format_o=function(f) - n=n+1 - return format("format('%%%so',a%s)",f,n) -end -local format_c=function() - n=n+1 - return format("utfchar(a%s)",n) -end -local format_C=function() - n=n+1 - return format("tracedchar(a%s)",n) -end -local format_r=function(f) - n=n+1 - return format("format('%%%s.0f',a%s)",f,n) -end -local format_h=function(f) - n=n+1 - if f=="-" then - f=sub(f,2) - return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) - else - return format("format('0x%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) - end -end -local format_H=function(f) - n=n+1 - if f=="-" then - f=sub(f,2) - return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) - else - return format("format('0x%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) - end -end -local format_u=function(f) - n=n+1 - if f=="-" then - f=sub(f,2) - return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) - else - return format("format('u+%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) - end -end -local format_U=function(f) - n=n+1 - if f=="-" then - f=sub(f,2) - return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) - else - return format("format('U+%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) - end -end -local format_p=function() - n=n+1 - return format("points(a%s)",n) -end -local format_b=function() - n=n+1 - return format("basepoints(a%s)",n) -end -local format_t=function(f) - n=n+1 - if f and f~="" then - return format("concat(a%s,%q)",n,f) - else - return format("concat(a%s)",n) - end -end -local format_T=function(f) - n=n+1 - if f and f~="" then - return format("sequenced(a%s,%q)",n,f) - else - return format("sequenced(a%s)",n) - end -end -local format_l=function() - n=n+1 - return format("(a%s and 'true' or 'false')",n) -end -local format_L=function() - n=n+1 - return format("(a%s and 'TRUE' or 'FALSE')",n) -end -local format_N=function() - n=n+1 - return format("tostring(tonumber(a%s) or a%s)",n,n) -end -local format_a=function(f) - n=n+1 - if f and f~="" then - return format("autosingle(a%s,%q)",n,f) - else - return format("autosingle(a%s)",n) - end -end -local format_A=function(f) - n=n+1 - if f and f~="" then - return format("autodouble(a%s,%q)",n,f) - else - return format("autodouble(a%s)",n) - end -end -local format_w=function(f) - n=n+1 - f=tonumber(f) - if f then - return format("nspaces[%s+a%s]",f,n) - else - return format("nspaces[a%s]",n) - end -end -local format_W=function(f) - return format("nspaces[%s]",tonumber(f) or 0) -end -local digit=patterns.digit -local period=patterns.period -local 
three=digit*digit*digit -local splitter=Cs ( - (((1-(three^1*period))^1+C(three))*(Carg(1)*three)^1+C((1-period)^1))*(P(1)/""*Carg(2))*C(2) -) -patterns.formattednumber=splitter -function number.formatted(n,sep1,sep2) - local s=type(s)=="string" and n or format("%0.2f",n) - if sep1==true then - return lpegmatch(splitter,s,1,".",",") - elseif sep1=="." then - return lpegmatch(splitter,s,1,sep1,sep2 or ",") - elseif sep1=="," then - return lpegmatch(splitter,s,1,sep1,sep2 or ".") - else - return lpegmatch(splitter,s,1,sep1 or ",",sep2 or ".") - end -end -local format_m=function(f) - n=n+1 - if not f or f=="" then - f="," - end - return format([[formattednumber(a%s,%q,".")]],n,f) -end -local format_M=function(f) - n=n+1 - if not f or f=="" then - f="." - end - return format([[formattednumber(a%s,%q,",")]],n,f) -end -local format_z=function(f) - n=n+(tonumber(f) or 1) - return "''" -end -local format_rest=function(s) - return format("%q",s) -end -local format_extension=function(extensions,f,name) - local extension=extensions[name] or "tostring(%s)" - local f=tonumber(f) or 1 - if f==0 then - return extension - elseif f==1 then - n=n+1 - local a="a"..n - return format(extension,a,a) - elseif f<0 then - local a="a"..(n+f+1) - return format(extension,a,a) - else - local t={} - for i=1,f do - n=n+1 - t[#t+1]="a"..n - end - return format(extension,unpack(t)) - end -end -local builder=Cs { "start", - start=( - ( - P("%")/""*( - V("!") -+V("s")+V("q")+V("i")+V("d")+V("f")+V("F")+V("g")+V("G")+V("e")+V("E")+V("x")+V("X")+V("o") -+V("c")+V("C")+V("S") -+V("Q") -+V("N") -+V("r")+V("h")+V("H")+V("u")+V("U")+V("p")+V("b")+V("t")+V("T")+V("l")+V("L")+V("I")+V("w") -+V("W") -+V("a") -+V("A") -+V("j")+V("J") -+V("m")+V("M") -+V("z") - )+V("*") - )*(P(-1)+Carg(1)) - )^0, - ["s"]=(prefix_any*P("s"))/format_s, - ["q"]=(prefix_any*P("q"))/format_q, - ["i"]=(prefix_any*P("i"))/format_i, - ["d"]=(prefix_any*P("d"))/format_d, - ["f"]=(prefix_any*P("f"))/format_f, - ["F"]=(prefix_any*P("F"))/format_F, - ["g"]=(prefix_any*P("g"))/format_g, - ["G"]=(prefix_any*P("G"))/format_G, - ["e"]=(prefix_any*P("e"))/format_e, - ["E"]=(prefix_any*P("E"))/format_E, - ["x"]=(prefix_any*P("x"))/format_x, - ["X"]=(prefix_any*P("X"))/format_X, - ["o"]=(prefix_any*P("o"))/format_o, - ["S"]=(prefix_any*P("S"))/format_S, - ["Q"]=(prefix_any*P("Q"))/format_S, - ["N"]=(prefix_any*P("N"))/format_N, - ["c"]=(prefix_any*P("c"))/format_c, - ["C"]=(prefix_any*P("C"))/format_C, - ["r"]=(prefix_any*P("r"))/format_r, - ["h"]=(prefix_any*P("h"))/format_h, - ["H"]=(prefix_any*P("H"))/format_H, - ["u"]=(prefix_any*P("u"))/format_u, - ["U"]=(prefix_any*P("U"))/format_U, - ["p"]=(prefix_any*P("p"))/format_p, - ["b"]=(prefix_any*P("b"))/format_b, - ["t"]=(prefix_tab*P("t"))/format_t, - ["T"]=(prefix_tab*P("T"))/format_T, - ["l"]=(prefix_any*P("l"))/format_l, - ["L"]=(prefix_any*P("L"))/format_L, - ["I"]=(prefix_any*P("I"))/format_I, - ["w"]=(prefix_any*P("w"))/format_w, - ["W"]=(prefix_any*P("W"))/format_W, - ["j"]=(prefix_any*P("j"))/format_j, - ["J"]=(prefix_any*P("J"))/format_J, - ["m"]=(prefix_tab*P("m"))/format_m, - ["M"]=(prefix_tab*P("M"))/format_M, - ["z"]=(prefix_any*P("z"))/format_z, - ["a"]=(prefix_any*P("a"))/format_a, - ["A"]=(prefix_any*P("A"))/format_A, - ["*"]=Cs(((1-P("%"))^1+P("%%")/"%%")^1)/format_rest, - ["?"]=Cs(((1-P("%"))^1 )^1)/format_rest, - ["!"]=Carg(2)*prefix_any*P("!")*C((1-P("!"))^1)*P("!")/format_extension, -} -local direct=Cs ( - P("%")*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*P(-1)/[[local format = string.format return 
function(str) return format("%0",str) end]] -) -local function make(t,str) - local f - local p - local p=lpegmatch(direct,str) - if p then - f=loadstripped(p)() - else - n=0 - p=lpegmatch(builder,str,1,t._connector_,t._extensions_) - if n>0 then - p=format(template,preamble,t._preamble_,arguments[n],p) - f=loadstripped(p,t._environment_)() - else - f=function() return str end - end - end - t[str]=f - return f -end -local function use(t,fmt,...) - return t[fmt](...) -end -strings.formatters={} -if _LUAVERSION<5.2 then - function strings.formatters.new(noconcat) - local t={ _type_="formatter",_connector_=noconcat and "," or "..",_extensions_={},_preamble_=preamble,_environment_={} } - setmetatable(t,{ __index=make,__call=use }) - return t - end -else - function strings.formatters.new(noconcat) - local e={} - for k,v in next,environment do - e[k]=v - end - local t={ _type_="formatter",_connector_=noconcat and "," or "..",_extensions_={},_preamble_="",_environment_=e } - setmetatable(t,{ __index=make,__call=use }) - return t - end -end -local formatters=strings.formatters.new() -string.formatters=formatters -string.formatter=function(str,...) return formatters[str](...) end -local function add(t,name,template,preamble) - if type(t)=="table" and t._type_=="formatter" then - t._extensions_[name]=template or "%s" - if type(preamble)=="string" then - t._preamble_=preamble.."\n"..t._preamble_ - elseif type(preamble)=="table" then - for k,v in next,preamble do - t._environment_[k]=v - end - end - end -end -strings.formatters.add=add -patterns.xmlescape=Cs((P("<")/"<"+P(">")/">"+P("&")/"&"+P('"')/"""+P(1))^0) -patterns.texescape=Cs((C(S("#$%\\{}"))/"\\%1"+P(1))^0) -patterns.luaescape=Cs(((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0) -patterns.luaquoted=Cs(Cc('"')*((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0*Cc('"')) -if _LUAVERSION<5.2 then - add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],"local xmlescape = lpeg.patterns.xmlescape") - add(formatters,"tex",[[lpegmatch(texescape,%s)]],"local texescape = lpeg.patterns.texescape") - add(formatters,"lua",[[lpegmatch(luaescape,%s)]],"local luaescape = lpeg.patterns.luaescape") -else - add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],{ xmlescape=lpeg.patterns.xmlescape }) - add(formatters,"tex",[[lpegmatch(texescape,%s)]],{ texescape=lpeg.patterns.texescape }) - add(formatters,"lua",[[lpegmatch(luaescape,%s)]],{ luaescape=lpeg.patterns.luaescape }) -end -local dquote=patterns.dquote -local equote=patterns.escaped+dquote/'\\"'+1 -local space=patterns.space -local cquote=Cc('"') -local pattern=Cs(dquote*(equote-P(-2))^0*dquote) -+Cs(cquote*(equote-space)^0*space*equote^0*cquote) -function string.optionalquoted(str) - return lpegmatch(pattern,str) or str -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['luat-basics-gen']={ - version=1.100, - comment="companion to luatex-*.tex", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end -local dummyfunction=function() -end -local dummyreporter=function(c) - return function(...) - (texio.reporter or texio.write_nl)(c.." 
: "..string.formatters(...)) - end -end -statistics={ - register=dummyfunction, - starttiming=dummyfunction, - stoptiming=dummyfunction, - elapsedtime=nil, -} -directives={ - register=dummyfunction, - enable=dummyfunction, - disable=dummyfunction, -} -trackers={ - register=dummyfunction, - enable=dummyfunction, - disable=dummyfunction, -} -experiments={ - register=dummyfunction, - enable=dummyfunction, - disable=dummyfunction, -} -storage={ - register=dummyfunction, - shared={}, -} -logs={ - new=dummyreporter, - reporter=dummyreporter, - messenger=dummyreporter, - report=dummyfunction, -} -callbacks={ - register=function(n,f) return callback.register(n,f) end, -} -utilities={ - storage={ - allocate=function(t) return t or {} end, - mark=function(t) return t or {} end, - }, -} -characters=characters or { - data={} -} -texconfig.kpse_init=true -resolvers=resolvers or {} -local remapper={ - otf="opentype fonts", - ttf="truetype fonts", - ttc="truetype fonts", - dfont="truetype fonts", - cid="cid maps", - cidmap="cid maps", - fea="font feature files", - pfa="type1 fonts", - pfb="type1 fonts", - afm="afm", -} -function resolvers.findfile(name,fileformat) - name=string.gsub(name,"\\","/") - if not fileformat or fileformat=="" then - fileformat=file.suffix(name) - if fileformat=="" then - fileformat="tex" - end - end - fileformat=string.lower(fileformat) - fileformat=remapper[fileformat] or fileformat - local found=kpse.find_file(name,fileformat) - if not found or found=="" then - found=kpse.find_file(name,"other text files") - end - return found -end -resolvers.findbinfile=resolvers.findfile -function resolvers.loadbinfile(filename,filetype) - local data=io.loaddata(filename) - return true,data,#data -end -function resolvers.resolve(s) - return s -end -function resolvers.unresolve(s) - return s -end -caches={} -local writable=nil -local readables={} -local usingjit=jit -if not caches.namespace or caches.namespace=="" or caches.namespace=="context" then - caches.namespace='generic' -end -do - local cachepaths=kpse.expand_var('$TEXMFCACHE') or "" - if cachepaths=="" or cachepaths=="$TEXMFCACHE" then - cachepaths=kpse.expand_var('$TEXMFVAR') or "" - end - if cachepaths=="" or cachepaths=="$TEXMFVAR" then - cachepaths=kpse.expand_var('$VARTEXMF') or "" - end - if cachepaths=="" then - local fallbacks={ "TMPDIR","TEMPDIR","TMP","TEMP","HOME","HOMEPATH" } - for i=1,#fallbacks do - cachepaths=os.getenv(fallbacks[i]) or "" - if cachepath~="" and lfs.isdir(cachepath) then - break - end - end - end - if cachepaths=="" then - cachepaths="." 
- end - cachepaths=string.split(cachepaths,os.type=="windows" and ";" or ":") - for i=1,#cachepaths do - local cachepath=cachepaths[i] - if not lfs.isdir(cachepath) then - lfs.mkdirs(cachepath) - if lfs.isdir(cachepath) then - texio.write(string.format("(created cache path: %s)",cachepath)) - end - end - if file.is_writable(cachepath) then - writable=file.join(cachepath,"luatex-cache") - lfs.mkdir(writable) - writable=file.join(writable,caches.namespace) - lfs.mkdir(writable) - break - end - end - for i=1,#cachepaths do - if file.is_readable(cachepaths[i]) then - readables[#readables+1]=file.join(cachepaths[i],"luatex-cache",caches.namespace) - end - end - if not writable then - texio.write_nl("quiting: fix your writable cache path") - os.exit() - elseif #readables==0 then - texio.write_nl("quiting: fix your readable cache path") - os.exit() - elseif #readables==1 and readables[1]==writable then - texio.write(string.format("(using cache: %s)",writable)) - else - texio.write(string.format("(using write cache: %s)",writable)) - texio.write(string.format("(using read cache: %s)",table.concat(readables," "))) - end -end -function caches.getwritablepath(category,subcategory) - local path=file.join(writable,category) - lfs.mkdir(path) - path=file.join(path,subcategory) - lfs.mkdir(path) - return path -end -function caches.getreadablepaths(category,subcategory) - local t={} - for i=1,#readables do - t[i]=file.join(readables[i],category,subcategory) - end - return t -end -local function makefullname(path,name) - if path and path~="" then - return file.addsuffix(file.join(path,name),"lua"),file.addsuffix(file.join(path,name),usingjit and "lub" or "luc") - end -end -function caches.is_writable(path,name) - local fullname=makefullname(path,name) - return fullname and file.is_writable(fullname) -end -function caches.loaddata(paths,name) - for i=1,#paths do - local data=false - local luaname,lucname=makefullname(paths[i],name) - if lucname and not lfs.isfile(lucname) and type(caches.compile)=="function" then - texio.write(string.format("(compiling luc: %s)",lucname)) - data=loadfile(luaname) - if data then - data=data() - end - if data then - caches.compile(data,luaname,lucname) - return data - end - end - if lucname and lfs.isfile(lucname) then - texio.write(string.format("(load luc: %s)",lucname)) - data=loadfile(lucname) - if data then - data=data() - end - if data then - return data - else - texio.write(string.format("(loading failed: %s)",lucname)) - end - end - if luaname and lfs.isfile(luaname) then - texio.write(string.format("(load lua: %s)",luaname)) - data=loadfile(luaname) - if data then - data=data() - end - if data then - return data - end - end - end -end -function caches.savedata(path,name,data) - local luaname,lucname=makefullname(path,name) - if luaname then - texio.write(string.format("(save: %s)",luaname)) - table.tofile(luaname,data,true) - if lucname and type(caches.compile)=="function" then - os.remove(lucname) - texio.write(string.format("(save: %s)",lucname)) - caches.compile(data,luaname,lucname) - end - end -end -function caches.compile(data,luaname,lucname) - local d=io.loaddata(luaname) - if not d or d=="" then - d=table.serialize(data,true) - end - if d and d~="" then - local f=io.open(lucname,'wb') - if f then - local s=loadstring(d) - if s then - f:write(string.dump(s,true)) - end - f:close() - end - end -end -function table.setmetatableindex(t,f) - if type(t)~="table" then - f=f or t - t={} - end - setmetatable(t,{ __index=f }) - return t -end -arguments={} -if arg then 
- for i=1,#arg do - local k,v=string.match(arg[i],"^%-%-([^=]+)=?(.-)$") - if k and v then - arguments[k]=v - end - end -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['data-con']={ - version=1.100, - comment="companion to luat-lib.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local format,lower,gsub=string.format,string.lower,string.gsub -local trace_cache=false trackers.register("resolvers.cache",function(v) trace_cache=v end) -local trace_containers=false trackers.register("resolvers.containers",function(v) trace_containers=v end) -local trace_storage=false trackers.register("resolvers.storage",function(v) trace_storage=v end) -containers=containers or {} -local containers=containers -containers.usecache=true -local report_containers=logs.reporter("resolvers","containers") -local allocated={} -local mt={ - __index=function(t,k) - if k=="writable" then - local writable=caches.getwritablepath(t.category,t.subcategory) or { "." } - t.writable=writable - return writable - elseif k=="readables" then - local readables=caches.getreadablepaths(t.category,t.subcategory) or { "." } - t.readables=readables - return readables - end - end, - __storage__=true -} -function containers.define(category,subcategory,version,enabled) - if category and subcategory then - local c=allocated[category] - if not c then - c={} - allocated[category]=c - end - local s=c[subcategory] - if not s then - s={ - category=category, - subcategory=subcategory, - storage={}, - enabled=enabled, - version=version or math.pi, - trace=false, - } - setmetatable(s,mt) - c[subcategory]=s - end - return s - end -end -function containers.is_usable(container,name) - return container.enabled and caches and caches.is_writable(container.writable,name) -end -function containers.is_valid(container,name) - if name and name~="" then - local storage=container.storage[name] - return storage and storage.cache_version==container.version - else - return false - end -end -function containers.read(container,name) - local storage=container.storage - local stored=storage[name] - if not stored and container.enabled and caches and containers.usecache then - stored=caches.loaddata(container.readables,name) - if stored and stored.cache_version==container.version then - if trace_cache or trace_containers then - report_containers("action %a, category %a, name %a","load",container.subcategory,name) - end - else - stored=nil - end - storage[name]=stored - elseif stored then - if trace_cache or trace_containers then - report_containers("action %a, category %a, name %a","reuse",container.subcategory,name) - end - end - return stored -end -function containers.write(container,name,data) - if data then - data.cache_version=container.version - if container.enabled and caches then - local unique,shared=data.unique,data.shared - data.unique,data.shared=nil,nil - caches.savedata(container.writable,name,data) - if trace_cache or trace_containers then - report_containers("action %a, category %a, name %a","save",container.subcategory,name) - end - data.unique,data.shared=unique,shared - end - if trace_cache or trace_containers then - report_containers("action %a, category %a, name %a","store",container.subcategory,name) - end - container.storage[name]=data - end - return data -end -function containers.content(container,name) - return container.storage[name] -end -function 
containers.cleanname(name) - return (gsub(lower(name),"[^%w\128-\255]+","-")) -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['luatex-fonts-nod']={ - version=1.001, - comment="companion to luatex-fonts.lua", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end -if tex.attribute[0]~=0 then - texio.write_nl("log","!") - texio.write_nl("log","! Attribute 0 is reserved for ConTeXt's font feature management and has to be") - texio.write_nl("log","! set to zero. Also, some attributes in the range 1-255 are used for special") - texio.write_nl("log","! purposes so setting them at the TeX end might break the font handler.") - texio.write_nl("log","!") - tex.attribute[0]=0 -end -attributes=attributes or {} -attributes.unsetvalue=-0x7FFFFFFF -local numbers,last={},127 -attributes.private=attributes.private or function(name) - local number=numbers[name] - if not number then - if last<255 then - last=last+1 - end - number=last - numbers[name]=number - end - return number -end -nodes={} -nodes.pool={} -nodes.handlers={} -local nodecodes={} for k,v in next,node.types () do nodecodes[string.gsub(v,"_","")]=k end -local whatcodes={} for k,v in next,node.whatsits() do whatcodes[string.gsub(v,"_","")]=k end -local glyphcodes={ [0]="character","glyph","ligature","ghost","left","right" } -local disccodes={ [0]="discretionary","explicit","automatic","regular","first","second" } -nodes.nodecodes=nodecodes -nodes.whatcodes=whatcodes -nodes.whatsitcodes=whatcodes -nodes.glyphcodes=glyphcodes -nodes.disccodes=disccodes -local free_node=node.free -local remove_node=node.remove -local new_node=node.new -local traverse_id=node.traverse_id -nodes.handlers.protectglyphs=node.protect_glyphs -nodes.handlers.unprotectglyphs=node.unprotect_glyphs -local math_code=nodecodes.math -local end_of_math=node.end_of_math -function node.end_of_math(n) - if n.id==math_code and n.subtype==1 then - return n - else - return end_of_math(n) - end -end -function nodes.remove(head,current,free_too) - local t=current - head,current=remove_node(head,current) - if t then - if free_too then - free_node(t) - t=nil - else - t.next,t.prev=nil,nil - end - end - return head,current,t -end -function nodes.delete(head,current) - return nodes.remove(head,current,true) -end -function nodes.pool.kern(k) - local n=new_node("kern",1) - n.kern=k - return n -end -local getfield=node.getfield -local setfield=node.setfield -nodes.getfield=getfield -nodes.setfield=setfield -nodes.getattr=getfield -nodes.setattr=setfield -nodes.tostring=node.tostring or tostring -nodes.copy=node.copy -nodes.copy_list=node.copy_list -nodes.delete=node.delete -nodes.dimensions=node.dimensions -nodes.end_of_math=node.end_of_math -nodes.flush_list=node.flush_list -nodes.flush_node=node.flush_node -nodes.free=node.free -nodes.insert_after=node.insert_after -nodes.insert_before=node.insert_before -nodes.hpack=node.hpack -nodes.new=node.new -nodes.tail=node.tail -nodes.traverse=node.traverse -nodes.traverse_id=node.traverse_id -nodes.slide=node.slide -nodes.vpack=node.vpack -nodes.first_glyph=node.first_glyph -nodes.first_character=node.first_character -nodes.has_glyph=node.has_glyph or node.first_glyph -nodes.current_attr=node.current_attr -nodes.do_ligature_n=node.do_ligature_n -nodes.has_field=node.has_field 
-nodes.last_node=node.last_node -nodes.usedlist=node.usedlist -nodes.protrusion_skippable=node.protrusion_skippable -nodes.write=node.write -nodes.has_attribute=node.has_attribute -nodes.set_attribute=node.set_attribute -nodes.unset_attribute=node.unset_attribute -nodes.protect_glyphs=node.protect_glyphs -nodes.unprotect_glyphs=node.unprotect_glyphs -nodes.kerning=node.kerning -nodes.ligaturing=node.ligaturing -nodes.mlist_to_hlist=node.mlist_to_hlist -local direct=node.direct -local nuts={} -nodes.nuts=nuts -local tonode=direct.tonode -local tonut=direct.todirect -nodes.tonode=tonode -nodes.tonut=tonut -nuts.tonode=tonode -nuts.tonut=tonut -local getfield=direct.getfield -local setfield=direct.setfield -nuts.getfield=getfield -nuts.setfield=setfield -nuts.getnext=direct.getnext -nuts.getprev=direct.getprev -nuts.getid=direct.getid -nuts.getattr=getfield -nuts.setattr=setfield -nuts.getfont=direct.getfont -nuts.getsubtype=direct.getsubtype -nuts.getchar=direct.getchar -nuts.insert_before=direct.insert_before -nuts.insert_after=direct.insert_after -nuts.delete=direct.delete -nuts.copy=direct.copy -nuts.tail=direct.tail -nuts.flush_list=direct.flush_list -nuts.end_of_math=direct.end_of_math -nuts.traverse=direct.traverse -nuts.traverse_id=direct.traverse_id -nuts.getprop=nuts.getattr -nuts.setprop=nuts.setattr -local new_nut=direct.new -nuts.new=new_nut -nuts.pool={} -function nuts.pool.kern(k) - local n=new_nut("kern",1) - setfield(n,"kern",k) - return n -end -local propertydata=direct.get_properties_table() -nodes.properties={ data=propertydata } -direct.set_properties_mode(true,true) -function direct.set_properties_mode() end -nuts.getprop=function(n,k) - local p=propertydata[n] - if p then - return p[k] - end -end -nuts.setprop=function(n,k,v) - if v then - local p=propertydata[n] - if p then - p[k]=v - else - propertydata[n]={ [k]=v } - end - end -end -nodes.setprop=nodes.setproperty -nodes.getprop=nodes.getproperty - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-ini']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local allocate=utilities.storage.allocate -local report_defining=logs.reporter("fonts","defining") -fonts=fonts or {} -local fonts=fonts -fonts.hashes={ identifiers=allocate() } -fonts.tables=fonts.tables or {} -fonts.helpers=fonts.helpers or {} -fonts.tracers=fonts.tracers or {} -fonts.specifiers=fonts.specifiers or {} -fonts.analyzers={} -fonts.readers={} -fonts.definers={ methods={} } -fonts.loggers={ register=function() end } -fontloader.totable=fontloader.to_table - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-con']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local next,tostring,rawget=next,tostring,rawget -local format,match,lower,gsub=string.format,string.match,string.lower,string.gsub -local utfbyte=utf.byte -local sort,insert,concat,sortedkeys,serialize,fastcopy=table.sort,table.insert,table.concat,table.sortedkeys,table.serialize,table.fastcopy -local derivetable=table.derive -local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end) 
-local trace_scaling=false trackers.register("fonts.scaling",function(v) trace_scaling=v end) -local report_defining=logs.reporter("fonts","defining") -local fonts=fonts -local constructors=fonts.constructors or {} -fonts.constructors=constructors -local handlers=fonts.handlers or {} -fonts.handlers=handlers -local allocate=utilities.storage.allocate -local setmetatableindex=table.setmetatableindex -constructors.dontembed=allocate() -constructors.autocleanup=true -constructors.namemode="fullpath" -constructors.version=1.01 -constructors.cache=containers.define("fonts","constructors",constructors.version,false) -constructors.privateoffset=0xF0000 -constructors.cacheintex=true -constructors.keys={ - properties={ - encodingbytes="number", - embedding="number", - cidinfo={}, - format="string", - fontname="string", - fullname="string", - filename="filename", - psname="string", - name="string", - virtualized="boolean", - hasitalics="boolean", - autoitalicamount="basepoints", - nostackmath="boolean", - noglyphnames="boolean", - mode="string", - hasmath="boolean", - mathitalics="boolean", - textitalics="boolean", - finalized="boolean", - }, - parameters={ - mathsize="number", - scriptpercentage="float", - scriptscriptpercentage="float", - units="cardinal", - designsize="scaledpoints", - expansion={ - stretch="integerscale", - shrink="integerscale", - step="integerscale", - auto="boolean", - }, - protrusion={ - auto="boolean", - }, - slantfactor="float", - extendfactor="float", - factor="float", - hfactor="float", - vfactor="float", - size="scaledpoints", - units="scaledpoints", - scaledpoints="scaledpoints", - slantperpoint="scaledpoints", - spacing={ - width="scaledpoints", - stretch="scaledpoints", - shrink="scaledpoints", - extra="scaledpoints", - }, - xheight="scaledpoints", - quad="scaledpoints", - ascender="scaledpoints", - descender="scaledpoints", - synonyms={ - space="spacing.width", - spacestretch="spacing.stretch", - spaceshrink="spacing.shrink", - extraspace="spacing.extra", - x_height="xheight", - space_stretch="spacing.stretch", - space_shrink="spacing.shrink", - extra_space="spacing.extra", - em="quad", - ex="xheight", - slant="slantperpoint", - }, - }, - description={ - width="basepoints", - height="basepoints", - depth="basepoints", - boundingbox={}, - }, - character={ - width="scaledpoints", - height="scaledpoints", - depth="scaledpoints", - italic="scaledpoints", - }, -} -local designsizes=allocate() -constructors.designsizes=designsizes -local loadedfonts=allocate() -constructors.loadedfonts=loadedfonts -local factors={ - pt=65536.0, - bp=65781.8, -} -function constructors.setfactor(f) - constructors.factor=factors[f or 'pt'] or factors.pt -end -constructors.setfactor() -function constructors.scaled(scaledpoints,designsize) - if scaledpoints<0 then - if designsize then - local factor=constructors.factor - if designsize>factor then - return (- scaledpoints/1000)*designsize - else - return (- scaledpoints/1000)*designsize*factor - end - else - return (- scaledpoints/1000)*10*factor - end - else - return scaledpoints - end -end -function constructors.cleanuptable(tfmdata) - if constructors.autocleanup and tfmdata.properties.virtualized then - for k,v in next,tfmdata.characters do - if v.commands then v.commands=nil end - end - end -end -function constructors.calculatescale(tfmdata,scaledpoints) - local parameters=tfmdata.parameters - if scaledpoints<0 then - scaledpoints=(- scaledpoints/1000)*(tfmdata.designsize or parameters.designsize) - end - return 
scaledpoints,scaledpoints/(parameters.units or 1000) -end -local unscaled={ - ScriptPercentScaleDown=true, - ScriptScriptPercentScaleDown=true, - RadicalDegreeBottomRaisePercent=true -} -function constructors.assignmathparameters(target,original) - local mathparameters=original.mathparameters - if mathparameters and next(mathparameters) then - local targetparameters=target.parameters - local targetproperties=target.properties - local targetmathparameters={} - local factor=targetproperties.math_is_scaled and 1 or targetparameters.factor - for name,value in next,mathparameters do - if unscaled[name] then - targetmathparameters[name]=value - else - targetmathparameters[name]=value*factor - end - end - if not targetmathparameters.FractionDelimiterSize then - targetmathparameters.FractionDelimiterSize=1.01*targetparameters.size - end - if not mathparameters.FractionDelimiterDisplayStyleSize then - targetmathparameters.FractionDelimiterDisplayStyleSize=2.40*targetparameters.size - end - target.mathparameters=targetmathparameters - end -end -function constructors.beforecopyingcharacters(target,original) -end -function constructors.aftercopyingcharacters(target,original) -end -constructors.sharefonts=false -constructors.nofsharedfonts=0 -local sharednames={} -function constructors.trytosharefont(target,tfmdata) - if constructors.sharefonts then - local characters=target.characters - local n=1 - local t={ target.psname } - local u=sortedkeys(characters) - for i=1,#u do - local k=u[i] - n=n+1;t[n]=k - n=n+1;t[n]=characters[k].index or k - end - local h=md5.HEX(concat(t," ")) - local s=sharednames[h] - if s then - if trace_defining then - report_defining("font %a uses backend resources of font %a",target.fullname,s) - end - target.fullname=s - constructors.nofsharedfonts=constructors.nofsharedfonts+1 - target.properties.sharedwith=s - else - sharednames[h]=target.fullname - end - end -end -function constructors.enhanceparameters(parameters) - local xheight=parameters.x_height - local quad=parameters.quad - local space=parameters.space - local stretch=parameters.space_stretch - local shrink=parameters.space_shrink - local extra=parameters.extra_space - local slant=parameters.slant - parameters.xheight=xheight - parameters.spacestretch=stretch - parameters.spaceshrink=shrink - parameters.extraspace=extra - parameters.em=quad - parameters.ex=xheight - parameters.slantperpoint=slant - parameters.spacing={ - width=space, - stretch=stretch, - shrink=shrink, - extra=extra, - } -end -function constructors.scale(tfmdata,specification) - local target={} - if tonumber(specification) then - specification={ size=specification } - end - target.specification=specification - local scaledpoints=specification.size - local relativeid=specification.relativeid - local properties=tfmdata.properties or {} - local goodies=tfmdata.goodies or {} - local resources=tfmdata.resources or {} - local descriptions=tfmdata.descriptions or {} - local characters=tfmdata.characters or {} - local changed=tfmdata.changed or {} - local shared=tfmdata.shared or {} - local parameters=tfmdata.parameters or {} - local mathparameters=tfmdata.mathparameters or {} - local targetcharacters={} - local targetdescriptions=derivetable(descriptions) - local targetparameters=derivetable(parameters) - local targetproperties=derivetable(properties) - local targetgoodies=goodies - target.characters=targetcharacters - target.descriptions=targetdescriptions - target.parameters=targetparameters - target.properties=targetproperties - 
target.goodies=targetgoodies - target.shared=shared - target.resources=resources - target.unscaled=tfmdata - local mathsize=tonumber(specification.mathsize) or 0 - local textsize=tonumber(specification.textsize) or scaledpoints - local forcedsize=tonumber(parameters.mathsize ) or 0 - local extrafactor=tonumber(specification.factor ) or 1 - if (mathsize==2 or forcedsize==2) and parameters.scriptpercentage then - scaledpoints=parameters.scriptpercentage*textsize/100 - elseif (mathsize==3 or forcedsize==3) and parameters.scriptscriptpercentage then - scaledpoints=parameters.scriptscriptpercentage*textsize/100 - elseif forcedsize>1000 then - scaledpoints=forcedsize - end - targetparameters.mathsize=mathsize - targetparameters.textsize=textsize - targetparameters.forcedsize=forcedsize - targetparameters.extrafactor=extrafactor - local tounicode=fonts.mappings.tounicode - local defaultwidth=resources.defaultwidth or 0 - local defaultheight=resources.defaultheight or 0 - local defaultdepth=resources.defaultdepth or 0 - local units=parameters.units or 1000 - if target.fonts then - target.fonts=fastcopy(target.fonts) - end - targetproperties.language=properties.language or "dflt" - targetproperties.script=properties.script or "dflt" - targetproperties.mode=properties.mode or "base" - local askedscaledpoints=scaledpoints - local scaledpoints,delta=constructors.calculatescale(tfmdata,scaledpoints,nil,specification) - local hdelta=delta - local vdelta=delta - target.designsize=parameters.designsize - target.units_per_em=units - local direction=properties.direction or tfmdata.direction or 0 - target.direction=direction - properties.direction=direction - target.size=scaledpoints - target.encodingbytes=properties.encodingbytes or 1 - target.embedding=properties.embedding or "subset" - target.tounicode=1 - target.cidinfo=properties.cidinfo - target.format=properties.format - target.cache=constructors.cacheintex and "yes" or "renew" - local fontname=properties.fontname or tfmdata.fontname - local fullname=properties.fullname or tfmdata.fullname - local filename=properties.filename or tfmdata.filename - local psname=properties.psname or tfmdata.psname - local name=properties.name or tfmdata.name - if not psname or psname=="" then - psname=fontname or (fullname and fonts.names.cleanname(fullname)) - end - target.fontname=fontname - target.fullname=fullname - target.filename=filename - target.psname=psname - target.name=name - properties.fontname=fontname - properties.fullname=fullname - properties.filename=filename - properties.psname=psname - properties.name=name - local expansion=parameters.expansion - if expansion then - target.stretch=expansion.stretch - target.shrink=expansion.shrink - target.step=expansion.step - target.auto_expand=expansion.auto - end - local protrusion=parameters.protrusion - if protrusion then - target.auto_protrude=protrusion.auto - end - local extendfactor=parameters.extendfactor or 0 - if extendfactor~=0 and extendfactor~=1 then - hdelta=hdelta*extendfactor - target.extend=extendfactor*1000 - else - target.extend=1000 - end - local slantfactor=parameters.slantfactor or 0 - if slantfactor~=0 then - target.slant=slantfactor*1000 - else - target.slant=0 - end - targetparameters.factor=delta - targetparameters.hfactor=hdelta - targetparameters.vfactor=vdelta - targetparameters.size=scaledpoints - targetparameters.units=units - targetparameters.scaledpoints=askedscaledpoints - local isvirtual=properties.virtualized or tfmdata.type=="virtual" - local hasquality=target.auto_expand or 
target.auto_protrude - local hasitalics=properties.hasitalics - local autoitalicamount=properties.autoitalicamount - local stackmath=not properties.nostackmath - local nonames=properties.noglyphnames - local haskerns=properties.haskerns or properties.mode=="base" - local hasligatures=properties.hasligatures or properties.mode=="base" - if changed and not next(changed) then - changed=false - end - target.type=isvirtual and "virtual" or "real" - target.postprocessors=tfmdata.postprocessors - local targetslant=(parameters.slant or parameters[1] or 0)*factors.pt - local targetspace=(parameters.space or parameters[2] or 0)*hdelta - local targetspace_stretch=(parameters.space_stretch or parameters[3] or 0)*hdelta - local targetspace_shrink=(parameters.space_shrink or parameters[4] or 0)*hdelta - local targetx_height=(parameters.x_height or parameters[5] or 0)*vdelta - local targetquad=(parameters.quad or parameters[6] or 0)*hdelta - local targetextra_space=(parameters.extra_space or parameters[7] or 0)*hdelta - targetparameters.slant=targetslant - targetparameters.space=targetspace - targetparameters.space_stretch=targetspace_stretch - targetparameters.space_shrink=targetspace_shrink - targetparameters.x_height=targetx_height - targetparameters.quad=targetquad - targetparameters.extra_space=targetextra_space - local ascender=parameters.ascender - if ascender then - targetparameters.ascender=delta*ascender - end - local descender=parameters.descender - if descender then - targetparameters.descender=delta*descender - end - constructors.enhanceparameters(targetparameters) - local protrusionfactor=(targetquad~=0 and 1000/targetquad) or 0 - local scaledwidth=defaultwidth*hdelta - local scaledheight=defaultheight*vdelta - local scaleddepth=defaultdepth*vdelta - local hasmath=(properties.hasmath or next(mathparameters)) and true - if hasmath then - constructors.assignmathparameters(target,tfmdata) - properties.hasmath=true - target.nomath=false - target.MathConstants=target.mathparameters - else - properties.hasmath=false - target.nomath=true - target.mathparameters=nil - end - local italickey="italic" - local useitalics=true - if hasmath then - autoitalicamount=false - elseif properties.textitalics then - italickey="italic_correction" - useitalics=false - if properties.delaytextitalics then - autoitalicamount=false - end - end - if trace_defining then - report_defining("defining tfm, name %a, fullname %a, filename %a, hscale %a, vscale %a, math %a, italics %a", - name,fullname,filename,hdelta,vdelta, - hasmath and "enabled" or "disabled",useitalics and "enabled" or "disabled") - end - constructors.beforecopyingcharacters(target,tfmdata) - local sharedkerns={} - for unicode,character in next,characters do - local chr,description,index - if changed then - local c=changed[unicode] - if c then - description=descriptions[c] or descriptions[unicode] or character - character=characters[c] or character - index=description.index or c - else - description=descriptions[unicode] or character - index=description.index or unicode - end - else - description=descriptions[unicode] or character - index=description.index or unicode - end - local width=description.width - local height=description.height - local depth=description.depth - if width then width=hdelta*width else width=scaledwidth end - if height then height=vdelta*height else height=scaledheight end - if depth and depth~=0 then - depth=delta*depth - if nonames then - chr={ - index=index, - height=height, - depth=depth, - width=width, - } - else - chr={ - 
name=description.name, - index=index, - height=height, - depth=depth, - width=width, - } - end - else - if nonames then - chr={ - index=index, - height=height, - width=width, - } - else - chr={ - name=description.name, - index=index, - height=height, - width=width, - } - end - end - local isunicode=description.unicode - if isunicode then - chr.unicode=isunicode - chr.tounicode=tounicode(isunicode) - end - if hasquality then - local ve=character.expansion_factor - if ve then - chr.expansion_factor=ve*1000 - end - local vl=character.left_protruding - if vl then - chr.left_protruding=protrusionfactor*width*vl - end - local vr=character.right_protruding - if vr then - chr.right_protruding=protrusionfactor*width*vr - end - end - if autoitalicamount then - local vi=description.italic - if not vi then - local vi=description.boundingbox[3]-description.width+autoitalicamount - if vi>0 then - chr[italickey]=vi*hdelta - end - elseif vi~=0 then - chr[italickey]=vi*hdelta - end - elseif hasitalics then - local vi=description.italic - if vi and vi~=0 then - chr[italickey]=vi*hdelta - end - end - if hasmath then - local vn=character.next - if vn then - chr.next=vn - else - local vv=character.vert_variants - if vv then - local t={} - for i=1,#vv do - local vvi=vv[i] - t[i]={ - ["start"]=(vvi["start"] or 0)*vdelta, - ["end"]=(vvi["end"] or 0)*vdelta, - ["advance"]=(vvi["advance"] or 0)*vdelta, - ["extender"]=vvi["extender"], - ["glyph"]=vvi["glyph"], - } - end - chr.vert_variants=t - else - local hv=character.horiz_variants - if hv then - local t={} - for i=1,#hv do - local hvi=hv[i] - t[i]={ - ["start"]=(hvi["start"] or 0)*hdelta, - ["end"]=(hvi["end"] or 0)*hdelta, - ["advance"]=(hvi["advance"] or 0)*hdelta, - ["extender"]=hvi["extender"], - ["glyph"]=hvi["glyph"], - } - end - chr.horiz_variants=t - end - end - end - local va=character.top_accent - if va then - chr.top_accent=vdelta*va - end - if stackmath then - local mk=character.mathkerns - if mk then - local kerns={} - local v=mk.top_right if v then local k={} for i=1,#v do local vi=v[i] - k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern } - end kerns.top_right=k end - local v=mk.top_left if v then local k={} for i=1,#v do local vi=v[i] - k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern } - end kerns.top_left=k end - local v=mk.bottom_left if v then local k={} for i=1,#v do local vi=v[i] - k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern } - end kerns.bottom_left=k end - local v=mk.bottom_right if v then local k={} for i=1,#v do local vi=v[i] - k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern } - end kerns.bottom_right=k end - chr.mathkern=kerns - end - end - end - if haskerns then - local vk=character.kerns - if vk then - local s=sharedkerns[vk] - if not s then - s={} - for k,v in next,vk do s[k]=v*hdelta end - sharedkerns[vk]=s - end - chr.kerns=s - end - end - if hasligatures then - local vl=character.ligatures - if vl then - if true then - chr.ligatures=vl - else - local tt={} - for i,l in next,vl do - tt[i]=l - end - chr.ligatures=tt - end - end - end - if isvirtual then - local vc=character.commands - if vc then - local ok=false - for i=1,#vc do - local key=vc[i][1] - if key=="right" or key=="down" then - ok=true - break - end - end - if ok then - local tt={} - for i=1,#vc do - local ivc=vc[i] - local key=ivc[1] - if key=="right" then - tt[i]={ key,ivc[2]*hdelta } - elseif key=="down" then - tt[i]={ key,ivc[2]*vdelta } - elseif key=="rule" then - tt[i]={ key,ivc[2]*vdelta,ivc[3]*hdelta } - else - tt[i]=ivc - end - end - chr.commands=tt 
- else - chr.commands=vc - end - chr.index=nil - end - end - targetcharacters[unicode]=chr - end - constructors.aftercopyingcharacters(target,tfmdata) - constructors.trytosharefont(target,tfmdata) - return target -end -function constructors.finalize(tfmdata) - if tfmdata.properties and tfmdata.properties.finalized then - return - end - if not tfmdata.characters then - return nil - end - if not tfmdata.goodies then - tfmdata.goodies={} - end - local parameters=tfmdata.parameters - if not parameters then - return nil - end - if not parameters.expansion then - parameters.expansion={ - stretch=tfmdata.stretch or 0, - shrink=tfmdata.shrink or 0, - step=tfmdata.step or 0, - auto=tfmdata.auto_expand or false, - } - end - if not parameters.protrusion then - parameters.protrusion={ - auto=auto_protrude - } - end - if not parameters.size then - parameters.size=tfmdata.size - end - if not parameters.extendfactor then - parameters.extendfactor=tfmdata.extend or 0 - end - if not parameters.slantfactor then - parameters.slantfactor=tfmdata.slant or 0 - end - if not parameters.designsize then - parameters.designsize=tfmdata.designsize or (factors.pt*10) - end - if not parameters.units then - parameters.units=tfmdata.units_per_em or 1000 - end - if not tfmdata.descriptions then - local descriptions={} - setmetatableindex(descriptions,function(t,k) local v={} t[k]=v return v end) - tfmdata.descriptions=descriptions - end - local properties=tfmdata.properties - if not properties then - properties={} - tfmdata.properties=properties - end - if not properties.virtualized then - properties.virtualized=tfmdata.type=="virtual" - end - if not tfmdata.properties then - tfmdata.properties={ - fontname=tfmdata.fontname, - filename=tfmdata.filename, - fullname=tfmdata.fullname, - name=tfmdata.name, - psname=tfmdata.psname, - encodingbytes=tfmdata.encodingbytes or 1, - embedding=tfmdata.embedding or "subset", - tounicode=tfmdata.tounicode or 1, - cidinfo=tfmdata.cidinfo or nil, - format=tfmdata.format or "type1", - direction=tfmdata.direction or 0, - } - end - if not tfmdata.resources then - tfmdata.resources={} - end - if not tfmdata.shared then - tfmdata.shared={} - end - if not properties.hasmath then - properties.hasmath=not tfmdata.nomath - end - tfmdata.MathConstants=nil - tfmdata.postprocessors=nil - tfmdata.fontname=nil - tfmdata.filename=nil - tfmdata.fullname=nil - tfmdata.name=nil - tfmdata.psname=nil - tfmdata.encodingbytes=nil - tfmdata.embedding=nil - tfmdata.tounicode=nil - tfmdata.cidinfo=nil - tfmdata.format=nil - tfmdata.direction=nil - tfmdata.type=nil - tfmdata.nomath=nil - tfmdata.designsize=nil - tfmdata.size=nil - tfmdata.stretch=nil - tfmdata.shrink=nil - tfmdata.step=nil - tfmdata.auto_expand=nil - tfmdata.auto_protrude=nil - tfmdata.extend=nil - tfmdata.slant=nil - tfmdata.units_per_em=nil - tfmdata.cache=nil - properties.finalized=true - return tfmdata -end -local hashmethods={} -constructors.hashmethods=hashmethods -function constructors.hashfeatures(specification) - local features=specification.features - if features then - local t,tn={},0 - for category,list in next,features do - if next(list) then - local hasher=hashmethods[category] - if hasher then - local hash=hasher(list) - if hash then - tn=tn+1 - t[tn]=category..":"..hash - end - end - end - end - if tn>0 then - return concat(t," & ") - end - end - return "unknown" -end -hashmethods.normal=function(list) - local s={} - local n=0 - for k,v in next,list do - if not k then - elseif k=="number" or k=="features" then - else - n=n+1 - 
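--[[ Editor's note, not part of the fontloader sources: the hashmethods.normal
builder around this point turns a feature table into a stable string by
collecting the keys, sorting them and joining "key=value" pairs, so that the
same feature set always produces the same font instance hash. A minimal
standalone sketch of that idea (hypothetical helper name; simplified, the real
code also skips a few bookkeeping keys):

local function hashfeatureset(list)
  local s, n = { }, 0
  for k in next, list do
    n = n + 1
    s[n] = k
  end
  table.sort(s)
  for i = 1, n do
    local k = s[i]
    s[i] = k .. "=" .. tostring(list[k])
  end
  return table.concat(s, "+")
end

-- hashfeatureset { liga = true, kern = true, mode = "node" }
--   --> "kern=true+liga=true+mode=node"
]]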
s[n]=k - end - end - if n>0 then - sort(s) - for i=1,n do - local k=s[i] - s[i]=k..'='..tostring(list[k]) - end - return concat(s,"+") - end -end -function constructors.hashinstance(specification,force) - local hash,size,fallbacks=specification.hash,specification.size,specification.fallbacks - if force or not hash then - hash=constructors.hashfeatures(specification) - specification.hash=hash - end - if size<1000 and designsizes[hash] then - size=math.round(constructors.scaled(size,designsizes[hash])) - specification.size=size - end - if fallbacks then - return hash..' @ '..tostring(size)..' @ '..fallbacks - else - return hash..' @ '..tostring(size) - end -end -function constructors.setname(tfmdata,specification) - if constructors.namemode=="specification" then - local specname=specification.specification - if specname then - tfmdata.properties.name=specname - if trace_defining then - report_otf("overloaded fontname %a",specname) - end - end - end -end -function constructors.checkedfilename(data) - local foundfilename=data.foundfilename - if not foundfilename then - local askedfilename=data.filename or "" - if askedfilename~="" then - askedfilename=resolvers.resolve(askedfilename) - foundfilename=resolvers.findbinfile(askedfilename,"") or "" - if foundfilename=="" then - report_defining("source file %a is not found",askedfilename) - foundfilename=resolvers.findbinfile(file.basename(askedfilename),"") or "" - if foundfilename~="" then - report_defining("using source file %a due to cache mismatch",foundfilename) - end - end - end - data.foundfilename=foundfilename - end - return foundfilename -end -local formats=allocate() -fonts.formats=formats -setmetatableindex(formats,function(t,k) - local l=lower(k) - if rawget(t,k) then - t[k]=l - return l - end - return rawget(t,file.suffix(l)) -end) -local locations={} -local function setindeed(mode,target,group,name,action,position) - local t=target[mode] - if not t then - report_defining("fatal error in setting feature %a, group %a, mode %a",name,group,mode) - os.exit() - elseif position then - insert(t,position,{ name=name,action=action }) - else - for i=1,#t do - local ti=t[i] - if ti.name==name then - ti.action=action - return - end - end - insert(t,{ name=name,action=action }) - end -end -local function set(group,name,target,source) - target=target[group] - if not target then - report_defining("fatal target error in setting feature %a, group %a",name,group) - os.exit() - end - local source=source[group] - if not source then - report_defining("fatal source error in setting feature %a, group %a",name,group) - os.exit() - end - local node=source.node - local base=source.base - local position=source.position - if node then - setindeed("node",target,group,name,node,position) - end - if base then - setindeed("base",target,group,name,base,position) - end -end -local function register(where,specification) - local name=specification.name - if name and name~="" then - local default=specification.default - local description=specification.description - local initializers=specification.initializers - local processors=specification.processors - local manipulators=specification.manipulators - local modechecker=specification.modechecker - if default then - where.defaults[name]=default - end - if description and description~="" then - where.descriptions[name]=description - end - if initializers then - set('initializers',name,where,specification) - end - if processors then - set('processors',name,where,specification) - end - if manipulators then - 
set('manipulators',name,where,specification) - end - if modechecker then - where.modechecker=modechecker - end - end -end -constructors.registerfeature=register -function constructors.getfeatureaction(what,where,mode,name) - what=handlers[what].features - if what then - where=what[where] - if where then - mode=where[mode] - if mode then - for i=1,#mode do - local m=mode[i] - if m.name==name then - return m.action - end - end - end - end - end -end -function constructors.newhandler(what) - local handler=handlers[what] - if not handler then - handler={} - handlers[what]=handler - end - return handler -end -function constructors.newfeatures(what) - local handler=handlers[what] - local features=handler.features - if not features then - local tables=handler.tables - local statistics=handler.statistics - features=allocate { - defaults={}, - descriptions=tables and tables.features or {}, - used=statistics and statistics.usedfeatures or {}, - initializers={ base={},node={} }, - processors={ base={},node={} }, - manipulators={ base={},node={} }, - } - features.register=function(specification) return register(features,specification) end - handler.features=features - end - return features -end -function constructors.checkedfeatures(what,features) - local defaults=handlers[what].features.defaults - if features and next(features) then - features=fastcopy(features) - for key,value in next,defaults do - if features[key]==nil then - features[key]=value - end - end - return features - else - return fastcopy(defaults) - end -end -function constructors.initializefeatures(what,tfmdata,features,trace,report) - if features and next(features) then - local properties=tfmdata.properties or {} - local whathandler=handlers[what] - local whatfeatures=whathandler.features - local whatinitializers=whatfeatures.initializers - local whatmodechecker=whatfeatures.modechecker - local mode=properties.mode or (whatmodechecker and whatmodechecker(tfmdata,features,features.mode)) or features.mode or "base" - properties.mode=mode - features.mode=mode - local done={} - while true do - local redo=false - local initializers=whatfeatures.initializers[mode] - if initializers then - for i=1,#initializers do - local step=initializers[i] - local feature=step.name - local value=features[feature] - if not value then - elseif done[feature] then - else - local action=step.action - if trace then - report("initializing feature %a to %a for mode %a for font %a",feature, - value,mode,tfmdata.properties.fullname) - end - action(tfmdata,value,features) - if mode~=properties.mode or mode~=features.mode then - if whatmodechecker then - properties.mode=whatmodechecker(tfmdata,features,properties.mode) - features.mode=properties.mode - end - if mode~=properties.mode then - mode=properties.mode - redo=true - end - end - done[feature]=true - end - if redo then - break - end - end - if not redo then - break - end - else - break - end - end - properties.mode=mode - return true - else - return false - end -end -function constructors.collectprocessors(what,tfmdata,features,trace,report) - local processes,nofprocesses={},0 - if features and next(features) then - local properties=tfmdata.properties - local whathandler=handlers[what] - local whatfeatures=whathandler.features - local whatprocessors=whatfeatures.processors - local mode=properties.mode - local processors=whatprocessors[mode] - if processors then - for i=1,#processors do - local step=processors[i] - local feature=step.name - if features[feature] then - local action=step.action - if trace then - 
report("installing feature processor %a for mode %a for font %a",feature,mode,tfmdata.properties.fullname) - end - if action then - nofprocesses=nofprocesses+1 - processes[nofprocesses]=action - end - end - end - elseif trace then - report("no feature processors for mode %a for font %a",mode,properties.fullname) - end - end - return processes -end -function constructors.applymanipulators(what,tfmdata,features,trace,report) - if features and next(features) then - local properties=tfmdata.properties - local whathandler=handlers[what] - local whatfeatures=whathandler.features - local whatmanipulators=whatfeatures.manipulators - local mode=properties.mode - local manipulators=whatmanipulators[mode] - if manipulators then - for i=1,#manipulators do - local step=manipulators[i] - local feature=step.name - local value=features[feature] - if value then - local action=step.action - if trace then - report("applying feature manipulator %a for mode %a for font %a",feature,mode,properties.fullname) - end - if action then - action(tfmdata,feature,value) - end - end - end - end - end -end -function constructors.addcoreunicodes(unicodes) - if not unicodes then - unicodes={} - end - unicodes.space=0x0020 - unicodes.hyphen=0x002D - unicodes.zwj=0x200D - unicodes.zwnj=0x200C - return unicodes -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['luatex-font-enc']={ - version=1.001, - comment="companion to luatex-*.tex", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end -local fonts=fonts -fonts.encodings={} -fonts.encodings.agl={} -fonts.encodings.known={} -setmetatable(fonts.encodings.agl,{ __index=function(t,k) - if k=="unicodes" then - texio.write(" ") - local unicodes=dofile(resolvers.findfile("font-age.lua")) - fonts.encodings.agl={ unicodes=unicodes } - return unicodes - else - return nil - end -end }) - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-cid']={ - version=1.001, - comment="companion to font-otf.lua (cidmaps)", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local format,match,lower=string.format,string.match,string.lower -local tonumber=tonumber -local P,S,R,C,V,lpegmatch=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.V,lpeg.match -local fonts,logs,trackers=fonts,logs,trackers -local trace_loading=false trackers.register("otf.loading",function(v) trace_loading=v end) -local report_otf=logs.reporter("fonts","otf loading") -local cid={} -fonts.cid=cid -local cidmap={} -local cidmax=10 -local number=C(R("09","af","AF")^1) -local space=S(" \n\r\t") -local spaces=space^0 -local period=P(".") -local periods=period*period -local name=P("/")*C((1-space)^1) -local unicodes,names={},{} -local function do_one(a,b) - unicodes[tonumber(a)]=tonumber(b,16) -end -local function do_range(a,b,c) - c=tonumber(c,16) - for i=tonumber(a),tonumber(b) do - unicodes[i]=c - c=c+1 - end -end -local function do_name(a,b) - names[tonumber(a)]=b -end -local grammar=P { "start", - start=number*spaces*number*V("series"), - series=(spaces*(V("one")+V("range")+V("named")))^1, - one=(number*spaces*number)/do_one, - range=(number*periods*number*spaces*number)/do_range, - 
named=(number*spaces*name)/do_name -} -local function loadcidfile(filename) - local data=io.loaddata(filename) - if data then - unicodes,names={},{} - lpegmatch(grammar,data) - local supplement,registry,ordering=match(filename,"^(.-)%-(.-)%-()%.(.-)$") - return { - supplement=supplement, - registry=registry, - ordering=ordering, - filename=filename, - unicodes=unicodes, - names=names, - } - end -end -cid.loadfile=loadcidfile -local template="%s-%s-%s.cidmap" -local function locate(registry,ordering,supplement) - local filename=format(template,registry,ordering,supplement) - local hashname=lower(filename) - local found=cidmap[hashname] - if not found then - if trace_loading then - report_otf("checking cidmap, registry %a, ordering %a, supplement %a, filename %a",registry,ordering,supplement,filename) - end - local fullname=resolvers.findfile(filename,'cid') or "" - if fullname~="" then - found=loadcidfile(fullname) - if found then - if trace_loading then - report_otf("using cidmap file %a",filename) - end - cidmap[hashname]=found - found.usedname=file.basename(filename) - end - end - end - return found -end -function cid.getmap(specification) - if not specification then - report_otf("invalid cidinfo specification, table expected") - return - end - local registry=specification.registry - local ordering=specification.ordering - local supplement=specification.supplement - local filename=format(registry,ordering,supplement) - local lowername=lower(filename) - local found=cidmap[lowername] - if found then - return found - end - if ordering=="Identity" then - local found={ - supplement=supplement, - registry=registry, - ordering=ordering, - filename=filename, - unicodes={}, - names={}, - } - cidmap[lowername]=found - return found - end - if trace_loading then - report_otf("cidmap needed, registry %a, ordering %a, supplement %a",registry,ordering,supplement) - end - found=locate(registry,ordering,supplement) - if not found then - local supnum=tonumber(supplement) - local cidnum=nil - if supnum0 then - for s=supnum-1,0,-1 do - local c=locate(registry,ordering,s) - if c then - found,cidnum=c,s - break - end - end - end - registry=lower(registry) - ordering=lower(ordering) - if found and cidnum>0 then - for s=0,cidnum-1 do - local filename=format(template,registry,ordering,s) - if not cidmap[filename] then - cidmap[filename]=found - end - end - end - end - return found -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-map']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local tonumber,next,type=tonumber,next,type -local match,format,find,concat,gsub,lower=string.match,string.format,string.find,table.concat,string.gsub,string.lower -local P,R,S,C,Ct,Cc,lpegmatch=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cc,lpeg.match -local utfbyte=utf.byte -local floor=math.floor -local formatters=string.formatters -local trace_loading=false trackers.register("fonts.loading",function(v) trace_loading=v end) -local trace_mapping=false trackers.register("fonts.mapping",function(v) trace_unimapping=v end) -local report_fonts=logs.reporter("fonts","loading") -local fonts=fonts or {} -local mappings=fonts.mappings or {} -fonts.mappings=mappings -local function loadlumtable(filename) - local lumname=file.replacesuffix(file.basename(filename),"lum") - local 
lumfile=resolvers.findfile(lumname,"map") or "" - if lumfile~="" and lfs.isfile(lumfile) then - if trace_loading or trace_mapping then - report_fonts("loading map table %a",lumfile) - end - lumunic=dofile(lumfile) - return lumunic,lumfile - end -end -local hex=R("AF","09") -local hexfour=(hex*hex*hex*hex)/function(s) return tonumber(s,16) end -local hexsix=(hex*hex*hex*hex*hex*hex)/function(s) return tonumber(s,16) end -local dec=(R("09")^1)/tonumber -local period=P(".") -local unicode=P("uni")*(hexfour*(period+P(-1))*Cc(false)+Ct(hexfour^1)*Cc(true)) -local ucode=P("u")*(hexsix*(period+P(-1))*Cc(false)+Ct(hexsix^1)*Cc(true)) -local index=P("index")*dec*Cc(false) -local parser=unicode+ucode+index -local parsers={} -local function makenameparser(str) - if not str or str=="" then - return parser - else - local p=parsers[str] - if not p then - p=P(str)*period*dec*Cc(false) - parsers[str]=p - end - return p - end -end -local f_single=formatters["%04X"] -local f_double=formatters["%04X%04X"] -local function tounicode16(unicode,name) - if unicode<0x10000 then - return f_single(unicode) - elseif unicode<0x1FFFFFFFFF then - return f_double(floor(unicode/1024),unicode%1024+0xDC00) - else - report_fonts("can't convert %a in %a into tounicode",unicode,name) - end -end -local function tounicode16sequence(unicodes,name) - local t={} - for l=1,#unicodes do - local u=unicodes[l] - if u<0x10000 then - t[l]=f_single(u) - elseif unicode<0x1FFFFFFFFF then - t[l]=f_double(floor(u/1024),u%1024+0xDC00) - else - report_fonts ("can't convert %a in %a into tounicode",u,name) - return - end - end - return concat(t) -end -local function tounicode(unicode,name) - if type(unicode)=="table" then - local t={} - for l=1,#unicode do - local u=unicode[l] - if u<0x10000 then - t[l]=f_single(u) - elseif u<0x1FFFFFFFFF then - t[l]=f_double(floor(u/1024),u%1024+0xDC00) - else - report_fonts ("can't convert %a in %a into tounicode",u,name) - return - end - end - return concat(t) - else - if unicode<0x10000 then - return f_single(unicode) - elseif unicode<0x1FFFFFFFFF then - return f_double(floor(unicode/1024),unicode%1024+0xDC00) - else - report_fonts("can't convert %a in %a into tounicode",unicode,name) - end - end -end -local function fromunicode16(str) - if #str==4 then - return tonumber(str,16) - else - local l,r=match(str,"(....)(....)") - return (tonumber(l,16))*0x400+tonumber(r,16)-0xDC00 - end -end -mappings.loadlumtable=loadlumtable -mappings.makenameparser=makenameparser -mappings.tounicode=tounicode -mappings.tounicode16=tounicode16 -mappings.tounicode16sequence=tounicode16sequence -mappings.fromunicode16=fromunicode16 -local ligseparator=P("_") -local varseparator=P(".") -local namesplitter=Ct(C((1-ligseparator-varseparator)^1)*(ligseparator*C((1-ligseparator-varseparator)^1))^0) -local overloads={ - IJ={ name="I_J",unicode={ 0x49,0x4A },mess=0x0132 }, - ij={ name="i_j",unicode={ 0x69,0x6A },mess=0x0133 }, - ff={ name="f_f",unicode={ 0x66,0x66 },mess=0xFB00 }, - fi={ name="f_i",unicode={ 0x66,0x69 },mess=0xFB01 }, - fl={ name="f_l",unicode={ 0x66,0x6C },mess=0xFB02 }, - ffi={ name="f_f_i",unicode={ 0x66,0x66,0x69 },mess=0xFB03 }, - ffl={ name="f_f_l",unicode={ 0x66,0x66,0x6C },mess=0xFB04 }, - fj={ name="f_j",unicode={ 0x66,0x6A } }, - fk={ name="f_k",unicode={ 0x66,0x6B } }, -} -for k,v in next,overloads do - local name=v.name - local mess=v.mess - if name then - overloads[name]=v - end - if mess then - overloads[mess]=v - end -end -mappings.overloads=overloads -function mappings.addtounicode(data,filename) - local 
resources=data.resources - local properties=data.properties - local descriptions=data.descriptions - local unicodes=resources.unicodes - local lookuptypes=resources.lookuptypes - if not unicodes then - return - end - unicodes['space']=unicodes['space'] or 32 - unicodes['hyphen']=unicodes['hyphen'] or 45 - unicodes['zwj']=unicodes['zwj'] or 0x200D - unicodes['zwnj']=unicodes['zwnj'] or 0x200C - local private=fonts.constructors.privateoffset - local unicodevector=fonts.encodings.agl.unicodes - local missing={} - local lumunic,uparser,oparser - local cidinfo,cidnames,cidcodes,usedmap - cidinfo=properties.cidinfo - usedmap=cidinfo and fonts.cid.getmap(cidinfo) - if usedmap then - oparser=usedmap and makenameparser(cidinfo.ordering) - cidnames=usedmap.names - cidcodes=usedmap.unicodes - end - uparser=makenameparser() - local ns,nl=0,0 - for unic,glyph in next,descriptions do - local index=glyph.index - local name=glyph.name - local r=overloads[name] - if r then - glyph.unicode=r.unicode - elseif unic==-1 or unic>=private or (unic>=0xE000 and unic<=0xF8FF) or unic==0xFFFE or unic==0xFFFF then - local unicode=lumunic and lumunic[name] or unicodevector[name] - if unicode then - glyph.unicode=unicode - ns=ns+1 - end - if (not unicode) and usedmap then - local foundindex=lpegmatch(oparser,name) - if foundindex then - unicode=cidcodes[foundindex] - if unicode then - glyph.unicode=unicode - ns=ns+1 - else - local reference=cidnames[foundindex] - if reference then - local foundindex=lpegmatch(oparser,reference) - if foundindex then - unicode=cidcodes[foundindex] - if unicode then - glyph.unicode=unicode - ns=ns+1 - end - end - if not unicode or unicode=="" then - local foundcodes,multiple=lpegmatch(uparser,reference) - if foundcodes then - glyph.unicode=foundcodes - if multiple then - nl=nl+1 - unicode=true - else - ns=ns+1 - unicode=foundcodes - end - end - end - end - end - end - end - if not unicode or unicode=="" then - local split=lpegmatch(namesplitter,name) - local nsplit=split and #split or 0 - local t,n={},0 - unicode=true - for l=1,nsplit do - local base=split[l] - local u=unicodes[base] or unicodevector[base] - if not u then - break - elseif type(u)=="table" then - if u[1]>=private then - unicode=false - break - end - n=n+1 - t[n]=u[1] - else - if u>=private then - unicode=false - break - end - n=n+1 - t[n]=u - end - end - if n==0 then - elseif n==1 then - glyph.unicode=t[1] - else - glyph.unicode=t - end - nl=nl+1 - end - if not unicode or unicode=="" then - local foundcodes,multiple=lpegmatch(uparser,name) - if foundcodes then - glyph.unicode=foundcodes - if multiple then - nl=nl+1 - unicode=true - else - ns=ns+1 - unicode=foundcodes - end - end - end - local r=overloads[unicode] - if r then - unicode=r.unicode - glyph.unicode=unicode - end - if not unicode then - missing[name]=true - end - end - end - if next(missing) then - local guess={} - local function check(gname,code,unicode) - local description=descriptions[code] - local variant=description.name - if variant==gname then - return - end - local unic=unicodes[variant] - if unic==-1 or unic>=private or (unic>=0xE000 and unic<=0xF8FF) or unic==0xFFFE or unic==0xFFFF then - else - return - end - if descriptions[code].unicode then - return - end - local g=guess[variant] - if g then - g[gname]=unicode - else - guess[variant]={ [gname]=unicode } - end - end - for unicode,description in next,descriptions do - local slookups=description.slookups - if slookups then - local gname=description.name - for tag,data in next,slookups do - local 
lookuptype=lookuptypes[tag] - if lookuptype=="alternate" then - for i=1,#data do - check(gname,data[i],unicode) - end - elseif lookuptype=="substitution" then - check(gname,data,unicode) - end - end - end - local mlookups=description.mlookups - if mlookups then - local gname=description.name - for tag,list in next,mlookups do - local lookuptype=lookuptypes[tag] - if lookuptype=="alternate" then - for i=1,#list do - local data=list[i] - for i=1,#data do - check(gname,data[i],unicode) - end - end - elseif lookuptype=="substitution" then - for i=1,#list do - check(gname,list[i],unicode) - end - end - end - end - end - local done=true - while done do - done=false - for k,v in next,guess do - if type(v)~="number" then - for kk,vv in next,v do - if vv==-1 or vv>=private or (vv>=0xE000 and vv<=0xF8FF) or vv==0xFFFE or vv==0xFFFF then - local uu=guess[kk] - if type(uu)=="number" then - guess[k]=uu - done=true - end - else - guess[k]=vv - done=true - end - end - end - end - end - local orphans=0 - local guessed=0 - for k,v in next,guess do - if type(v)=="number" then - descriptions[unicodes[k]].unicode=descriptions[v].unicode or v - guessed=guessed+1 - else - local t=nil - local l=lower(k) - local u=unicodes[l] - if not u then - orphans=orphans+1 - elseif u==-1 or u>=private or (u>=0xE000 and u<=0xF8FF) or u==0xFFFE or u==0xFFFF then - local unicode=descriptions[u].unicode - if unicode then - descriptions[unicodes[k]].unicode=unicode - guessed=guessed+1 - else - orphans=orphans+1 - end - else - orphans=orphans+1 - end - end - end - if trace_loading and orphans>0 or guessed>0 then - report_fonts("%s glyphs with no related unicode, %s guessed, %s orphans",guessed+orphans,guessed,orphans) - end - end - if trace_mapping then - for unic,glyph in table.sortedhash(descriptions) do - local name=glyph.name - local index=glyph.index - local unicode=glyph.unicode - if unicode then - if type(unicode)=="table" then - local unicodes={} - for i=1,#unicode do - unicodes[i]=formatters("%U",unicode[i]) - end - report_fonts("internal slot %U, name %a, unicode %U, tounicode % t",index,name,unic,unicodes) - else - report_fonts("internal slot %U, name %a, unicode %U, tounicode %U",index,name,unic,unicode) - end - else - report_fonts("internal slot %U, name %a, unicode %U",index,name,unic) - end - end - end - if trace_loading and (ns>0 or nl>0) then - report_fonts("%s tounicode entries added, ligatures %s",nl+ns,ns) - end -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['luatex-fonts-syn']={ - version=1.001, - comment="companion to luatex-*.tex", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end -local fonts=fonts -fonts.names=fonts.names or {} -fonts.names.version=1.001 -fonts.names.basename="luatex-fonts-names" -fonts.names.new_to_old={} -fonts.names.old_to_new={} -fonts.names.cache=containers.define("fonts","data",fonts.names.version,true) -local data,loaded=nil,false -local fileformats={ "lua","tex","other text files" } -function fonts.names.reportmissingbase() - texio.write("") - fonts.names.reportmissingbase=nil -end -function fonts.names.reportmissingname() - texio.write("") - fonts.names.reportmissingname=nil -end -function fonts.names.resolve(name,sub) - if not loaded then - local basename=fonts.names.basename - if basename and 
basename~="" then - data=containers.read(fonts.names.cache,basename) - if not data then - basename=file.addsuffix(basename,"lua") - for i=1,#fileformats do - local format=fileformats[i] - local foundname=resolvers.findfile(basename,format) or "" - if foundname~="" then - data=dofile(foundname) - texio.write("") - break - end - end - end - end - loaded=true - end - if type(data)=="table" and data.version==fonts.names.version then - local condensed=string.gsub(string.lower(name),"[^%a%d]","") - local found=data.mappings and data.mappings[condensed] - if found then - local fontname,filename,subfont=found[1],found[2],found[3] - if subfont then - return filename,fontname - else - return filename,false - end - elseif fonts.names.reportmissingname then - fonts.names.reportmissingname() - return name,false - end - elseif fonts.names.reportmissingbase then - fonts.names.reportmissingbase() - end -end -fonts.names.resolvespec=fonts.names.resolve -function fonts.names.getfilename(askedname,suffix) - return "" -end -function fonts.names.ignoredfile(filename) - return false -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-tfm']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local next=next -local match=string.match -local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end) -local trace_features=false trackers.register("tfm.features",function(v) trace_features=v end) -local report_defining=logs.reporter("fonts","defining") -local report_tfm=logs.reporter("fonts","tfm loading") -local findbinfile=resolvers.findbinfile -local fonts=fonts -local handlers=fonts.handlers -local readers=fonts.readers -local constructors=fonts.constructors -local encodings=fonts.encodings -local tfm=constructors.newhandler("tfm") -local tfmfeatures=constructors.newfeatures("tfm") -local registertfmfeature=tfmfeatures.register -constructors.resolvevirtualtoo=false -fonts.formats.tfm="type1" -function tfm.setfeatures(tfmdata,features) - local okay=constructors.initializefeatures("tfm",tfmdata,features,trace_features,report_tfm) - if okay then - return constructors.collectprocessors("tfm",tfmdata,features,trace_features,report_tfm) - else - return {} - end -end -local function read_from_tfm(specification) - local filename=specification.filename - local size=specification.size - if trace_defining then - report_defining("loading tfm file %a at size %s",filename,size) - end - local tfmdata=font.read_tfm(filename,size) - if tfmdata then - local features=specification.features and specification.features.normal or {} - local resources=tfmdata.resources or {} - local properties=tfmdata.properties or {} - local parameters=tfmdata.parameters or {} - local shared=tfmdata.shared or {} - properties.name=tfmdata.name - properties.fontname=tfmdata.fontname - properties.psname=tfmdata.psname - properties.filename=specification.filename - properties.format=fonts.formats.tfm - parameters.size=size - shared.rawdata={} - shared.features=features - shared.processes=next(features) and tfm.setfeatures(tfmdata,features) or nil - tfmdata.properties=properties - tfmdata.resources=resources - tfmdata.parameters=parameters - tfmdata.shared=shared - parameters.slant=parameters.slant or parameters[1] or 0 - parameters.space=parameters.space or parameters[2] or 0 - 
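--[[ Editor's note, not part of the fontloader sources: the assignments around
this point copy the classic TFM fontdimen slots into named parameters, falling
back to 0 when a slot is absent. The slot layout assumed for text fonts is:
[1] slant, [2] space, [3] space_stretch, [4] space_shrink, [5] x_height,
[6] quad, [7] extra_space. A standalone sketch of the same normalization,
using a hypothetical helper name:

local slotnames = {
  "slant", "space", "space_stretch", "space_shrink",
  "x_height", "quad", "extra_space",
}

local function namedparameters(parameters)
  for slot, name in ipairs(slotnames) do
    parameters[name] = parameters[name] or parameters[slot] or 0
  end
  return parameters
end
]]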
parameters.space_stretch=parameters.space_stretch or parameters[3] or 0 - parameters.space_shrink=parameters.space_shrink or parameters[4] or 0 - parameters.x_height=parameters.x_height or parameters[5] or 0 - parameters.quad=parameters.quad or parameters[6] or 0 - parameters.extra_space=parameters.extra_space or parameters[7] or 0 - constructors.enhanceparameters(parameters) - if constructors.resolvevirtualtoo then - fonts.loggers.register(tfmdata,file.suffix(filename),specification) - local vfname=findbinfile(specification.name,'ovf') - if vfname and vfname~="" then - local vfdata=font.read_vf(vfname,size) - if vfdata then - local chars=tfmdata.characters - for k,v in next,vfdata.characters do - chars[k].commands=v.commands - end - properties.virtualized=true - tfmdata.fonts=vfdata.fonts - end - end - end - local allfeatures=tfmdata.shared.features or specification.features.normal - constructors.applymanipulators("tfm",tfmdata,allfeatures.normal,trace_features,report_tfm) - if not features.encoding then - local encoding,filename=match(properties.filename,"^(.-)%-(.*)$") - if filename and encoding and encodings.known and encodings.known[encoding] then - features.encoding=encoding - end - end - properties.haskerns=true - properties.haslogatures=true - resources.unicodes={} - resources.lookuptags={} - return tfmdata - end -end -local function check_tfm(specification,fullname) - local foundname=findbinfile(fullname,'tfm') or "" - if foundname=="" then - foundname=findbinfile(fullname,'ofm') or "" - end - if foundname=="" then - foundname=fonts.names.getfilename(fullname,"tfm") or "" - end - if foundname~="" then - specification.filename=foundname - specification.format="ofm" - return read_from_tfm(specification) - elseif trace_defining then - report_defining("loading tfm with name %a fails",specification.name) - end -end -readers.check_tfm=check_tfm -function readers.tfm(specification) - local fullname=specification.filename or "" - if fullname=="" then - local forced=specification.forced or "" - if forced~="" then - fullname=specification.name.."."..forced - else - fullname=specification.name - end - end - return check_tfm(specification,fullname) -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-afm']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local fonts,logs,trackers,containers,resolvers=fonts,logs,trackers,containers,resolvers -local next,type,tonumber=next,type,tonumber -local format,match,gmatch,lower,gsub,strip=string.format,string.match,string.gmatch,string.lower,string.gsub,string.strip -local abs=math.abs -local P,S,C,R,lpegmatch,patterns=lpeg.P,lpeg.S,lpeg.C,lpeg.R,lpeg.match,lpeg.patterns -local derivetable=table.derive -local trace_features=false trackers.register("afm.features",function(v) trace_features=v end) -local trace_indexing=false trackers.register("afm.indexing",function(v) trace_indexing=v end) -local trace_loading=false trackers.register("afm.loading",function(v) trace_loading=v end) -local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end) -local report_afm=logs.reporter("fonts","afm loading") -local setmetatableindex=table.setmetatableindex -local findbinfile=resolvers.findbinfile -local definers=fonts.definers -local readers=fonts.readers -local 
constructors=fonts.constructors -local afm=constructors.newhandler("afm") -local pfb=constructors.newhandler("pfb") -local afmfeatures=constructors.newfeatures("afm") -local registerafmfeature=afmfeatures.register -afm.version=1.500 -afm.cache=containers.define("fonts","afm",afm.version,true) -afm.autoprefixed=true -afm.helpdata={} -afm.syncspace=true -afm.addligatures=true -afm.addtexligatures=true -afm.addkerns=true -local overloads=fonts.mappings.overloads -local applyruntimefixes=fonts.treatments and fonts.treatments.applyfixes -local function setmode(tfmdata,value) - if value then - tfmdata.properties.mode=lower(value) - end -end -registerafmfeature { - name="mode", - description="mode", - initializers={ - base=setmode, - node=setmode, - } -} -local comment=P("Comment") -local spacing=patterns.spacer -local lineend=patterns.newline -local words=C((1-lineend)^1) -local number=C((R("09")+S("."))^1)/tonumber*spacing^0 -local data=lpeg.Carg(1) -local pattern=( - comment*spacing*( - data*( - ("CODINGSCHEME"*spacing*words )/function(fd,a) end+("DESIGNSIZE"*spacing*number*words )/function(fd,a) fd[ 1]=a end+("CHECKSUM"*spacing*number*words )/function(fd,a) fd[ 2]=a end+("SPACE"*spacing*number*"plus"*number*"minus"*number)/function(fd,a,b,c) fd[ 3],fd[ 4],fd[ 5]=a,b,c end+("QUAD"*spacing*number )/function(fd,a) fd[ 6]=a end+("EXTRASPACE"*spacing*number )/function(fd,a) fd[ 7]=a end+("NUM"*spacing*number*number*number )/function(fd,a,b,c) fd[ 8],fd[ 9],fd[10]=a,b,c end+("DENOM"*spacing*number*number )/function(fd,a,b ) fd[11],fd[12]=a,b end+("SUP"*spacing*number*number*number )/function(fd,a,b,c) fd[13],fd[14],fd[15]=a,b,c end+("SUB"*spacing*number*number )/function(fd,a,b) fd[16],fd[17]=a,b end+("SUPDROP"*spacing*number )/function(fd,a) fd[18]=a end+("SUBDROP"*spacing*number )/function(fd,a) fd[19]=a end+("DELIM"*spacing*number*number )/function(fd,a,b) fd[20],fd[21]=a,b end+("AXISHEIGHT"*spacing*number )/function(fd,a) fd[22]=a end - )+(1-lineend)^0 - )+(1-comment)^1 -)^0 -local function scan_comment(str) - local fd={} - lpegmatch(pattern,str,1,fd) - return fd -end -local keys={} -function keys.FontName (data,line) data.metadata.fontname=strip (line) - data.metadata.fullname=strip (line) end -function keys.ItalicAngle (data,line) data.metadata.italicangle=tonumber (line) end -function keys.IsFixedPitch(data,line) data.metadata.isfixedpitch=toboolean(line,true) end -function keys.CharWidth (data,line) data.metadata.charwidth=tonumber (line) end -function keys.XHeight (data,line) data.metadata.xheight=tonumber (line) end -function keys.Descender (data,line) data.metadata.descender=tonumber (line) end -function keys.Ascender (data,line) data.metadata.ascender=tonumber (line) end -function keys.Comment (data,line) - line=lower(line) - local designsize=match(line,"designsize[^%d]*(%d+)") - if designsize then data.metadata.designsize=tonumber(designsize) end -end -local function get_charmetrics(data,charmetrics,vector) - local characters=data.characters - local chr,ind={},0 - for k,v in gmatch(charmetrics,"([%a]+) +(.-) *;") do - if k=='C' then - v=tonumber(v) - if v<0 then - ind=ind+1 - else - ind=v - end - chr={ - index=ind - } - elseif k=='WX' then - chr.width=tonumber(v) - elseif k=='N' then - characters[v]=chr - elseif k=='B' then - local llx,lly,urx,ury=match(v,"^ *(.-) +(.-) +(.-) +(.-)$") - chr.boundingbox={ tonumber(llx),tonumber(lly),tonumber(urx),tonumber(ury) } - elseif k=='L' then - local plus,becomes=match(v,"^(.-) +(.-)$") - local ligatures=chr.ligatures - if ligatures then - 
ligatures[plus]=becomes - else - chr.ligatures={ [plus]=becomes } - end - end - end -end -local function get_kernpairs(data,kernpairs) - local characters=data.characters - for one,two,value in gmatch(kernpairs,"KPX +(.-) +(.-) +(.-)\n") do - local chr=characters[one] - if chr then - local kerns=chr.kerns - if kerns then - kerns[two]=tonumber(value) - else - chr.kerns={ [two]=tonumber(value) } - end - end - end -end -local function get_variables(data,fontmetrics) - for key,rest in gmatch(fontmetrics,"(%a+) *(.-)[\n\r]") do - local keyhandler=keys[key] - if keyhandler then - keyhandler(data,rest) - end - end -end -local function get_indexes(data,pfbname) - data.resources.filename=resolvers.unresolve(pfbname) - local pfbblob=fontloader.open(pfbname) - if pfbblob then - local characters=data.characters - local pfbdata=fontloader.to_table(pfbblob) - if pfbdata then - local glyphs=pfbdata.glyphs - if glyphs then - if trace_loading then - report_afm("getting index data from %a",pfbname) - end - for index,glyph in next,glyphs do - local name=glyph.name - if name then - local char=characters[name] - if char then - if trace_indexing then - report_afm("glyph %a has index %a",name,index) - end - char.index=index - end - end - end - elseif trace_loading then - report_afm("no glyph data in pfb file %a",pfbname) - end - elseif trace_loading then - report_afm("no data in pfb file %a",pfbname) - end - fontloader.close(pfbblob) - elseif trace_loading then - report_afm("invalid pfb file %a",pfbname) - end -end -local function readafm(filename) - local ok,afmblob,size=resolvers.loadbinfile(filename) - if ok and afmblob then - local data={ - resources={ - filename=resolvers.unresolve(filename), - version=afm.version, - creator="context mkiv", - }, - properties={ - hasitalics=false, - }, - goodies={}, - metadata={ - filename=file.removesuffix(file.basename(filename)) - }, - characters={ - }, - descriptions={ - }, - } - afmblob=gsub(afmblob,"StartCharMetrics(.-)EndCharMetrics",function(charmetrics) - if trace_loading then - report_afm("loading char metrics") - end - get_charmetrics(data,charmetrics,vector) - return "" - end) - afmblob=gsub(afmblob,"StartKernPairs(.-)EndKernPairs",function(kernpairs) - if trace_loading then - report_afm("loading kern pairs") - end - get_kernpairs(data,kernpairs) - return "" - end) - afmblob=gsub(afmblob,"StartFontMetrics%s+([%d%.]+)(.-)EndFontMetrics",function(version,fontmetrics) - if trace_loading then - report_afm("loading variables") - end - data.afmversion=version - get_variables(data,fontmetrics) - data.fontdimens=scan_comment(fontmetrics) - return "" - end) - return data - else - if trace_loading then - report_afm("no valid afm file %a",filename) - end - return nil - end -end -local addkerns,addligatures,addtexligatures,unify,normalize,fixnames -function afm.load(filename) - filename=resolvers.findfile(filename,'afm') or "" - if filename~="" and not fonts.names.ignoredfile(filename) then - local name=file.removesuffix(file.basename(filename)) - local data=containers.read(afm.cache,name) - local attr=lfs.attributes(filename) - local size,time=attr.size or 0,attr.modification or 0 - local pfbfile=file.replacesuffix(name,"pfb") - local pfbname=resolvers.findfile(pfbfile,"pfb") or "" - if pfbname=="" then - pfbname=resolvers.findfile(file.basename(pfbfile),"pfb") or "" - end - local pfbsize,pfbtime=0,0 - if pfbname~="" then - local attr=lfs.attributes(pfbname) - pfbsize=attr.size or 0 - pfbtime=attr.modification or 0 - end - if not data or data.size~=size or data.time~=time or 
data.pfbsize~=pfbsize or data.pfbtime~=pfbtime then - report_afm("reading %a",filename) - data=readafm(filename) - if data then - if pfbname~="" then - get_indexes(data,pfbname) - elseif trace_loading then - report_afm("no pfb file for %a",filename) - end - report_afm("unifying %a",filename) - unify(data,filename) - if afm.addligatures then - report_afm("add ligatures") - addligatures(data) - end - if afm.addtexligatures then - report_afm("add tex ligatures") - addtexligatures(data) - end - if afm.addkerns then - report_afm("add extra kerns") - addkerns(data) - end - normalize(data) - fixnames(data) - report_afm("add tounicode data") - fonts.mappings.addtounicode(data,filename) - data.size=size - data.time=time - data.pfbsize=pfbsize - data.pfbtime=pfbtime - report_afm("saving %a in cache",name) - data.resources.unicodes=nil - data=containers.write(afm.cache,name,data) - data=containers.read(afm.cache,name) - end - if applyruntimefixes and data then - applyruntimefixes(filename,data) - end - end - return data - else - return nil - end -end -local uparser=fonts.mappings.makenameparser() -unify=function(data,filename) - local unicodevector=fonts.encodings.agl.unicodes - local unicodes,names={},{} - local private=constructors.privateoffset - local descriptions=data.descriptions - for name,blob in next,data.characters do - local code=unicodevector[name] - if not code then - code=lpegmatch(uparser,name) - if not code then - code=private - private=private+1 - report_afm("assigning private slot %U for unknown glyph name %a",code,name) - end - end - local index=blob.index - unicodes[name]=code - names[name]=index - blob.name=name - descriptions[code]={ - boundingbox=blob.boundingbox, - width=blob.width, - kerns=blob.kerns, - index=index, - name=name, - } - end - for unicode,description in next,descriptions do - local kerns=description.kerns - if kerns then - local krn={} - for name,kern in next,kerns do - local unicode=unicodes[name] - if unicode then - krn[unicode]=kern - else - end - end - description.kerns=krn - end - end - data.characters=nil - local resources=data.resources - local filename=resources.filename or file.removesuffix(file.basename(filename)) - resources.filename=resolvers.unresolve(filename) - resources.unicodes=unicodes - resources.marks={} - resources.private=private -end -normalize=function(data) -end -fixnames=function(data) - for k,v in next,data.descriptions do - local n=v.name - local r=overloads[n] - if r then - local name=r.name - if trace_indexing then - report_afm("renaming characters %a to %a",n,name) - end - v.name=name - v.unicode=r.unicode - end - end -end -local addthem=function(rawdata,ligatures) - if ligatures then - local descriptions=rawdata.descriptions - local resources=rawdata.resources - local unicodes=resources.unicodes - for ligname,ligdata in next,ligatures do - local one=descriptions[unicodes[ligname]] - if one then - for _,pair in next,ligdata do - local two,three=unicodes[pair[1]],unicodes[pair[2]] - if two and three then - local ol=one.ligatures - if ol then - if not ol[two] then - ol[two]=three - end - else - one.ligatures={ [two]=three } - end - end - end - end - end - end -end -addligatures=function(rawdata) addthem(rawdata,afm.helpdata.ligatures ) end -addtexligatures=function(rawdata) addthem(rawdata,afm.helpdata.texligatures) end -addkerns=function(rawdata) - local descriptions=rawdata.descriptions - local resources=rawdata.resources - local unicodes=resources.unicodes - local function do_it_left(what) - if what then - for unicode,description 
in next,descriptions do - local kerns=description.kerns - if kerns then - local extrakerns - for complex,simple in next,what do - complex=unicodes[complex] - simple=unicodes[simple] - if complex and simple then - local ks=kerns[simple] - if ks and not kerns[complex] then - if extrakerns then - extrakerns[complex]=ks - else - extrakerns={ [complex]=ks } - end - end - end - end - if extrakerns then - description.extrakerns=extrakerns - end - end - end - end - end - local function do_it_copy(what) - if what then - for complex,simple in next,what do - complex=unicodes[complex] - simple=unicodes[simple] - if complex and simple then - local complexdescription=descriptions[complex] - if complexdescription then - local simpledescription=descriptions[complex] - if simpledescription then - local extrakerns - local kerns=simpledescription.kerns - if kerns then - for unicode,kern in next,kerns do - if extrakerns then - extrakerns[unicode]=kern - else - extrakerns={ [unicode]=kern } - end - end - end - local extrakerns=simpledescription.extrakerns - if extrakerns then - for unicode,kern in next,extrakerns do - if extrakerns then - extrakerns[unicode]=kern - else - extrakerns={ [unicode]=kern } - end - end - end - if extrakerns then - complexdescription.extrakerns=extrakerns - end - end - end - end - end - end - end - do_it_left(afm.helpdata.leftkerned) - do_it_left(afm.helpdata.bothkerned) - do_it_copy(afm.helpdata.bothkerned) - do_it_copy(afm.helpdata.rightkerned) -end -local function adddimensions(data) - if data then - for unicode,description in next,data.descriptions do - local bb=description.boundingbox - if bb then - local ht,dp=bb[4],-bb[2] - if ht==0 or ht<0 then - else - description.height=ht - end - if dp==0 or dp<0 then - else - description.depth=dp - end - end - end - end -end -local function copytotfm(data) - if data and data.descriptions then - local metadata=data.metadata - local resources=data.resources - local properties=derivetable(data.properties) - local descriptions=derivetable(data.descriptions) - local goodies=derivetable(data.goodies) - local characters={} - local parameters={} - local unicodes=resources.unicodes - for unicode,description in next,data.descriptions do - characters[unicode]={} - end - local filename=constructors.checkedfilename(resources) - local fontname=metadata.fontname or metadata.fullname - local fullname=metadata.fullname or metadata.fontname - local endash=0x0020 - local emdash=0x2014 - local spacer="space" - local spaceunits=500 - local monospaced=metadata.isfixedpitch - local charwidth=metadata.charwidth - local italicangle=metadata.italicangle - local charxheight=metadata.xheight and metadata.xheight>0 and metadata.xheight - properties.monospaced=monospaced - parameters.italicangle=italicangle - parameters.charwidth=charwidth - parameters.charxheight=charxheight - if properties.monospaced then - if descriptions[endash] then - spaceunits,spacer=descriptions[endash].width,"space" - end - if not spaceunits and descriptions[emdash] then - spaceunits,spacer=descriptions[emdash].width,"emdash" - end - if not spaceunits and charwidth then - spaceunits,spacer=charwidth,"charwidth" - end - else - if descriptions[endash] then - spaceunits,spacer=descriptions[endash].width,"space" - end - if not spaceunits and charwidth then - spaceunits,spacer=charwidth,"charwidth" - end - end - spaceunits=tonumber(spaceunits) - if spaceunits<200 then - end - parameters.slant=0 - parameters.space=spaceunits - parameters.space_stretch=500 - parameters.space_shrink=333 - 
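--[[ Editor's note, not part of the fontloader sources: just below, copytotfm
derives TeX's slant parameter from the AFM ItalicAngle, which is given in
degrees and is usually negative for fonts leaning to the right. A standalone
sketch of that conversion with a worked value:

local function slantfromitalicangle(italicangle)
  -- e.g. italicangle = -12 degrees  -->  slant of about 0.2126
  return -math.tan(italicangle * math.pi / 180)
end
]]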
parameters.x_height=400 - parameters.quad=1000 - if italicangle and italicangle~=0 then - parameters.italicangle=italicangle - parameters.italicfactor=math.cos(math.rad(90+italicangle)) - parameters.slant=- math.tan(italicangle*math.pi/180) - end - if monospaced then - parameters.space_stretch=0 - parameters.space_shrink=0 - elseif afm.syncspace then - parameters.space_stretch=spaceunits/2 - parameters.space_shrink=spaceunits/3 - end - parameters.extra_space=parameters.space_shrink - if charxheight then - parameters.x_height=charxheight - else - local x=0x0078 - if x then - local x=descriptions[x] - if x then - parameters.x_height=x.height - end - end - end - local fd=data.fontdimens - if fd and fd[8] and fd[9] and fd[10] then - for k,v in next,fd do - parameters[k]=v - end - end - parameters.designsize=(metadata.designsize or 10)*65536 - parameters.ascender=abs(metadata.ascender or 0) - parameters.descender=abs(metadata.descender or 0) - parameters.units=1000 - properties.spacer=spacer - properties.encodingbytes=2 - properties.format=fonts.formats[filename] or "type1" - properties.filename=filename - properties.fontname=fontname - properties.fullname=fullname - properties.psname=fullname - properties.name=filename or fullname or fontname - if next(characters) then - return { - characters=characters, - descriptions=descriptions, - parameters=parameters, - resources=resources, - properties=properties, - goodies=goodies, - } - end - end - return nil -end -function afm.setfeatures(tfmdata,features) - local okay=constructors.initializefeatures("afm",tfmdata,features,trace_features,report_afm) - if okay then - return constructors.collectprocessors("afm",tfmdata,features,trace_features,report_afm) - else - return {} - end -end -local function addtables(data) - local resources=data.resources - local lookuptags=resources.lookuptags - local unicodes=resources.unicodes - if not lookuptags then - lookuptags={} - resources.lookuptags=lookuptags - end - setmetatableindex(lookuptags,function(t,k) - local v=type(k)=="number" and ("lookup "..k) or k - t[k]=v - return v - end) - if not unicodes then - unicodes={} - resources.unicodes=unicodes - setmetatableindex(unicodes,function(t,k) - setmetatableindex(unicodes,nil) - for u,d in next,data.descriptions do - local n=d.name - if n then - t[n]=u - end - end - return rawget(t,k) - end) - end - constructors.addcoreunicodes(unicodes) -end -local function afmtotfm(specification) - local afmname=specification.filename or specification.name - if specification.forced=="afm" or specification.format=="afm" then - if trace_loading then - report_afm("forcing afm format for %a",afmname) - end - else - local tfmname=findbinfile(afmname,"ofm") or "" - if tfmname~="" then - if trace_loading then - report_afm("fallback from afm to tfm for %a",afmname) - end - return - end - end - if afmname~="" then - local features=constructors.checkedfeatures("afm",specification.features.normal) - specification.features.normal=features - constructors.hashinstance(specification,true) - specification=definers.resolve(specification) - local cache_id=specification.hash - local tfmdata=containers.read(constructors.cache,cache_id) - if not tfmdata then - local rawdata=afm.load(afmname) - if rawdata and next(rawdata) then - addtables(rawdata) - adddimensions(rawdata) - tfmdata=copytotfm(rawdata) - if tfmdata and next(tfmdata) then - local shared=tfmdata.shared - if not shared then - shared={} - tfmdata.shared=shared - end - shared.rawdata=rawdata - shared.features=features - 
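--[[ Editor's note, not part of the fontloader sources: addtables() just above
installs a lazy reverse lookup: the resources.unicodes table gets an __index
metamethod that, on first access, walks the descriptions once to fill in
name -> unicode entries and then removes itself. A standalone sketch of that
pattern with hypothetical sample data:

local function lazyreverse(descriptions)
  local unicodes = { }
  setmetatable(unicodes, { __index = function(t, k)
    setmetatable(t, nil)                 -- build the map only once
    for u, d in pairs(descriptions) do
      if d.name then t[d.name] = u end
    end
    return rawget(t, k)
  end })
  return unicodes
end

-- local u = lazyreverse { [0x0041] = { name = "A" } }
-- print(u.A)   --> 65
]]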
shared.processes=afm.setfeatures(tfmdata,features) - end - elseif trace_loading then - report_afm("no (valid) afm file found with name %a",afmname) - end - tfmdata=containers.write(constructors.cache,cache_id,tfmdata) - end - return tfmdata - end -end -local function read_from_afm(specification) - local tfmdata=afmtotfm(specification) - if tfmdata then - tfmdata.properties.name=specification.name - tfmdata=constructors.scale(tfmdata,specification) - local allfeatures=tfmdata.shared.features or specification.features.normal - constructors.applymanipulators("afm",tfmdata,allfeatures,trace_features,report_afm) - fonts.loggers.register(tfmdata,'afm',specification) - end - return tfmdata -end -local function prepareligatures(tfmdata,ligatures,value) - if value then - local descriptions=tfmdata.descriptions - local hasligatures=false - for unicode,character in next,tfmdata.characters do - local description=descriptions[unicode] - local dligatures=description.ligatures - if dligatures then - local cligatures=character.ligatures - if not cligatures then - cligatures={} - character.ligatures=cligatures - end - for unicode,ligature in next,dligatures do - cligatures[unicode]={ - char=ligature, - type=0 - } - end - hasligatures=true - end - end - tfmdata.properties.hasligatures=hasligatures - end -end -local function preparekerns(tfmdata,kerns,value) - if value then - local rawdata=tfmdata.shared.rawdata - local resources=rawdata.resources - local unicodes=resources.unicodes - local descriptions=tfmdata.descriptions - local haskerns=false - for u,chr in next,tfmdata.characters do - local d=descriptions[u] - local newkerns=d[kerns] - if newkerns then - local kerns=chr.kerns - if not kerns then - kerns={} - chr.kerns=kerns - end - for k,v in next,newkerns do - local uk=unicodes[k] - if uk then - kerns[uk]=v - end - end - haskerns=true - end - end - tfmdata.properties.haskerns=haskerns - end -end -local list={ - [0x0027]=0x2019, -} -local function texreplacements(tfmdata,value) - local descriptions=tfmdata.descriptions - local characters=tfmdata.characters - for k,v in next,list do - characters [k]=characters [v] - descriptions[k]=descriptions[v] - end -end -local function ligatures (tfmdata,value) prepareligatures(tfmdata,'ligatures',value) end -local function texligatures(tfmdata,value) prepareligatures(tfmdata,'texligatures',value) end -local function kerns (tfmdata,value) preparekerns (tfmdata,'kerns',value) end -local function extrakerns (tfmdata,value) preparekerns (tfmdata,'extrakerns',value) end -registerafmfeature { - name="liga", - description="traditional ligatures", - initializers={ - base=ligatures, - node=ligatures, - } -} -registerafmfeature { - name="kern", - description="intercharacter kerning", - initializers={ - base=kerns, - node=kerns, - } -} -registerafmfeature { - name="extrakerns", - description="additional intercharacter kerning", - initializers={ - base=extrakerns, - node=extrakerns, - } -} -registerafmfeature { - name='tlig', - description='tex ligatures', - initializers={ - base=texligatures, - node=texligatures, - } -} -registerafmfeature { - name='trep', - description='tex replacements', - initializers={ - base=texreplacements, - node=texreplacements, - } -} -local check_tfm=readers.check_tfm -fonts.formats.afm="type1" -fonts.formats.pfb="type1" -local function check_afm(specification,fullname) - local foundname=findbinfile(fullname,'afm') or "" - if foundname=="" then - foundname=fonts.names.getfilename(fullname,"afm") or "" - end - if foundname=="" and afm.autoprefixed 
then - local encoding,shortname=match(fullname,"^(.-)%-(.*)$") - if encoding and shortname and fonts.encodings.known[encoding] then - shortname=findbinfile(shortname,'afm') or "" - if shortname~="" then - foundname=shortname - if trace_defining then - report_afm("stripping encoding prefix from filename %a",afmname) - end - end - end - end - if foundname~="" then - specification.filename=foundname - specification.format="afm" - return read_from_afm(specification) - end -end -function readers.afm(specification,method) - local fullname,tfmdata=specification.filename or "",nil - if fullname=="" then - local forced=specification.forced or "" - if forced~="" then - tfmdata=check_afm(specification,specification.name.."."..forced) - end - if not tfmdata then - method=method or definers.method or "afm or tfm" - if method=="tfm" then - tfmdata=check_tfm(specification,specification.name) - elseif method=="afm" then - tfmdata=check_afm(specification,specification.name) - elseif method=="tfm or afm" then - tfmdata=check_tfm(specification,specification.name) or check_afm(specification,specification.name) - else - tfmdata=check_afm(specification,specification.name) or check_tfm(specification,specification.name) - end - end - else - tfmdata=check_afm(specification,fullname) - end - return tfmdata -end -function readers.pfb(specification,method) - local original=specification.specification - if trace_defining then - report_afm("using afm reader for %a",original) - end - specification.specification=gsub(original,"%.pfb",".afm") - specification.forced="afm" - return readers.afm(specification,method) -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-afk']={ - version=1.001, - comment="companion to font-afm.lua", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files", - dataonly=true, -} -local allocate=utilities.storage.allocate -fonts.handlers.afm.helpdata={ - ligatures=allocate { - ['f']={ - { 'f','ff' }, - { 'i','fi' }, - { 'l','fl' }, - }, - ['ff']={ - { 'i','ffi' } - }, - ['fi']={ - { 'i','fii' } - }, - ['fl']={ - { 'i','fli' } - }, - ['s']={ - { 't','st' } - }, - ['i']={ - { 'j','ij' } - }, - }, - texligatures=allocate { - ['quoteleft']={ - { 'quoteleft','quotedblleft' } - }, - ['quoteright']={ - { 'quoteright','quotedblright' } - }, - ['hyphen']={ - { 'hyphen','endash' } - }, - ['endash']={ - { 'hyphen','emdash' } - } - }, - leftkerned=allocate { - AEligature="A",aeligature="a", - OEligature="O",oeligature="o", - IJligature="I",ijligature="i", - AE="A",ae="a", - OE="O",oe="o", - IJ="I",ij="i", - Ssharp="S",ssharp="s", - }, - rightkerned=allocate { - AEligature="E",aeligature="e", - OEligature="E",oeligature="e", - IJligature="J",ijligature="j", - AE="E",ae="e", - OE="E",oe="e", - IJ="J",ij="j", - Ssharp="S",ssharp="s", - }, - bothkerned=allocate { - Acircumflex="A",acircumflex="a", - Ccircumflex="C",ccircumflex="c", - Ecircumflex="E",ecircumflex="e", - Gcircumflex="G",gcircumflex="g", - Hcircumflex="H",hcircumflex="h", - Icircumflex="I",icircumflex="i", - Jcircumflex="J",jcircumflex="j", - Ocircumflex="O",ocircumflex="o", - Scircumflex="S",scircumflex="s", - Ucircumflex="U",ucircumflex="u", - Wcircumflex="W",wcircumflex="w", - Ycircumflex="Y",ycircumflex="y", - Agrave="A",agrave="a", - Egrave="E",egrave="e", - Igrave="I",igrave="i", - Ograve="O",ograve="o", - Ugrave="U",ugrave="u", - Ygrave="Y",ygrave="y", - 
Atilde="A",atilde="a", - Itilde="I",itilde="i", - Otilde="O",otilde="o", - Utilde="U",utilde="u", - Ntilde="N",ntilde="n", - Adiaeresis="A",adiaeresis="a",Adieresis="A",adieresis="a", - Ediaeresis="E",ediaeresis="e",Edieresis="E",edieresis="e", - Idiaeresis="I",idiaeresis="i",Idieresis="I",idieresis="i", - Odiaeresis="O",odiaeresis="o",Odieresis="O",odieresis="o", - Udiaeresis="U",udiaeresis="u",Udieresis="U",udieresis="u", - Ydiaeresis="Y",ydiaeresis="y",Ydieresis="Y",ydieresis="y", - Aacute="A",aacute="a", - Cacute="C",cacute="c", - Eacute="E",eacute="e", - Iacute="I",iacute="i", - Lacute="L",lacute="l", - Nacute="N",nacute="n", - Oacute="O",oacute="o", - Racute="R",racute="r", - Sacute="S",sacute="s", - Uacute="U",uacute="u", - Yacute="Y",yacute="y", - Zacute="Z",zacute="z", - Dstroke="D",dstroke="d", - Hstroke="H",hstroke="h", - Tstroke="T",tstroke="t", - Cdotaccent="C",cdotaccent="c", - Edotaccent="E",edotaccent="e", - Gdotaccent="G",gdotaccent="g", - Idotaccent="I",idotaccent="i", - Zdotaccent="Z",zdotaccent="z", - Amacron="A",amacron="a", - Emacron="E",emacron="e", - Imacron="I",imacron="i", - Omacron="O",omacron="o", - Umacron="U",umacron="u", - Ccedilla="C",ccedilla="c", - Kcedilla="K",kcedilla="k", - Lcedilla="L",lcedilla="l", - Ncedilla="N",ncedilla="n", - Rcedilla="R",rcedilla="r", - Scedilla="S",scedilla="s", - Tcedilla="T",tcedilla="t", - Ohungarumlaut="O",ohungarumlaut="o", - Uhungarumlaut="U",uhungarumlaut="u", - Aogonek="A",aogonek="a", - Eogonek="E",eogonek="e", - Iogonek="I",iogonek="i", - Uogonek="U",uogonek="u", - Aring="A",aring="a", - Uring="U",uring="u", - Abreve="A",abreve="a", - Ebreve="E",ebreve="e", - Gbreve="G",gbreve="g", - Ibreve="I",ibreve="i", - Obreve="O",obreve="o", - Ubreve="U",ubreve="u", - Ccaron="C",ccaron="c", - Dcaron="D",dcaron="d", - Ecaron="E",ecaron="e", - Lcaron="L",lcaron="l", - Ncaron="N",ncaron="n", - Rcaron="R",rcaron="r", - Scaron="S",scaron="s", - Tcaron="T",tcaron="t", - Zcaron="Z",zcaron="z", - dotlessI="I",dotlessi="i", - dotlessJ="J",dotlessj="j", - AEligature="AE",aeligature="ae",AE="AE",ae="ae", - OEligature="OE",oeligature="oe",OE="OE",oe="oe", - IJligature="IJ",ijligature="ij",IJ="IJ",ij="ij", - Lstroke="L",lstroke="l",Lslash="L",lslash="l", - Ostroke="O",ostroke="o",Oslash="O",oslash="o", - Ssharp="SS",ssharp="ss", - Aumlaut="A",aumlaut="a", - Eumlaut="E",eumlaut="e", - Iumlaut="I",iumlaut="i", - Oumlaut="O",oumlaut="o", - Uumlaut="U",uumlaut="u", - } -} - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['luatex-fonts-tfm']={ - version=1.001, - comment="companion to luatex-*.tex", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end -local fonts=fonts -local tfm={} -fonts.handlers.tfm=tfm -fonts.formats.tfm="type1" -function fonts.readers.tfm(specification) - local fullname=specification.filename or "" - if fullname=="" then - local forced=specification.forced or "" - if forced~="" then - fullname=specification.name.."."..forced - else - fullname=specification.name - end - end - local foundname=resolvers.findbinfile(fullname,'tfm') or "" - if foundname=="" then - foundname=resolvers.findbinfile(fullname,'ofm') or "" - end - if foundname~="" then - specification.filename=foundname - specification.format="ofm" - return 
font.read_tfm(specification.filename,specification.size) - end -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-oti']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local lower=string.lower -local fonts=fonts -local constructors=fonts.constructors -local otf=constructors.newhandler("otf") -local otffeatures=constructors.newfeatures("otf") -local otftables=otf.tables -local registerotffeature=otffeatures.register -local allocate=utilities.storage.allocate -registerotffeature { - name="features", - description="initialization of feature handler", - default=true, -} -local function setmode(tfmdata,value) - if value then - tfmdata.properties.mode=lower(value) - end -end -local function setlanguage(tfmdata,value) - if value then - local cleanvalue=lower(value) - local languages=otftables and otftables.languages - local properties=tfmdata.properties - if not languages then - properties.language=cleanvalue - elseif languages[value] then - properties.language=cleanvalue - else - properties.language="dflt" - end - end -end -local function setscript(tfmdata,value) - if value then - local cleanvalue=lower(value) - local scripts=otftables and otftables.scripts - local properties=tfmdata.properties - if not scripts then - properties.script=cleanvalue - elseif scripts[value] then - properties.script=cleanvalue - else - properties.script="dflt" - end - end -end -registerotffeature { - name="mode", - description="mode", - initializers={ - base=setmode, - node=setmode, - } -} -registerotffeature { - name="language", - description="language", - initializers={ - base=setlanguage, - node=setlanguage, - } -} -registerotffeature { - name="script", - description="script", - initializers={ - base=setscript, - node=setscript, - } -} - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-otf']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local utfbyte=utf.byte -local format,gmatch,gsub,find,match,lower,strip=string.format,string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip -local type,next,tonumber,tostring=type,next,tonumber,tostring -local abs=math.abs -local insert=table.insert -local lpegmatch=lpeg.match -local reversed,concat,remove,sortedkeys=table.reversed,table.concat,table.remove,table.sortedkeys -local ioflush=io.flush -local fastcopy,tohash,derivetable=table.fastcopy,table.tohash,table.derive -local formatters=string.formatters -local P,R,S,C,Ct,lpegmatch=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Ct,lpeg.match -local setmetatableindex=table.setmetatableindex -local allocate=utilities.storage.allocate -local registertracker=trackers.register -local registerdirective=directives.register -local starttiming=statistics.starttiming -local stoptiming=statistics.stoptiming -local elapsedtime=statistics.elapsedtime -local findbinfile=resolvers.findbinfile -local trace_private=false registertracker("otf.private",function(v) trace_private=v end) -local trace_loading=false registertracker("otf.loading",function(v) trace_loading=v end) -local trace_features=false registertracker("otf.features",function(v) 
trace_features=v end) -local trace_dynamics=false registertracker("otf.dynamics",function(v) trace_dynamics=v end) -local trace_sequences=false registertracker("otf.sequences",function(v) trace_sequences=v end) -local trace_markwidth=false registertracker("otf.markwidth",function(v) trace_markwidth=v end) -local trace_defining=false registertracker("fonts.defining",function(v) trace_defining=v end) -local compact_lookups=true registertracker("otf.compactlookups",function(v) compact_lookups=v end) -local purge_names=true registertracker("otf.purgenames",function(v) purge_names=v end) -local report_otf=logs.reporter("fonts","otf loading") -local fonts=fonts -local otf=fonts.handlers.otf -otf.glists={ "gsub","gpos" } -otf.version=2.802 -otf.cache=containers.define("fonts","otf",otf.version,true) -local fontdata=fonts.hashes.identifiers -local chardata=characters and characters.data -local definers=fonts.definers -local readers=fonts.readers -local constructors=fonts.constructors -local otffeatures=constructors.newfeatures("otf") -local registerotffeature=otffeatures.register -local enhancers=allocate() -otf.enhancers=enhancers -local patches={} -enhancers.patches=patches -local forceload=false -local cleanup=0 -local packdata=true -local syncspace=true -local forcenotdef=false -local includesubfonts=false -local overloadkerns=false -local applyruntimefixes=fonts.treatments and fonts.treatments.applyfixes -local wildcard="*" -local default="dflt" -local fontloaderfields=fontloader.fields -local mainfields=nil -local glyphfields=nil -local formats=fonts.formats -formats.otf="opentype" -formats.ttf="truetype" -formats.ttc="truetype" -formats.dfont="truetype" -registerdirective("fonts.otf.loader.cleanup",function(v) cleanup=tonumber(v) or (v and 1) or 0 end) -registerdirective("fonts.otf.loader.force",function(v) forceload=v end) -registerdirective("fonts.otf.loader.pack",function(v) packdata=v end) -registerdirective("fonts.otf.loader.syncspace",function(v) syncspace=v end) -registerdirective("fonts.otf.loader.forcenotdef",function(v) forcenotdef=v end) -registerdirective("fonts.otf.loader.overloadkerns",function(v) overloadkerns=v end) -function otf.fileformat(filename) - local leader=lower(io.loadchunk(filename,4)) - local suffix=lower(file.suffix(filename)) - if leader=="otto" then - return formats.otf,suffix=="otf" - elseif leader=="ttcf" then - return formats.ttc,suffix=="ttc" - elseif suffix=="ttc" then - return formats.ttc,true - elseif suffix=="dfont" then - return formats.dfont,true - else - return formats.ttf,suffix=="ttf" - end -end -local function otf_format(filename) - local format,okay=otf.fileformat(filename) - if not okay then - report_otf("font %a is actually an %a file",filename,format) - end - return format -end -local function load_featurefile(raw,featurefile) - if featurefile and featurefile~="" then - if trace_loading then - report_otf("using featurefile %a",featurefile) - end - fontloader.apply_featurefile(raw,featurefile) - end -end -local function showfeatureorder(rawdata,filename) - local sequences=rawdata.resources.sequences - if sequences and #sequences>0 then - if trace_loading then - report_otf("font %a has %s sequences",filename,#sequences) - report_otf(" ") - end - for nos=1,#sequences do - local sequence=sequences[nos] - local typ=sequence.type or "no-type" - local name=sequence.name or "no-name" - local subtables=sequence.subtables or { "no-subtables" } - local features=sequence.features - if trace_loading then - report_otf("%3i %-15s %-20s [% 
t]",nos,name,typ,subtables) - end - if features then - for feature,scripts in next,features do - local tt={} - if type(scripts)=="table" then - for script,languages in next,scripts do - local ttt={} - for language,_ in next,languages do - ttt[#ttt+1]=language - end - tt[#tt+1]=formatters["[%s: % t]"](script,ttt) - end - if trace_loading then - report_otf(" %s: % t",feature,tt) - end - else - if trace_loading then - report_otf(" %s: %S",feature,scripts) - end - end - end - end - end - if trace_loading then - report_otf("\n") - end - elseif trace_loading then - report_otf("font %a has no sequences",filename) - end -end -local valid_fields=table.tohash { - "ascent", - "cidinfo", - "copyright", - "descent", - "design_range_bottom", - "design_range_top", - "design_size", - "encodingchanged", - "extrema_bound", - "familyname", - "fontname", - "fontstyle_id", - "fontstyle_name", - "fullname", - "hasvmetrics", - "horiz_base", - "issans", - "isserif", - "italicangle", - "macstyle", - "onlybitmaps", - "origname", - "os2_version", - "pfminfo", - "serifcheck", - "sfd_version", - "strokedfont", - "strokewidth", - "table_version", - "ttf_tables", - "uni_interp", - "uniqueid", - "units_per_em", - "upos", - "use_typo_metrics", - "uwidth", - "validation_state", - "version", - "vert_base", - "weight", - "weight_width_slope_only", -} -local ordered_enhancers={ - "prepare tables", - "prepare glyphs", - "prepare lookups", - "analyze glyphs", - "analyze math", - "reorganize lookups", - "reorganize mark classes", - "reorganize anchor classes", - "reorganize glyph kerns", - "reorganize glyph lookups", - "reorganize glyph anchors", - "merge kern classes", - "reorganize features", - "reorganize subtables", - "check glyphs", - "check metadata", - "check extra features", - "prepare tounicode", - "check encoding", - "add duplicates", - "cleanup tables", - "compact lookups", - "purge names", -} -local actions=allocate() -local before=allocate() -local after=allocate() -patches.before=before -patches.after=after -local function enhance(name,data,filename,raw) - local enhancer=actions[name] - if enhancer then - if trace_loading then - report_otf("apply enhancement %a to file %a",name,filename) - ioflush() - end - enhancer(data,filename,raw) - else - end -end -function enhancers.apply(data,filename,raw) - local basename=file.basename(lower(filename)) - if trace_loading then - report_otf("%s enhancing file %a","start",filename) - end - ioflush() - for e=1,#ordered_enhancers do - local enhancer=ordered_enhancers[e] - local b=before[enhancer] - if b then - for pattern,action in next,b do - if find(basename,pattern) then - action(data,filename,raw) - end - end - end - enhance(enhancer,data,filename,raw) - local a=after[enhancer] - if a then - for pattern,action in next,a do - if find(basename,pattern) then - action(data,filename,raw) - end - end - end - ioflush() - end - if trace_loading then - report_otf("%s enhancing file %a","stop",filename) - end - ioflush() -end -function patches.register(what,where,pattern,action) - local pw=patches[what] - if pw then - local ww=pw[where] - if ww then - ww[pattern]=action - else - pw[where]={ [pattern]=action} - end - end -end -function patches.report(fmt,...) 
- if trace_loading then - report_otf("patching: %s",formatters[fmt](...)) - end -end -function enhancers.register(what,action) - actions[what]=action -end -function otf.load(filename,sub,featurefile) - local base=file.basename(file.removesuffix(filename)) - local name=file.removesuffix(base) - local attr=lfs.attributes(filename) - local size=attr and attr.size or 0 - local time=attr and attr.modification or 0 - if featurefile then - name=name.."@"..file.removesuffix(file.basename(featurefile)) - end - if sub=="" then - sub=false - end - local hash=name - if sub then - hash=hash.."-"..sub - end - hash=containers.cleanname(hash) - local featurefiles - if featurefile then - featurefiles={} - for s in gmatch(featurefile,"[^,]+") do - local name=resolvers.findfile(file.addsuffix(s,'fea'),'fea') or "" - if name=="" then - report_otf("loading error, no featurefile %a",s) - else - local attr=lfs.attributes(name) - featurefiles[#featurefiles+1]={ - name=name, - size=attr and attr.size or 0, - time=attr and attr.modification or 0, - } - end - end - if #featurefiles==0 then - featurefiles=nil - end - end - local data=containers.read(otf.cache,hash) - local reload=not data or data.size~=size or data.time~=time - if forceload then - report_otf("forced reload of %a due to hard coded flag",filename) - reload=true - end - if not reload then - local featuredata=data.featuredata - if featurefiles then - if not featuredata or #featuredata~=#featurefiles then - reload=true - else - for i=1,#featurefiles do - local fi,fd=featurefiles[i],featuredata[i] - if fi.name~=fd.name or fi.size~=fd.size or fi.time~=fd.time then - reload=true - break - end - end - end - elseif featuredata then - reload=true - end - if reload then - report_otf("loading: forced reload due to changed featurefile specification %a",featurefile) - end - end - if reload then - report_otf("loading %a, hash %a",filename,hash) - local fontdata,messages - if sub then - fontdata,messages=fontloader.open(filename,sub) - else - fontdata,messages=fontloader.open(filename) - end - if fontdata then - mainfields=mainfields or (fontloaderfields and fontloaderfields(fontdata)) - end - if trace_loading and messages and #messages>0 then - if type(messages)=="string" then - report_otf("warning: %s",messages) - else - for m=1,#messages do - report_otf("warning: %S",messages[m]) - end - end - else - report_otf("loading done") - end - if fontdata then - if featurefiles then - for i=1,#featurefiles do - load_featurefile(fontdata,featurefiles[i].name) - end - end - local unicodes={ - } - local splitter=lpeg.splitter(" ",unicodes) - data={ - size=size, - time=time, - format=otf_format(filename), - featuredata=featurefiles, - resources={ - filename=resolvers.unresolve(filename), - version=otf.version, - creator="context mkiv", - unicodes=unicodes, - indices={ - }, - duplicates={ - }, - variants={ - }, - lookuptypes={}, - }, - warnings={}, - metadata={ - }, - properties={ - }, - descriptions={}, - goodies={}, - helpers={ - tounicodelist=splitter, - tounicodetable=Ct(splitter), - }, - } - starttiming(data) - report_otf("file size: %s",size) - enhancers.apply(data,filename,fontdata) - local packtime={} - if packdata then - if cleanup>0 then - collectgarbage("collect") - end - starttiming(packtime) - enhance("pack",data,filename,nil) - stoptiming(packtime) - end - report_otf("saving %a in cache",filename) - data=containers.write(otf.cache,hash,data) - if cleanup>1 then - collectgarbage("collect") - end - stoptiming(data) - if elapsedtime then - report_otf("preprocessing 
and caching time %s, packtime %s", - elapsedtime(data),packdata and elapsedtime(packtime) or 0) - end - fontloader.close(fontdata) - if cleanup>3 then - collectgarbage("collect") - end - data=containers.read(otf.cache,hash) - if cleanup>2 then - collectgarbage("collect") - end - else - data=nil - report_otf("loading failed due to read error") - end - end - if data then - if trace_defining then - report_otf("loading from cache using hash %a",hash) - end - enhance("unpack",data,filename,nil,false) - local resources=data.resources - local lookuptags=resources.lookuptags - local unicodes=resources.unicodes - if not lookuptags then - lookuptags={} - resources.lookuptags=lookuptags - end - setmetatableindex(lookuptags,function(t,k) - local v=type(k)=="number" and ("lookup "..k) or k - t[k]=v - return v - end) - if not unicodes then - unicodes={} - resources.unicodes=unicodes - setmetatableindex(unicodes,function(t,k) - setmetatableindex(unicodes,nil) - for u,d in next,data.descriptions do - local n=d.name - if n then - t[n]=u - else - end - end - return rawget(t,k) - end) - end - constructors.addcoreunicodes(unicodes) - if applyruntimefixes then - applyruntimefixes(filename,data) - end - enhance("add dimensions",data,filename,nil,false) - if trace_sequences then - showfeatureorder(data,filename) - end - end - return data -end -local mt={ - __index=function(t,k) - if k=="height" then - local ht=t.boundingbox[4] - return ht<0 and 0 or ht - elseif k=="depth" then - local dp=-t.boundingbox[2] - return dp<0 and 0 or dp - elseif k=="width" then - return 0 - elseif k=="name" then - return forcenotdef and ".notdef" - end - end -} -actions["prepare tables"]=function(data,filename,raw) - data.properties.hasitalics=false -end -actions["add dimensions"]=function(data,filename) - if data then - local descriptions=data.descriptions - local resources=data.resources - local defaultwidth=resources.defaultwidth or 0 - local defaultheight=resources.defaultheight or 0 - local defaultdepth=resources.defaultdepth or 0 - local basename=trace_markwidth and file.basename(filename) - for _,d in next,descriptions do - local bb,wd=d.boundingbox,d.width - if not wd then - d.width=defaultwidth - elseif trace_markwidth and wd~=0 and d.class=="mark" then - report_otf("mark %a with width %b found in %a",d.name or "",wd,basename) - end - if bb then - local ht,dp=bb[4],-bb[2] - if ht==0 or ht<0 then - else - d.height=ht - end - if dp==0 or dp<0 then - else - d.depth=dp - end - end - end - end -end -local function somecopy(old) - if old then - local new={} - if type(old)=="table" then - for k,v in next,old do - if k=="glyphs" then - elseif type(v)=="table" then - new[k]=somecopy(v) - else - new[k]=v - end - end - else - for i=1,#mainfields do - local k=mainfields[i] - local v=old[k] - if k=="glyphs" then - elseif type(v)=="table" then - new[k]=somecopy(v) - else - new[k]=v - end - end - end - return new - else - return {} - end -end -actions["prepare glyphs"]=function(data,filename,raw) - local rawglyphs=raw.glyphs - local rawsubfonts=raw.subfonts - local rawcidinfo=raw.cidinfo - local criterium=constructors.privateoffset - local private=criterium - local resources=data.resources - local metadata=data.metadata - local properties=data.properties - local descriptions=data.descriptions - local unicodes=resources.unicodes - local indices=resources.indices - local duplicates=resources.duplicates - local variants=resources.variants - if rawsubfonts then - metadata.subfonts=includesubfonts and {} - properties.cidinfo=rawcidinfo - if 
rawcidinfo.registry then - local cidmap=fonts.cid.getmap(rawcidinfo) - if cidmap then - rawcidinfo.usedname=cidmap.usedname - local nofnames,nofunicodes=0,0 - local cidunicodes,cidnames=cidmap.unicodes,cidmap.names - for cidindex=1,#rawsubfonts do - local subfont=rawsubfonts[cidindex] - local cidglyphs=subfont.glyphs - if includesubfonts then - metadata.subfonts[cidindex]=somecopy(subfont) - end - for index=0,subfont.glyphcnt-1 do - local glyph=cidglyphs[index] - if glyph then - local unicode=glyph.unicode - if unicode>=0x00E000 and unicode<=0x00F8FF then - unicode=-1 - elseif unicode>=0x0F0000 and unicode<=0x0FFFFD then - unicode=-1 - elseif unicode>=0x100000 and unicode<=0x10FFFD then - unicode=-1 - end - local name=glyph.name or cidnames[index] - if not unicode or unicode==-1 then - unicode=cidunicodes[index] - end - if unicode and descriptions[unicode] then - if trace_private then - report_otf("preventing glyph %a at index %H to overload unicode %U",name or "noname",index,unicode) - end - unicode=-1 - end - if not unicode or unicode==-1 then - if not name then - name=format("u%06X.ctx",private) - end - unicode=private - unicodes[name]=private - if trace_private then - report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private) - end - private=private+1 - nofnames=nofnames+1 - else - if not name then - name=format("u%06X.ctx",unicode) - end - unicodes[name]=unicode - nofunicodes=nofunicodes+1 - end - indices[index]=unicode - local description={ - boundingbox=glyph.boundingbox, - name=glyph.name or name or "unknown", - cidindex=cidindex, - index=index, - glyph=glyph, - } - descriptions[unicode]=description - else - end - end - end - if trace_loading then - report_otf("cid font remapped, %s unicode points, %s symbolic names, %s glyphs",nofunicodes,nofnames,nofunicodes+nofnames) - end - elseif trace_loading then - report_otf("unable to remap cid font, missing cid file for %a",filename) - end - elseif trace_loading then - report_otf("font %a has no glyphs",filename) - end - else - for index=0,raw.glyphcnt-1 do - local glyph=rawglyphs[index] - if glyph then - local unicode=glyph.unicode - local name=glyph.name - if not unicode or unicode==-1 then - unicode=private - unicodes[name]=private - if trace_private then - report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private) - end - private=private+1 - else - if unicode>criterium then - local taken=descriptions[unicode] - if taken then - if unicode>=private then - private=unicode+1 - else - private=private+1 - end - descriptions[private]=taken - unicodes[taken.name]=private - indices[taken.index]=private - if trace_private then - report_otf("slot %U is moved to %U due to private in font",unicode) - end - else - if unicode>=private then - private=unicode+1 - end - end - end - unicodes[name]=unicode - end - indices[index]=unicode - descriptions[unicode]={ - boundingbox=glyph.boundingbox, - name=name, - index=index, - glyph=glyph, - } - local altuni=glyph.altuni - if altuni then - for i=1,#altuni do - local a=altuni[i] - local u=a.unicode - local v=a.variant - if v then - local vv=variants[v] - if vv then - vv[u]=unicode - else - vv={ [u]=unicode } - variants[v]=vv - end - end - end - end - else - report_otf("potential problem: glyph %U is used but empty",index) - end - end - end - resources.private=private -end -actions["check encoding"]=function(data,filename,raw) - local descriptions=data.descriptions - local resources=data.resources - local properties=data.properties - local 
unicodes=resources.unicodes - local indices=resources.indices - local duplicates=resources.duplicates - local mapdata=raw.map or {} - local unicodetoindex=mapdata and mapdata.map or {} - local indextounicode=mapdata and mapdata.backmap or {} - local encname=lower(data.enc_name or mapdata.enc_name or "") - local criterium=0xFFFF - local privateoffset=constructors.privateoffset - if find(encname,"unicode") then - if trace_loading then - report_otf("checking embedded unicode map %a",encname) - end - local reported={} - for maybeunicode,index in next,unicodetoindex do - if descriptions[maybeunicode] then - else - local unicode=indices[index] - if not unicode then - elseif maybeunicode==unicode then - elseif unicode>privateoffset then - else - local d=descriptions[unicode] - if d then - local c=d.copies - if c then - c[maybeunicode]=true - else - d.copies={ [maybeunicode]=true } - end - elseif index and not reported[index] then - report_otf("missing index %i",index) - reported[index]=true - end - end - end - end - for unicode,data in next,descriptions do - local d=data.copies - if d then - duplicates[unicode]=sortedkeys(d) - data.copies=nil - end - end - elseif properties.cidinfo then - report_otf("warning: no unicode map, used cidmap %a",properties.cidinfo.usedname) - else - report_otf("warning: non unicode map %a, only using glyph unicode data",encname or "whatever") - end - if mapdata then - mapdata.map={} - mapdata.backmap={} - end -end -actions["add duplicates"]=function(data,filename,raw) - local descriptions=data.descriptions - local resources=data.resources - local properties=data.properties - local unicodes=resources.unicodes - local indices=resources.indices - local duplicates=resources.duplicates - for unicode,d in next,duplicates do - local nofduplicates=#d - if nofduplicates>4 then - if trace_loading then - report_otf("ignoring excessive duplicates of %U (n=%s)",unicode,nofduplicates) - end - else - for i=1,nofduplicates do - local u=d[i] - if not descriptions[u] then - local description=descriptions[unicode] - local n=0 - for _,description in next,descriptions do - local kerns=description.kerns - if kerns then - for _,k in next,kerns do - local ku=k[unicode] - if ku then - k[u]=ku - n=n+1 - end - end - end - end - if u>0 then - local duplicate=table.copy(description) - duplicate.comment=format("copy of U+%05X",unicode) - descriptions[u]=duplicate - if trace_loading then - report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n) - end - end - end - end - end - end -end -actions["analyze glyphs"]=function(data,filename,raw) - local descriptions=data.descriptions - local resources=data.resources - local metadata=data.metadata - local properties=data.properties - local hasitalics=false - local widths={} - local marks={} - for unicode,description in next,descriptions do - local glyph=description.glyph - local italic=glyph.italic_correction - if not italic then - elseif italic==0 then - else - description.italic=italic - hasitalics=true - end - local width=glyph.width - widths[width]=(widths[width] or 0)+1 - local class=glyph.class - if class then - if class=="mark" then - marks[unicode]=true - end - description.class=class - end - end - properties.hasitalics=hasitalics - resources.marks=marks - local wd,most=0,1 - for k,v in next,widths do - if v>most then - wd,most=k,v - end - end - if most>1000 then - if trace_loading then - report_otf("most common width: %s (%s times), sharing (cjk font)",wd,most) - end - for unicode,description in next,descriptions 
do - if description.width==wd then - else - description.width=description.glyph.width - end - end - resources.defaultwidth=wd - else - for unicode,description in next,descriptions do - description.width=description.glyph.width - end - end -end -actions["reorganize mark classes"]=function(data,filename,raw) - local mark_classes=raw.mark_classes - if mark_classes then - local resources=data.resources - local unicodes=resources.unicodes - local markclasses={} - resources.markclasses=markclasses - for name,class in next,mark_classes do - local t={} - for s in gmatch(class,"[^ ]+") do - t[unicodes[s]]=true - end - markclasses[name]=t - end - end -end -actions["reorganize features"]=function(data,filename,raw) - local features={} - data.resources.features=features - for k,what in next,otf.glists do - local dw=raw[what] - if dw then - local f={} - features[what]=f - for i=1,#dw do - local d=dw[i] - local dfeatures=d.features - if dfeatures then - for i=1,#dfeatures do - local df=dfeatures[i] - local tag=strip(lower(df.tag)) - local ft=f[tag] - if not ft then - ft={} - f[tag]=ft - end - local dscripts=df.scripts - for i=1,#dscripts do - local d=dscripts[i] - local languages=d.langs - local script=strip(lower(d.script)) - local fts=ft[script] if not fts then fts={} ft[script]=fts end - for i=1,#languages do - fts[strip(lower(languages[i]))]=true - end - end - end - end - end - end - end -end -actions["reorganize anchor classes"]=function(data,filename,raw) - local resources=data.resources - local anchor_to_lookup={} - local lookup_to_anchor={} - resources.anchor_to_lookup=anchor_to_lookup - resources.lookup_to_anchor=lookup_to_anchor - local classes=raw.anchor_classes - if classes then - for c=1,#classes do - local class=classes[c] - local anchor=class.name - local lookups=class.lookup - if type(lookups)~="table" then - lookups={ lookups } - end - local a=anchor_to_lookup[anchor] - if not a then - a={} - anchor_to_lookup[anchor]=a - end - for l=1,#lookups do - local lookup=lookups[l] - local l=lookup_to_anchor[lookup] - if l then - l[anchor]=true - else - l={ [anchor]=true } - lookup_to_anchor[lookup]=l - end - a[lookup]=true - end - end - end -end -actions["prepare tounicode"]=function(data,filename,raw) - fonts.mappings.addtounicode(data,filename) -end -local g_directions={ - gsub_contextchain=1, - gpos_contextchain=1, - gsub_reversecontextchain=-1, - gpos_reversecontextchain=-1, -} -actions["reorganize subtables"]=function(data,filename,raw) - local resources=data.resources - local sequences={} - local lookups={} - local chainedfeatures={} - resources.sequences=sequences - resources.lookups=lookups - for _,what in next,otf.glists do - local dw=raw[what] - if dw then - for k=1,#dw do - local gk=dw[k] - local features=gk.features - local typ=gk.type - local chain=g_directions[typ] or 0 - local subtables=gk.subtables - if subtables then - local t={} - for s=1,#subtables do - t[s]=subtables[s].name - end - subtables=t - end - local flags,markclass=gk.flags,nil - if flags then - local t={ - (flags.ignorecombiningmarks and "mark") or false, - (flags.ignoreligatures and "ligature") or false, - (flags.ignorebaseglyphs and "base") or false, - flags.r2l or false, - } - markclass=flags.mark_class - if markclass then - markclass=resources.markclasses[markclass] - end - flags=t - end - local name=gk.name - if not name then - report_otf("skipping weird lookup number %s",k) - elseif features then - local f={} - local o={} - for i=1,#features do - local df=features[i] - local tag=strip(lower(df.tag)) - local 
ft=f[tag] - if not ft then - ft={} - f[tag]=ft - o[#o+1]=tag - end - local dscripts=df.scripts - for i=1,#dscripts do - local d=dscripts[i] - local languages=d.langs - local script=strip(lower(d.script)) - local fts=ft[script] if not fts then fts={} ft[script]=fts end - for i=1,#languages do - fts[strip(lower(languages[i]))]=true - end - end - end - sequences[#sequences+1]={ - type=typ, - chain=chain, - flags=flags, - name=name, - subtables=subtables, - markclass=markclass, - features=f, - order=o, - } - else - lookups[name]={ - type=typ, - chain=chain, - flags=flags, - subtables=subtables, - markclass=markclass, - } - end - end - end - end -end -actions["prepare lookups"]=function(data,filename,raw) - local lookups=raw.lookups - if lookups then - data.lookups=lookups - end -end -local function t_uncover(splitter,cache,covers) - local result={} - for n=1,#covers do - local cover=covers[n] - local uncovered=cache[cover] - if not uncovered then - uncovered=lpegmatch(splitter,cover) - cache[cover]=uncovered - end - result[n]=uncovered - end - return result -end -local function s_uncover(splitter,cache,cover) - if cover=="" then - return nil - else - local uncovered=cache[cover] - if not uncovered then - uncovered=lpegmatch(splitter,cover) - cache[cover]=uncovered - end - return { uncovered } - end -end -local function t_hashed(t,cache) - if t then - local ht={} - for i=1,#t do - local ti=t[i] - local tih=cache[ti] - if not tih then - local tn=#ti - if tn==1 then - tih={ [ti[1]]=true } - else - tih={} - for i=1,tn do - tih[ti[i]]=true - end - end - cache[ti]=tih - end - ht[i]=tih - end - return ht - else - return nil - end -end -local function s_hashed(t,cache) - if t then - local tf=t[1] - local nf=#tf - if nf==1 then - return { [tf[1]]=true } - else - local ht={} - for i=1,nf do - ht[i]={ [tf[i]]=true } - end - return ht - end - else - return nil - end -end -local function r_uncover(splitter,cache,cover,replacements) - if cover=="" then - return nil - else - local uncovered=cover[1] - local replaced=cache[replacements] - if not replaced then - replaced=lpegmatch(splitter,replacements) - cache[replacements]=replaced - end - local nu,nr=#uncovered,#replaced - local r={} - if nu==nr then - for i=1,nu do - r[uncovered[i]]=replaced[i] - end - end - return r - end -end -actions["reorganize lookups"]=function(data,filename,raw) - if data.lookups then - local splitter=data.helpers.tounicodetable - local t_u_cache={} - local s_u_cache=t_u_cache - local t_h_cache={} - local s_h_cache=t_h_cache - local r_u_cache={} - for _,lookup in next,data.lookups do - local rules=lookup.rules - if rules then - local format=lookup.format - if format=="class" then - local before_class=lookup.before_class - if before_class then - before_class=t_uncover(splitter,t_u_cache,reversed(before_class)) - end - local current_class=lookup.current_class - if current_class then - current_class=t_uncover(splitter,t_u_cache,current_class) - end - local after_class=lookup.after_class - if after_class then - after_class=t_uncover(splitter,t_u_cache,after_class) - end - for i=1,#rules do - local rule=rules[i] - local class=rule.class - local before=class.before - if before then - for i=1,#before do - before[i]=before_class[before[i]] or {} - end - rule.before=t_hashed(before,t_h_cache) - end - local current=class.current - local lookups=rule.lookups - if current then - for i=1,#current do - current[i]=current_class[current[i]] or {} - if lookups and not lookups[i] then - lookups[i]="" - end - end - 
rule.current=t_hashed(current,t_h_cache) - end - local after=class.after - if after then - for i=1,#after do - after[i]=after_class[after[i]] or {} - end - rule.after=t_hashed(after,t_h_cache) - end - rule.class=nil - end - lookup.before_class=nil - lookup.current_class=nil - lookup.after_class=nil - lookup.format="coverage" - elseif format=="coverage" then - for i=1,#rules do - local rule=rules[i] - local coverage=rule.coverage - if coverage then - local before=coverage.before - if before then - before=t_uncover(splitter,t_u_cache,reversed(before)) - rule.before=t_hashed(before,t_h_cache) - end - local current=coverage.current - if current then - current=t_uncover(splitter,t_u_cache,current) - local lookups=rule.lookups - if lookups then - for i=1,#current do - if not lookups[i] then - lookups[i]="" - end - end - end - rule.current=t_hashed(current,t_h_cache) - end - local after=coverage.after - if after then - after=t_uncover(splitter,t_u_cache,after) - rule.after=t_hashed(after,t_h_cache) - end - rule.coverage=nil - end - end - elseif format=="reversecoverage" then - for i=1,#rules do - local rule=rules[i] - local reversecoverage=rule.reversecoverage - if reversecoverage then - local before=reversecoverage.before - if before then - before=t_uncover(splitter,t_u_cache,reversed(before)) - rule.before=t_hashed(before,t_h_cache) - end - local current=reversecoverage.current - if current then - current=t_uncover(splitter,t_u_cache,current) - rule.current=t_hashed(current,t_h_cache) - end - local after=reversecoverage.after - if after then - after=t_uncover(splitter,t_u_cache,after) - rule.after=t_hashed(after,t_h_cache) - end - local replacements=reversecoverage.replacements - if replacements then - rule.replacements=r_uncover(splitter,r_u_cache,current,replacements) - end - rule.reversecoverage=nil - end - end - elseif format=="glyphs" then - for i=1,#rules do - local rule=rules[i] - local glyphs=rule.glyphs - if glyphs then - local fore=glyphs.fore - if fore and fore~="" then - fore=s_uncover(splitter,s_u_cache,fore) - rule.after=s_hashed(fore,s_h_cache) - end - local back=glyphs.back - if back then - back=s_uncover(splitter,s_u_cache,back) - rule.before=s_hashed(back,s_h_cache) - end - local names=glyphs.names - if names then - names=s_uncover(splitter,s_u_cache,names) - rule.current=s_hashed(names,s_h_cache) - end - rule.glyphs=nil - local lookups=rule.lookups - if lookups then - for i=1,#names do - if not lookups[i] then - lookups[i]="" - end - end - end - end - end - end - end - end - end -end -local function check_variants(unicode,the_variants,splitter,unicodes) - local variants=the_variants.variants - if variants then - local glyphs=lpegmatch(splitter,variants) - local done={ [unicode]=true } - local n=0 - for i=1,#glyphs do - local g=glyphs[i] - if done[g] then - if i>1 then - report_otf("skipping cyclic reference %U in math variant %U",g,unicode) - end - else - if n==0 then - n=1 - variants={ g } - else - n=n+1 - variants[n]=g - end - done[g]=true - end - end - if n==0 then - variants=nil - end - end - local parts=the_variants.parts - if parts then - local p=#parts - if p>0 then - for i=1,p do - local pi=parts[i] - pi.glyph=unicodes[pi.component] or 0 - pi.component=nil - end - else - parts=nil - end - end - local italic_correction=the_variants.italic_correction - if italic_correction and italic_correction==0 then - italic_correction=nil - end - return variants,parts,italic_correction -end -actions["analyze math"]=function(data,filename,raw) - if raw.math then - 
data.metadata.math=raw.math - local unicodes=data.resources.unicodes - local splitter=data.helpers.tounicodetable - for unicode,description in next,data.descriptions do - local glyph=description.glyph - local mathkerns=glyph.mathkern - local horiz_variants=glyph.horiz_variants - local vert_variants=glyph.vert_variants - local top_accent=glyph.top_accent - if mathkerns or horiz_variants or vert_variants or top_accent then - local math={} - if top_accent then - math.top_accent=top_accent - end - if mathkerns then - for k,v in next,mathkerns do - if not next(v) then - mathkerns[k]=nil - else - for k,v in next,v do - if v==0 then - k[v]=nil - end - end - end - end - math.kerns=mathkerns - end - if horiz_variants then - math.horiz_variants,math.horiz_parts,math.horiz_italic_correction=check_variants(unicode,horiz_variants,splitter,unicodes) - end - if vert_variants then - math.vert_variants,math.vert_parts,math.vert_italic_correction=check_variants(unicode,vert_variants,splitter,unicodes) - end - local italic_correction=description.italic - if italic_correction and italic_correction~=0 then - math.italic_correction=italic_correction - end - description.math=math - end - end - end -end -actions["reorganize glyph kerns"]=function(data,filename,raw) - local descriptions=data.descriptions - local resources=data.resources - local unicodes=resources.unicodes - for unicode,description in next,descriptions do - local kerns=description.glyph.kerns - if kerns then - local newkerns={} - for k,kern in next,kerns do - local name=kern.char - local offset=kern.off - local lookup=kern.lookup - if name and offset and lookup then - local unicode=unicodes[name] - if unicode then - if type(lookup)=="table" then - for l=1,#lookup do - local lookup=lookup[l] - local lookupkerns=newkerns[lookup] - if lookupkerns then - lookupkerns[unicode]=offset - else - newkerns[lookup]={ [unicode]=offset } - end - end - else - local lookupkerns=newkerns[lookup] - if lookupkerns then - lookupkerns[unicode]=offset - else - newkerns[lookup]={ [unicode]=offset } - end - end - elseif trace_loading then - report_otf("problems with unicode %a of kern %a of glyph %U",name,k,unicode) - end - end - end - description.kerns=newkerns - end - end -end -actions["merge kern classes"]=function(data,filename,raw) - local gposlist=raw.gpos - if gposlist then - local descriptions=data.descriptions - local resources=data.resources - local unicodes=resources.unicodes - local splitter=data.helpers.tounicodetable - local ignored=0 - local blocked=0 - for gp=1,#gposlist do - local gpos=gposlist[gp] - local subtables=gpos.subtables - if subtables then - local first_done={} - local split={} - for s=1,#subtables do - local subtable=subtables[s] - local kernclass=subtable.kernclass - local lookup=subtable.lookup or subtable.name - if kernclass then - if #kernclass>0 then - kernclass=kernclass[1] - lookup=type(kernclass.lookup)=="string" and kernclass.lookup or lookup - report_otf("fixing kernclass table of lookup %a",lookup) - end - local firsts=kernclass.firsts - local seconds=kernclass.seconds - local offsets=kernclass.offsets - for n,s in next,firsts do - split[s]=split[s] or lpegmatch(splitter,s) - end - local maxseconds=0 - for n,s in next,seconds do - if n>maxseconds then - maxseconds=n - end - split[s]=split[s] or lpegmatch(splitter,s) - end - for fk=1,#firsts do - local fv=firsts[fk] - local splt=split[fv] - if splt then - local extrakerns={} - local baseoffset=(fk-1)*maxseconds - for sk=2,maxseconds do - local sv=seconds[sk] - local splt=split[sv] - 
if splt then - local offset=offsets[baseoffset+sk] - if offset then - for i=1,#splt do - extrakerns[splt[i]]=offset - end - end - end - end - for i=1,#splt do - local first_unicode=splt[i] - if first_done[first_unicode] then - report_otf("lookup %a: ignoring further kerns of %C",lookup,first_unicode) - blocked=blocked+1 - else - first_done[first_unicode]=true - local description=descriptions[first_unicode] - if description then - local kerns=description.kerns - if not kerns then - kerns={} - description.kerns=kerns - end - local lookupkerns=kerns[lookup] - if not lookupkerns then - lookupkerns={} - kerns[lookup]=lookupkerns - end - if overloadkerns then - for second_unicode,kern in next,extrakerns do - lookupkerns[second_unicode]=kern - end - else - for second_unicode,kern in next,extrakerns do - local k=lookupkerns[second_unicode] - if not k then - lookupkerns[second_unicode]=kern - elseif k~=kern then - if trace_loading then - report_otf("lookup %a: ignoring overload of kern between %C and %C, rejecting %a, keeping %a",lookup,first_unicode,second_unicode,k,kern) - end - ignored=ignored+1 - end - end - end - elseif trace_loading then - report_otf("no glyph data for %U",first_unicode) - end - end - end - end - end - subtable.kernclass={} - end - end - end - end - if ignored>0 then - report_otf("%s kern overloads ignored",ignored) - end - if blocked>0 then - report_otf("%s succesive kerns blocked",blocked) - end - end -end -actions["check glyphs"]=function(data,filename,raw) - for unicode,description in next,data.descriptions do - description.glyph=nil - end -end -local valid=(R("\x00\x7E")-S("(){}[]<>%/ \n\r\f\v"))^0*P(-1) -local function valid_ps_name(str) - return str and str~="" and #str<64 and lpegmatch(valid,str) and true or false -end -actions["check metadata"]=function(data,filename,raw) - local metadata=data.metadata - for _,k in next,mainfields do - if valid_fields[k] then - local v=raw[k] - if not metadata[k] then - metadata[k]=v - end - end - end - local ttftables=metadata.ttf_tables - if ttftables then - for i=1,#ttftables do - ttftables[i].data="deleted" - end - end - if metadata.validation_state and table.contains(metadata.validation_state,"bad_ps_fontname") then - local function valid(what) - local names=raw.names - for i=1,#names do - local list=names[i] - local names=list.names - if names then - local name=names[what] - if name and valid_ps_name(name) then - return name - end - end - end - end - local function check(what) - local oldname=metadata[what] - if valid_ps_name(oldname) then - report_otf("ignoring warning %a because %s %a is proper ASCII","bad_ps_fontname",what,oldname) - else - local newname=valid(what) - if not newname then - newname=formatters["bad-%s-%s"](what,file.nameonly(filename)) - end - local warning=formatters["overloading %s from invalid ASCII name %a to %a"](what,oldname,newname) - data.warnings[#data.warnings+1]=warning - report_otf(warning) - metadata[what]=newname - end - end - check("fontname") - check("fullname") - end -end -actions["cleanup tables"]=function(data,filename,raw) - local duplicates=data.resources.duplicates - if duplicates then - for k,v in next,duplicates do - if #v==1 then - duplicates[k]=v[1] - end - end - end - data.resources.indices=nil - data.resources.unicodes=nil - data.helpers=nil -end -actions["reorganize glyph lookups"]=function(data,filename,raw) - local resources=data.resources - local unicodes=resources.unicodes - local descriptions=data.descriptions - local splitter=data.helpers.tounicodelist - local 
lookuptypes=resources.lookuptypes - for unicode,description in next,descriptions do - local lookups=description.glyph.lookups - if lookups then - for tag,lookuplist in next,lookups do - for l=1,#lookuplist do - local lookup=lookuplist[l] - local specification=lookup.specification - local lookuptype=lookup.type - local lt=lookuptypes[tag] - if not lt then - lookuptypes[tag]=lookuptype - elseif lt~=lookuptype then - report_otf("conflicting lookuptypes, %a points to %a and %a",tag,lt,lookuptype) - end - if lookuptype=="ligature" then - lookuplist[l]={ lpegmatch(splitter,specification.components) } - elseif lookuptype=="alternate" then - lookuplist[l]={ lpegmatch(splitter,specification.components) } - elseif lookuptype=="substitution" then - lookuplist[l]=unicodes[specification.variant] - elseif lookuptype=="multiple" then - lookuplist[l]={ lpegmatch(splitter,specification.components) } - elseif lookuptype=="position" then - lookuplist[l]={ - specification.x or 0, - specification.y or 0, - specification.h or 0, - specification.v or 0 - } - elseif lookuptype=="pair" then - local one=specification.offsets[1] - local two=specification.offsets[2] - local paired=unicodes[specification.paired] - if one then - if two then - lookuplist[l]={ paired,{ one.x or 0,one.y or 0,one.h or 0,one.v or 0 },{ two.x or 0,two.y or 0,two.h or 0,two.v or 0 } } - else - lookuplist[l]={ paired,{ one.x or 0,one.y or 0,one.h or 0,one.v or 0 } } - end - else - if two then - lookuplist[l]={ paired,{},{ two.x or 0,two.y or 0,two.h or 0,two.v or 0} } - else - lookuplist[l]={ paired } - end - end - end - end - end - local slookups,mlookups - for tag,lookuplist in next,lookups do - if #lookuplist==1 then - if slookups then - slookups[tag]=lookuplist[1] - else - slookups={ [tag]=lookuplist[1] } - end - else - if mlookups then - mlookups[tag]=lookuplist - else - mlookups={ [tag]=lookuplist } - end - end - end - if slookups then - description.slookups=slookups - end - if mlookups then - description.mlookups=mlookups - end - end - end -end -actions["reorganize glyph anchors"]=function(data,filename,raw) - local descriptions=data.descriptions - for unicode,description in next,descriptions do - local anchors=description.glyph.anchors - if anchors then - for class,data in next,anchors do - if class=="baselig" then - for tag,specification in next,data do - for i=1,#specification do - local si=specification[i] - specification[i]={ si.x or 0,si.y or 0 } - end - end - else - for tag,specification in next,data do - data[tag]={ specification.x or 0,specification.y or 0 } - end - end - end - description.anchors=anchors - end - end -end -local bogusname=(P("uni")+P("u"))*R("AF","09")^4+(P("index")+P("glyph")+S("Ii")*P("dentity")*P(".")^0)*R("09")^1 -local uselessname=(1-bogusname)^0*bogusname -actions["purge names"]=function(data,filename,raw) - if purge_names then - local n=0 - for u,d in next,data.descriptions do - if lpegmatch(uselessname,d.name) then - n=n+1 - d.name=nil - end - end - if n>0 then - report_otf("%s bogus names removed",n) - end - end -end -actions["compact lookups"]=function(data,filename,raw) - if not compact_lookups then - report_otf("not compacting") - return - end - local last=0 - local tags=table.setmetatableindex({}, - function(t,k) - last=last+1 - t[k]=last - return last - end - ) - local descriptions=data.descriptions - local resources=data.resources - for u,d in next,descriptions do - local slookups=d.slookups - if type(slookups)=="table" then - local s={} - for k,v in next,slookups do - s[tags[k]]=v - end - 
d.slookups=s - end - local mlookups=d.mlookups - if type(mlookups)=="table" then - local m={} - for k,v in next,mlookups do - m[tags[k]]=v - end - d.mlookups=m - end - local kerns=d.kerns - if type(kerns)=="table" then - local t={} - for k,v in next,kerns do - t[tags[k]]=v - end - d.kerns=t - end - end - local lookups=data.lookups - if lookups then - local l={} - for k,v in next,lookups do - local rules=v.rules - if rules then - for i=1,#rules do - local l=rules[i].lookups - if type(l)=="table" then - for i=1,#l do - l[i]=tags[l[i]] - end - end - end - end - l[tags[k]]=v - end - data.lookups=l - end - local lookups=resources.lookups - if lookups then - local l={} - for k,v in next,lookups do - local s=v.subtables - if type(s)=="table" then - for i=1,#s do - s[i]=tags[s[i]] - end - end - l[tags[k]]=v - end - resources.lookups=l - end - local sequences=resources.sequences - if sequences then - for i=1,#sequences do - local s=sequences[i] - local n=s.name - if n then - s.name=tags[n] - end - local t=s.subtables - if type(t)=="table" then - for i=1,#t do - t[i]=tags[t[i]] - end - end - end - end - local lookuptypes=resources.lookuptypes - if lookuptypes then - local l={} - for k,v in next,lookuptypes do - l[tags[k]]=v - end - resources.lookuptypes=l - end - local anchor_to_lookup=resources.anchor_to_lookup - if anchor_to_lookup then - for anchor,lookups in next,anchor_to_lookup do - local l={} - for lookup,value in next,lookups do - l[tags[lookup]]=value - end - anchor_to_lookup[anchor]=l - end - end - local lookup_to_anchor=resources.lookup_to_anchor - if lookup_to_anchor then - local l={} - for lookup,value in next,lookup_to_anchor do - l[tags[lookup]]=value - end - resources.lookup_to_anchor=l - end - tags=table.swapped(tags) - report_otf("%s lookup tags compacted",#tags) - resources.lookuptags=tags -end -function otf.setfeatures(tfmdata,features) - local okay=constructors.initializefeatures("otf",tfmdata,features,trace_features,report_otf) - if okay then - return constructors.collectprocessors("otf",tfmdata,features,trace_features,report_otf) - else - return {} - end -end -local function copytotfm(data,cache_id) - if data then - local metadata=data.metadata - local warnings=data.warnings - local resources=data.resources - local properties=derivetable(data.properties) - local descriptions=derivetable(data.descriptions) - local goodies=derivetable(data.goodies) - local characters={} - local parameters={} - local mathparameters={} - local pfminfo=metadata.pfminfo or {} - local resources=data.resources - local unicodes=resources.unicodes - local spaceunits=500 - local spacer="space" - local designsize=metadata.designsize or metadata.design_size or 100 - local mathspecs=metadata.math - if designsize==0 then - designsize=100 - end - if mathspecs then - for name,value in next,mathspecs do - mathparameters[name]=value - end - end - for unicode,_ in next,data.descriptions do - characters[unicode]={} - end - if mathspecs then - for unicode,character in next,characters do - local d=descriptions[unicode] - local m=d.math - if m then - local variants=m.horiz_variants - local parts=m.horiz_parts - if variants then - local c=character - for i=1,#variants do - local un=variants[i] - c.next=un - c=characters[un] - end - c.horiz_variants=parts - elseif parts then - character.horiz_variants=parts - end - local variants=m.vert_variants - local parts=m.vert_parts - if variants then - local c=character - for i=1,#variants do - local un=variants[i] - c.next=un - c=characters[un] - end - c.vert_variants=parts - 
elseif parts then - character.vert_variants=parts - end - local italic_correction=m.vert_italic_correction - if italic_correction then - character.vert_italic_correction=italic_correction - end - local top_accent=m.top_accent - if top_accent then - character.top_accent=top_accent - end - local kerns=m.kerns - if kerns then - character.mathkerns=kerns - end - end - end - end - local filename=constructors.checkedfilename(resources) - local fontname=metadata.fontname - local fullname=metadata.fullname or fontname - local psname=fontname or fullname - local units=metadata.units_per_em or 1000 - if units==0 then - units=1000 - metadata.units_per_em=1000 - report_otf("changing %a units to %a",0,units) - end - local monospaced=metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose.proportion=="Monospaced") - local charwidth=pfminfo.avgwidth - local charxheight=pfminfo.os2_xheight and pfminfo.os2_xheight>0 and pfminfo.os2_xheight - local italicangle=metadata.italicangle - properties.monospaced=monospaced - parameters.italicangle=italicangle - parameters.charwidth=charwidth - parameters.charxheight=charxheight - local space=0x0020 - local emdash=0x2014 - if monospaced then - if descriptions[space] then - spaceunits,spacer=descriptions[space].width,"space" - end - if not spaceunits and descriptions[emdash] then - spaceunits,spacer=descriptions[emdash].width,"emdash" - end - if not spaceunits and charwidth then - spaceunits,spacer=charwidth,"charwidth" - end - else - if descriptions[space] then - spaceunits,spacer=descriptions[space].width,"space" - end - if not spaceunits and descriptions[emdash] then - spaceunits,spacer=descriptions[emdash].width/2,"emdash/2" - end - if not spaceunits and charwidth then - spaceunits,spacer=charwidth,"charwidth" - end - end - spaceunits=tonumber(spaceunits) or 500 - parameters.slant=0 - parameters.space=spaceunits - parameters.space_stretch=units/2 - parameters.space_shrink=1*units/3 - parameters.x_height=2*units/5 - parameters.quad=units - if spaceunits<2*units/5 then - end - if italicangle and italicangle~=0 then - parameters.italicangle=italicangle - parameters.italicfactor=math.cos(math.rad(90+italicangle)) - parameters.slant=- math.tan(italicangle*math.pi/180) - end - if monospaced then - parameters.space_stretch=0 - parameters.space_shrink=0 - elseif syncspace then - parameters.space_stretch=spaceunits/2 - parameters.space_shrink=spaceunits/3 - end - parameters.extra_space=parameters.space_shrink - if charxheight then - parameters.x_height=charxheight - else - local x=0x0078 - if x then - local x=descriptions[x] - if x then - parameters.x_height=x.height - end - end - end - parameters.designsize=(designsize/10)*65536 - parameters.ascender=abs(metadata.ascent or 0) - parameters.descender=abs(metadata.descent or 0) - parameters.units=units - properties.space=spacer - properties.encodingbytes=2 - properties.format=data.format or otf_format(filename) or formats.otf - properties.noglyphnames=true - properties.filename=filename - properties.fontname=fontname - properties.fullname=fullname - properties.psname=psname - properties.name=filename or fullname - if warnings and #warnings>0 then - report_otf("warnings for font: %s",filename) - report_otf() - for i=1,#warnings do - report_otf(" %s",warnings[i]) - end - report_otf() - end - return { - characters=characters, - descriptions=descriptions, - parameters=parameters, - mathparameters=mathparameters, - resources=resources, - properties=properties, - goodies=goodies, - warnings=warnings, - } - end -end -local 
function otftotfm(specification) - local cache_id=specification.hash - local tfmdata=containers.read(constructors.cache,cache_id) - if not tfmdata then - local name=specification.name - local sub=specification.sub - local filename=specification.filename - local features=specification.features.normal - local rawdata=otf.load(filename,sub,features and features.featurefile) - if rawdata and next(rawdata) then - local descriptions=rawdata.descriptions - local duplicates=rawdata.resources.duplicates - if duplicates then - local nofduplicates,nofduplicated=0,0 - for parent,list in next,duplicates do - if type(list)=="table" then - local n=#list - for i=1,n do - local unicode=list[i] - if not descriptions[unicode] then - descriptions[unicode]=descriptions[parent] - nofduplicated=nofduplicated+1 - end - end - nofduplicates=nofduplicates+n - else - if not descriptions[list] then - descriptions[list]=descriptions[parent] - nofduplicated=nofduplicated+1 - end - nofduplicates=nofduplicates+1 - end - end - if trace_otf and nofduplicated~=nofduplicates then - report_otf("%i extra duplicates copied out of %i",nofduplicated,nofduplicates) - end - end - rawdata.lookuphash={} - tfmdata=copytotfm(rawdata,cache_id) - if tfmdata and next(tfmdata) then - local features=constructors.checkedfeatures("otf",features) - local shared=tfmdata.shared - if not shared then - shared={} - tfmdata.shared=shared - end - shared.rawdata=rawdata - shared.dynamics={} - tfmdata.changed={} - shared.features=features - shared.processes=otf.setfeatures(tfmdata,features) - end - end - containers.write(constructors.cache,cache_id,tfmdata) - end - return tfmdata -end -local function read_from_otf(specification) - local tfmdata=otftotfm(specification) - if tfmdata then - tfmdata.properties.name=specification.name - tfmdata.properties.sub=specification.sub - tfmdata=constructors.scale(tfmdata,specification) - local allfeatures=tfmdata.shared.features or specification.features.normal - constructors.applymanipulators("otf",tfmdata,allfeatures,trace_features,report_otf) - constructors.setname(tfmdata,specification) - fonts.loggers.register(tfmdata,file.suffix(specification.filename),specification) - end - return tfmdata -end -local function checkmathsize(tfmdata,mathsize) - local mathdata=tfmdata.shared.rawdata.metadata.math - local mathsize=tonumber(mathsize) - if mathdata then - local parameters=tfmdata.parameters - parameters.scriptpercentage=mathdata.ScriptPercentScaleDown - parameters.scriptscriptpercentage=mathdata.ScriptScriptPercentScaleDown - parameters.mathsize=mathsize - end -end -registerotffeature { - name="mathsize", - description="apply mathsize specified in the font", - initializers={ - base=checkmathsize, - node=checkmathsize, - } -} -function otf.collectlookups(rawdata,kind,script,language) - local sequences=rawdata.resources.sequences - if sequences then - local featuremap,featurelist={},{} - for s=1,#sequences do - local sequence=sequences[s] - local features=sequence.features - features=features and features[kind] - features=features and (features[script] or features[default] or features[wildcard]) - features=features and (features[language] or features[default] or features[wildcard]) - if features then - local subtables=sequence.subtables - if subtables then - for s=1,#subtables do - local ss=subtables[s] - if not featuremap[s] then - featuremap[ss]=true - featurelist[#featurelist+1]=ss - end - end - end - end - end - if #featurelist>0 then - return featuremap,featurelist - end - end - return nil,nil -end -local 
function check_otf(forced,specification,suffix) - local name=specification.name - if forced then - name=specification.forcedname - end - local fullname=findbinfile(name,suffix) or "" - if fullname=="" then - fullname=fonts.names.getfilename(name,suffix) or "" - end - if fullname~="" and not fonts.names.ignoredfile(fullname) then - specification.filename=fullname - return read_from_otf(specification) - end -end -local function opentypereader(specification,suffix) - local forced=specification.forced or "" - if formats[forced] then - return check_otf(true,specification,forced) - else - return check_otf(false,specification,suffix) - end -end -readers.opentype=opentypereader -function readers.otf (specification) return opentypereader(specification,"otf") end -function readers.ttf (specification) return opentypereader(specification,"ttf") end -function readers.ttc (specification) return opentypereader(specification,"ttf") end -function readers.dfont(specification) return opentypereader(specification,"ttf") end -function otf.scriptandlanguage(tfmdata,attr) - local properties=tfmdata.properties - return properties.script or "dflt",properties.language or "dflt" -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-otb']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local concat=table.concat -local format,gmatch,gsub,find,match,lower,strip=string.format,string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip -local type,next,tonumber,tostring,rawget=type,next,tonumber,tostring,rawget -local lpegmatch=lpeg.match -local utfchar=utf.char -local trace_baseinit=false trackers.register("otf.baseinit",function(v) trace_baseinit=v end) -local trace_singles=false trackers.register("otf.singles",function(v) trace_singles=v end) -local trace_multiples=false trackers.register("otf.multiples",function(v) trace_multiples=v end) -local trace_alternatives=false trackers.register("otf.alternatives",function(v) trace_alternatives=v end) -local trace_ligatures=false trackers.register("otf.ligatures",function(v) trace_ligatures=v end) -local trace_ligatures_detail=false trackers.register("otf.ligatures.detail",function(v) trace_ligatures_detail=v end) -local trace_kerns=false trackers.register("otf.kerns",function(v) trace_kerns=v end) -local trace_preparing=false trackers.register("otf.preparing",function(v) trace_preparing=v end) -local report_prepare=logs.reporter("fonts","otf prepare") -local fonts=fonts -local otf=fonts.handlers.otf -local otffeatures=otf.features -local registerotffeature=otffeatures.register -otf.defaultbasealternate="none" -local wildcard="*" -local default="dflt" -local formatters=string.formatters -local f_unicode=formatters["%U"] -local f_uniname=formatters["%U (%s)"] -local f_unilist=formatters["% t (% t)"] -local function gref(descriptions,n) - if type(n)=="number" then - local name=descriptions[n].name - if name then - return f_uniname(n,name) - else - return f_unicode(n) - end - elseif n then - local num,nam,j={},{},0 - for i=1,#n do - local ni=n[i] - if tonumber(ni) then - j=j+1 - local di=descriptions[ni] - num[j]=f_unicode(ni) - nam[j]=di and di.name or "-" - end - end - return f_unilist(num,nam) - else - return "" - end -end -local function cref(feature,lookuptags,lookupname) - if lookupname then - return 
formatters["feature %a, lookup %a"](feature,lookuptags[lookupname]) - else - return formatters["feature %a"](feature) - end -end -local function report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,comment) - report_prepare("%s: base alternate %s => %s (%S => %S)", - cref(feature,lookuptags,lookupname), - gref(descriptions,unicode), - replacement and gref(descriptions,replacement), - value, - comment) -end -local function report_substitution(feature,lookuptags,lookupname,descriptions,unicode,substitution) - report_prepare("%s: base substitution %s => %S", - cref(feature,lookuptags,lookupname), - gref(descriptions,unicode), - gref(descriptions,substitution)) -end -local function report_ligature(feature,lookuptags,lookupname,descriptions,unicode,ligature) - report_prepare("%s: base ligature %s => %S", - cref(feature,lookuptags,lookupname), - gref(descriptions,ligature), - gref(descriptions,unicode)) -end -local function report_kern(feature,lookuptags,lookupname,descriptions,unicode,otherunicode,value) - report_prepare("%s: base kern %s + %s => %S", - cref(feature,lookuptags,lookupname), - gref(descriptions,unicode), - gref(descriptions,otherunicode), - value) -end -local basemethods={} -local basemethod="" -local function applybasemethod(what,...) - local m=basemethods[basemethod][what] - if m then - return m(...) - end -end -local basehash,basehashes,applied={},1,{} -local function registerbasehash(tfmdata) - local properties=tfmdata.properties - local hash=concat(applied," ") - local base=basehash[hash] - if not base then - basehashes=basehashes+1 - base=basehashes - basehash[hash]=base - end - properties.basehash=base - properties.fullname=properties.fullname.."-"..base - applied={} -end -local function registerbasefeature(feature,value) - applied[#applied+1]=feature.."="..tostring(value) -end -local trace=false -local function finalize_ligatures(tfmdata,ligatures) - local nofligatures=#ligatures - if nofligatures>0 then - local characters=tfmdata.characters - local descriptions=tfmdata.descriptions - local resources=tfmdata.resources - local unicodes=resources.unicodes - local private=resources.private - local alldone=false - while not alldone do - local done=0 - for i=1,nofligatures do - local ligature=ligatures[i] - if ligature then - local unicode,lookupdata=ligature[1],ligature[2] - if trace_ligatures_detail then - report_prepare("building % a into %a",lookupdata,unicode) - end - local size=#lookupdata - local firstcode=lookupdata[1] - local firstdata=characters[firstcode] - local okay=false - if firstdata then - local firstname="ctx_"..firstcode - for i=1,size-1 do - local firstdata=characters[firstcode] - if not firstdata then - firstcode=private - if trace_ligatures_detail then - report_prepare("defining %a as %a",firstname,firstcode) - end - unicodes[firstname]=firstcode - firstdata={ intermediate=true,ligatures={} } - characters[firstcode]=firstdata - descriptions[firstcode]={ name=firstname } - private=private+1 - end - local target - local secondcode=lookupdata[i+1] - local secondname=firstname.."_"..secondcode - if i==size-1 then - target=unicode - if not rawget(unicodes,secondname) then - unicodes[secondname]=unicode - end - okay=true - else - target=rawget(unicodes,secondname) - if not target then - break - end - end - if trace_ligatures_detail then - report_prepare("codes (%a,%a) + (%a,%a) -> %a",firstname,firstcode,secondname,secondcode,target) - end - local firstligs=firstdata.ligatures - if firstligs then - firstligs[secondcode]={ 
char=target } - else - firstdata.ligatures={ [secondcode]={ char=target } } - end - firstcode=target - firstname=secondname - end - elseif trace_ligatures_detail then - report_prepare("no glyph (%a,%a) for building %a",firstname,firstcode,target) - end - if okay then - ligatures[i]=false - done=done+1 - end - end - end - alldone=done==0 - end - if trace_ligatures_detail then - for k,v in table.sortedhash(characters) do - if v.ligatures then - table.print(v,k) - end - end - end - resources.private=private - return true - end -end -local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist) - local characters=tfmdata.characters - local descriptions=tfmdata.descriptions - local resources=tfmdata.resources - local properties=tfmdata.properties - local changed=tfmdata.changed - local lookuphash=resources.lookuphash - local lookuptypes=resources.lookuptypes - local lookuptags=resources.lookuptags - local ligatures={} - local alternate=tonumber(value) or true and 1 - local defaultalt=otf.defaultbasealternate - local trace_singles=trace_baseinit and trace_singles - local trace_alternatives=trace_baseinit and trace_alternatives - local trace_ligatures=trace_baseinit and trace_ligatures - local actions={ - substitution=function(lookupdata,lookuptags,lookupname,description,unicode) - if trace_singles then - report_substitution(feature,lookuptags,lookupname,descriptions,unicode,lookupdata) - end - changed[unicode]=lookupdata - end, - alternate=function(lookupdata,lookuptags,lookupname,description,unicode) - local replacement=lookupdata[alternate] - if replacement then - changed[unicode]=replacement - if trace_alternatives then - report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"normal") - end - elseif defaultalt=="first" then - replacement=lookupdata[1] - changed[unicode]=replacement - if trace_alternatives then - report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt) - end - elseif defaultalt=="last" then - replacement=lookupdata[#data] - if trace_alternatives then - report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt) - end - else - if trace_alternatives then - report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"unknown") - end - end - end, - ligature=function(lookupdata,lookuptags,lookupname,description,unicode) - if trace_ligatures then - report_ligature(feature,lookuptags,lookupname,descriptions,unicode,lookupdata) - end - ligatures[#ligatures+1]={ unicode,lookupdata } - end, - } - for unicode,character in next,characters do - local description=descriptions[unicode] - local lookups=description.slookups - if lookups then - for l=1,#lookuplist do - local lookupname=lookuplist[l] - local lookupdata=lookups[lookupname] - if lookupdata then - local lookuptype=lookuptypes[lookupname] - local action=actions[lookuptype] - if action then - action(lookupdata,lookuptags,lookupname,description,unicode) - end - end - end - end - local lookups=description.mlookups - if lookups then - for l=1,#lookuplist do - local lookupname=lookuplist[l] - local lookuplist=lookups[lookupname] - if lookuplist then - local lookuptype=lookuptypes[lookupname] - local action=actions[lookuptype] - if action then - for i=1,#lookuplist do - action(lookuplist[i],lookuptags,lookupname,description,unicode) - end - end - end - end - end - end - properties.hasligatures=finalize_ligatures(tfmdata,ligatures) -end -local function 
preparepositionings(tfmdata,feature,value,validlookups,lookuplist) - local characters=tfmdata.characters - local descriptions=tfmdata.descriptions - local resources=tfmdata.resources - local properties=tfmdata.properties - local lookuptags=resources.lookuptags - local sharedkerns={} - local traceindeed=trace_baseinit and trace_kerns - local haskerns=false - for unicode,character in next,characters do - local description=descriptions[unicode] - local rawkerns=description.kerns - if rawkerns then - local s=sharedkerns[rawkerns] - if s==false then - elseif s then - character.kerns=s - else - local newkerns=character.kerns - local done=false - for l=1,#lookuplist do - local lookup=lookuplist[l] - local kerns=rawkerns[lookup] - if kerns then - for otherunicode,value in next,kerns do - if value==0 then - elseif not newkerns then - newkerns={ [otherunicode]=value } - done=true - if traceindeed then - report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,value) - end - elseif not newkerns[otherunicode] then - newkerns[otherunicode]=value - done=true - if traceindeed then - report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,value) - end - end - end - end - end - if done then - sharedkerns[rawkerns]=newkerns - character.kerns=newkerns - haskerns=true - else - sharedkerns[rawkerns]=false - end - end - end - end - properties.haskerns=haskerns -end -basemethods.independent={ - preparesubstitutions=preparesubstitutions, - preparepositionings=preparepositionings, -} -local function makefake(tfmdata,name,present) - local resources=tfmdata.resources - local private=resources.private - local character={ intermediate=true,ligatures={} } - resources.unicodes[name]=private - tfmdata.characters[private]=character - tfmdata.descriptions[private]={ name=name } - resources.private=private+1 - present[name]=private - return character -end -local function make_1(present,tree,name) - for k,v in next,tree do - if k=="ligature" then - present[name]=v - else - make_1(present,v,name.."_"..k) - end - end -end -local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,done,lookuptags,lookupname) - for k,v in next,tree do - if k=="ligature" then - local character=characters[preceding] - if not character then - if trace_baseinit then - report_prepare("weird ligature in lookup %a, current %C, preceding %C",lookuptags[lookupname],v,preceding) - end - character=makefake(tfmdata,name,present) - end - local ligatures=character.ligatures - if ligatures then - ligatures[unicode]={ char=v } - else - character.ligatures={ [unicode]={ char=v } } - end - if done then - local d=done[lookupname] - if not d then - done[lookupname]={ "dummy",v } - else - d[#d+1]=v - end - end - else - local code=present[name] or unicode - local name=name.."_"..k - make_2(present,tfmdata,characters,v,name,code,k,done,lookuptags,lookupname) - end - end -end -local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist) - local characters=tfmdata.characters - local descriptions=tfmdata.descriptions - local resources=tfmdata.resources - local changed=tfmdata.changed - local lookuphash=resources.lookuphash - local lookuptypes=resources.lookuptypes - local lookuptags=resources.lookuptags - local ligatures={} - local alternate=tonumber(value) or true and 1 - local defaultalt=otf.defaultbasealternate - local trace_singles=trace_baseinit and trace_singles - local trace_alternatives=trace_baseinit and trace_alternatives - local trace_ligatures=trace_baseinit and trace_ligatures - for 
l=1,#lookuplist do - local lookupname=lookuplist[l] - local lookupdata=lookuphash[lookupname] - local lookuptype=lookuptypes[lookupname] - for unicode,data in next,lookupdata do - if lookuptype=="substitution" then - if trace_singles then - report_substitution(feature,lookuptags,lookupname,descriptions,unicode,data) - end - changed[unicode]=data - elseif lookuptype=="alternate" then - local replacement=data[alternate] - if replacement then - changed[unicode]=replacement - if trace_alternatives then - report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"normal") - end - elseif defaultalt=="first" then - replacement=data[1] - changed[unicode]=replacement - if trace_alternatives then - report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt) - end - elseif defaultalt=="last" then - replacement=data[#data] - if trace_alternatives then - report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt) - end - else - if trace_alternatives then - report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"unknown") - end - end - elseif lookuptype=="ligature" then - ligatures[#ligatures+1]={ unicode,data,lookupname } - if trace_ligatures then - report_ligature(feature,lookuptags,lookupname,descriptions,unicode,data) - end - end - end - end - local nofligatures=#ligatures - if nofligatures>0 then - local characters=tfmdata.characters - local present={} - local done=trace_baseinit and trace_ligatures and {} - for i=1,nofligatures do - local ligature=ligatures[i] - local unicode,tree=ligature[1],ligature[2] - make_1(present,tree,"ctx_"..unicode) - end - for i=1,nofligatures do - local ligature=ligatures[i] - local unicode,tree,lookupname=ligature[1],ligature[2],ligature[3] - make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,lookuptags,lookupname) - end - end -end -local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist) - local characters=tfmdata.characters - local descriptions=tfmdata.descriptions - local resources=tfmdata.resources - local properties=tfmdata.properties - local lookuphash=resources.lookuphash - local lookuptags=resources.lookuptags - local traceindeed=trace_baseinit and trace_kerns - for l=1,#lookuplist do - local lookupname=lookuplist[l] - local lookupdata=lookuphash[lookupname] - for unicode,data in next,lookupdata do - local character=characters[unicode] - local kerns=character.kerns - if not kerns then - kerns={} - character.kerns=kerns - end - if traceindeed then - for otherunicode,kern in next,data do - if not kerns[otherunicode] and kern~=0 then - kerns[otherunicode]=kern - report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,kern) - end - end - else - for otherunicode,kern in next,data do - if not kerns[otherunicode] and kern~=0 then - kerns[otherunicode]=kern - end - end - end - end - end -end -local function initializehashes(tfmdata) - nodeinitializers.features(tfmdata) -end -basemethods.shared={ - initializehashes=initializehashes, - preparesubstitutions=preparesubstitutions, - preparepositionings=preparepositionings, -} -basemethod="independent" -local function featuresinitializer(tfmdata,value) - if true then - local starttime=trace_preparing and os.clock() - local features=tfmdata.shared.features - local fullname=tfmdata.properties.fullname or "?" 
- if features then - applybasemethod("initializehashes",tfmdata) - local collectlookups=otf.collectlookups - local rawdata=tfmdata.shared.rawdata - local properties=tfmdata.properties - local script=properties.script - local language=properties.language - local basesubstitutions=rawdata.resources.features.gsub - local basepositionings=rawdata.resources.features.gpos - if basesubstitutions or basepositionings then - local sequences=tfmdata.resources.sequences - for s=1,#sequences do - local sequence=sequences[s] - local sfeatures=sequence.features - if sfeatures then - local order=sequence.order - if order then - for i=1,#order do - local feature=order[i] - local value=features[feature] - if value then - local validlookups,lookuplist=collectlookups(rawdata,feature,script,language) - if not validlookups then - elseif basesubstitutions and basesubstitutions[feature] then - if trace_preparing then - report_prepare("filtering base %s feature %a for %a with value %a","sub",feature,fullname,value) - end - applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist) - registerbasefeature(feature,value) - elseif basepositionings and basepositionings[feature] then - if trace_preparing then - report_prepare("filtering base %a feature %a for %a with value %a","pos",feature,fullname,value) - end - applybasemethod("preparepositionings",tfmdata,feature,value,validlookups,lookuplist) - registerbasefeature(feature,value) - end - end - end - end - end - end - end - registerbasehash(tfmdata) - end - if trace_preparing then - report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,fullname) - end - end -end -registerotffeature { - name="features", - description="features", - default=true, - initializers={ - base=featuresinitializer, - } -} -directives.register("fonts.otf.loader.basemethod",function(v) - if basemethods[v] then - basemethod=v - end -end) - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['node-inj']={ - version=1.001, - comment="companion to node-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files", -} -local next=next -local utfchar=utf.char -local trace_injections=false trackers.register("nodes.injections",function(v) trace_injections=v end) -local report_injections=logs.reporter("nodes","injections") -local attributes,nodes,node=attributes,nodes,node -fonts=fonts -local fontdata=fonts.hashes.identifiers -nodes.injections=nodes.injections or {} -local injections=nodes.injections -local nodecodes=nodes.nodecodes -local glyph_code=nodecodes.glyph -local kern_code=nodecodes.kern -local nuts=nodes.nuts -local nodepool=nuts.pool -local newkern=nodepool.kern -local tonode=nuts.tonode -local tonut=nuts.tonut -local getfield=nuts.getfield -local getnext=nuts.getnext -local getprev=nuts.getprev -local getid=nuts.getid -local getattr=nuts.getattr -local getfont=nuts.getfont -local getsubtype=nuts.getsubtype -local getchar=nuts.getchar -local setfield=nuts.setfield -local setattr=nuts.setattr -local traverse_id=nuts.traverse_id -local insert_node_before=nuts.insert_before -local insert_node_after=nuts.insert_after -local a_kernpair=attributes.private('kernpair') -local a_ligacomp=attributes.private('ligacomp') -local a_markbase=attributes.private('markbase') -local a_markmark=attributes.private('markmark') -local a_markdone=attributes.private('markdone') -local 
a_cursbase=attributes.private('cursbase') -local a_curscurs=attributes.private('curscurs') -local a_cursdone=attributes.private('cursdone') -local unsetvalue=attributes.unsetvalue -function injections.installnewkern(nk) - newkern=nk or newkern -end -local cursives={} -local marks={} -local kerns={} -function injections.reset(n) -end -function injections.setligaindex(n,index) - setattr(n,a_ligacomp,index) -end -function injections.getligaindex(n,default) - return getattr(n,a_ligacomp) or default -end -function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext) - local dx,dy=factor*(exit[1]-entry[1]),factor*(exit[2]-entry[2]) - local ws,wn=tfmstart.width,tfmnext.width - local bound=#cursives+1 - setattr(start,a_cursbase,bound) - setattr(nxt,a_curscurs,bound) - cursives[bound]={ rlmode,dx,dy,ws,wn } - return dx,dy,bound -end -function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr) - local x,y,w,h=factor*spec[1],factor*spec[2],factor*spec[3],factor*spec[4] - if x~=0 or w~=0 or y~=0 or h~=0 then - local bound=getattr(current,a_kernpair) - if bound then - local kb=kerns[bound] - kb[2],kb[3],kb[4],kb[5]=(kb[2] or 0)+x,(kb[3] or 0)+y,(kb[4] or 0)+w,(kb[5] or 0)+h - else - bound=#kerns+1 - setattr(current,a_kernpair,bound) - kerns[bound]={ rlmode,x,y,w,h,r2lflag,tfmchr.width } - end - return x,y,w,h,bound - end - return x,y,w,h -end -function injections.setkern(current,factor,rlmode,x,tfmchr) - local dx=factor*x - if dx~=0 then - local bound=#kerns+1 - setattr(current,a_kernpair,bound) - kerns[bound]={ rlmode,dx } - return dx,bound - else - return 0,0 - end -end -function injections.setmark(start,base,factor,rlmode,ba,ma) - local dx,dy=factor*(ba[1]-ma[1]),factor*(ba[2]-ma[2]) - local bound=getattr(base,a_markbase) - local index=1 - if bound then - local mb=marks[bound] - if mb then - index=#mb+1 - mb[index]={ dx,dy,rlmode } - setattr(start,a_markmark,bound) - setattr(start,a_markdone,index) - return dx,dy,bound - else - report_injections("possible problem, %U is base mark without data (id %a)",getchar(base),bound) - end - end - index=index or 1 - bound=#marks+1 - setattr(base,a_markbase,bound) - setattr(start,a_markmark,bound) - setattr(start,a_markdone,index) - marks[bound]={ [index]={ dx,dy,rlmode } } - return dx,dy,bound -end -local function dir(n) - return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset" -end -local function trace(head) - report_injections("begin run") - for n in traverse_id(glyph_code,head) do - if getsubtype(n)<256 then - local kp=getattr(n,a_kernpair) - local mb=getattr(n,a_markbase) - local mm=getattr(n,a_markmark) - local md=getattr(n,a_markdone) - local cb=getattr(n,a_cursbase) - local cc=getattr(n,a_curscurs) - local char=getchar(n) - report_injections("font %s, char %U, glyph %c",getfont(n),char,char) - if kp then - local k=kerns[kp] - if k[3] then - report_injections(" pairkern: dir %a, x %p, y %p, w %p, h %p",dir(k[1]),k[2],k[3],k[4],k[5]) - else - report_injections(" kern: dir %a, dx %p",dir(k[1]),k[2]) - end - end - if mb then - report_injections(" markbase: bound %a",mb) - end - if mm then - local m=marks[mm] - if mb then - local m=m[mb] - if m then - report_injections(" markmark: bound %a, index %a, dx %p, dy %p",mm,md,m[1],m[2]) - else - report_injections(" markmark: bound %a, missing index",mm) - end - else - m=m[1] - report_injections(" markmark: bound %a, dx %p, dy %p",mm,m and m[1],m and m[2]) - end - end - if cb then - report_injections(" cursbase: bound %a",cb) - end - if cc then - local c=cursives[cc] 
- report_injections(" curscurs: bound %a, dir %a, dx %p, dy %p",cc,dir(c[1]),c[2],c[3]) - end - end - end - report_injections("end run") -end -local function show_result(head) - local current=head - local skipping=false - while current do - local id=getid(current) - if id==glyph_code then - report_injections("char: %C, width %p, xoffset %p, yoffset %p", - getchar(current),getfield(current,"width"),getfield(current,"xoffset"),getfield(current,"yoffset")) - skipping=false - elseif id==kern_code then - report_injections("kern: %p",getfield(current,"kern")) - skipping=false - elseif not skipping then - report_injections() - skipping=true - end - current=getnext(current) - end -end -function injections.handler(head,where,keep) - head=tonut(head) - local has_marks,has_cursives,has_kerns=next(marks),next(cursives),next(kerns) - if has_marks or has_cursives then - if trace_injections then - trace(head) - end - local done,ky,rl,valid,cx,wx,mk,nofvalid=false,{},{},{},{},{},{},0 - if has_kerns then - local nf,tm=nil,nil - for n in traverse_id(glyph_code,head) do - if getsubtype(n)<256 then - nofvalid=nofvalid+1 - valid[nofvalid]=n - local f=getfont(n) - if f~=nf then - nf=f - tm=fontdata[nf].resources.marks - end - if tm then - mk[n]=tm[getchar(n)] - end - local k=getattr(n,a_kernpair) - if k then - local kk=kerns[k] - if kk then - local x,y,w,h=kk[2] or 0,kk[3] or 0,kk[4] or 0,kk[5] or 0 - local dy=y-h - if dy~=0 then - ky[n]=dy - end - if w~=0 or x~=0 then - wx[n]=kk - end - rl[n]=kk[1] - end - end - end - end - else - local nf,tm=nil,nil - for n in traverse_id(glyph_code,head) do - if getsubtype(n)<256 then - nofvalid=nofvalid+1 - valid[nofvalid]=n - local f=getfont(n) - if f~=nf then - nf=f - tm=fontdata[nf].resources.marks - end - if tm then - mk[n]=tm[getchar(n)] - end - end - end - end - if nofvalid>0 then - local cx={} - if has_kerns and next(ky) then - for n,k in next,ky do - setfield(n,"yoffset",k) - end - end - if has_cursives then - local p_cursbase,p=nil,nil - local t,d,maxt={},{},0 - for i=1,nofvalid do - local n=valid[i] - if not mk[n] then - local n_cursbase=getattr(n,a_cursbase) - if p_cursbase then - local n_curscurs=getattr(n,a_curscurs) - if p_cursbase==n_curscurs then - local c=cursives[n_curscurs] - if c then - local rlmode,dx,dy,ws,wn=c[1],c[2],c[3],c[4],c[5] - if rlmode>=0 then - dx=dx-ws - else - dx=dx+wn - end - if dx~=0 then - cx[n]=dx - rl[n]=rlmode - end - dy=-dy - maxt=maxt+1 - t[maxt]=p - d[maxt]=dy - else - maxt=0 - end - end - elseif maxt>0 then - local ny=getfield(n,"yoffset") - for i=maxt,1,-1 do - ny=ny+d[i] - local ti=t[i] - setfield(ti,"yoffset",getfield(ti,"yoffset")+ny) - end - maxt=0 - end - if not n_cursbase and maxt>0 then - local ny=getfield(n,"yoffset") - for i=maxt,1,-1 do - ny=ny+d[i] - local ti=t[i] - setfield(ti,"yoffset",ny) - end - maxt=0 - end - p_cursbase,p=n_cursbase,n - end - end - if maxt>0 then - local ny=getfield(n,"yoffset") - for i=maxt,1,-1 do - ny=ny+d[i] - local ti=t[i] - setfield(ti,"yoffset",ny) - end - maxt=0 - end - if not keep then - cursives={} - end - end - if has_marks then - for i=1,nofvalid do - local p=valid[i] - local p_markbase=getattr(p,a_markbase) - if p_markbase then - local mrks=marks[p_markbase] - local nofmarks=#mrks - for n in traverse_id(glyph_code,getnext(p)) do - local n_markmark=getattr(n,a_markmark) - if p_markbase==n_markmark then - local index=getattr(n,a_markdone) or 1 - local d=mrks[index] - if d then - local rlmode=d[3] - local k=wx[p] - local px=getfield(p,"xoffset") - local ox=0 - if k then - local x=k[2] - 
local w=k[4] - if w then - if rlmode and rlmode>=0 then - ox=px-getfield(p,"width")+d[1]-(w-x) - else - ox=px-d[1]-x - end - else - if rlmode and rlmode>=0 then - ox=px-getfield(p,"width")+d[1] - else - ox=px-d[1]-x - end - end - else - local wp=getfield(p,"width") - local wn=getfield(n,"width") - if rlmode and rlmode>=0 then - ox=px-wp+d[1] - else - ox=px-d[1] - end - if wn~=0 then - insert_node_before(head,n,newkern(-wn/2)) - insert_node_after(head,n,newkern(-wn/2)) - end - end - setfield(n,"xoffset",ox) - local py=getfield(p,"yoffset") - local oy=0 - if mk[p] then - oy=py+d[2] - else - oy=getfield(n,"yoffset")+py+d[2] - end - setfield(n,"yoffset",oy) - if nofmarks==1 then - break - else - nofmarks=nofmarks-1 - end - end - elseif not n_markmark then - break - else - end - end - end - end - if not keep then - marks={} - end - end - if next(wx) then - for n,k in next,wx do - local x=k[2] - local w=k[4] - if w then - local rl=k[1] - local wx=w-x - if rl<0 then - if wx~=0 then - insert_node_before(head,n,newkern(wx)) - end - if x~=0 then - insert_node_after (head,n,newkern(x)) - end - else - if x~=0 then - insert_node_before(head,n,newkern(x)) - end - if wx~=0 then - insert_node_after (head,n,newkern(wx)) - end - end - elseif x~=0 then - insert_node_before(head,n,newkern(x)) - end - end - end - if next(cx) then - for n,k in next,cx do - if k~=0 then - local rln=rl[n] - if rln and rln<0 then - insert_node_before(head,n,newkern(-k)) - else - insert_node_before(head,n,newkern(k)) - end - end - end - end - if not keep then - kerns={} - end - return tonode(head),true - elseif not keep then - kerns,cursives,marks={},{},{} - end - elseif has_kerns then - if trace_injections then - trace(head) - end - for n in traverse_id(glyph_code,head) do - if getsubtype(n)<256 then - local k=getattr(n,a_kernpair) - if k then - local kk=kerns[k] - if kk then - local rl,x,y,w=kk[1],kk[2] or 0,kk[3],kk[4] - if y and y~=0 then - setfield(n,"yoffset",y) - end - if w then - local wx=w-x - if rl<0 then - if wx~=0 then - insert_node_before(head,n,newkern(wx)) - end - if x~=0 then - insert_node_after (head,n,newkern(x)) - end - else - if x~=0 then - insert_node_before(head,n,newkern(x)) - end - if wx~=0 then - insert_node_after(head,n,newkern(wx)) - end - end - else - if x~=0 then - insert_node_before(head,n,newkern(x)) - end - end - end - end - end - end - if not keep then - kerns={} - end - return tonode(head),true - else - end - return tonode(head),false -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-otx']={ - version=1.001, - comment="companion to font-otf.lua (analysing)", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local type=type -if not trackers then trackers={ register=function() end } end -local fonts,nodes,node=fonts,nodes,node -local allocate=utilities.storage.allocate -local otf=fonts.handlers.otf -local analyzers=fonts.analyzers -local initializers=allocate() -local methods=allocate() -analyzers.initializers=initializers -analyzers.methods=methods -analyzers.useunicodemarks=false -local a_state=attributes.private('state') -local nuts=nodes.nuts -local tonut=nuts.tonut -local getfield=nuts.getfield -local getnext=nuts.getnext -local getprev=nuts.getprev -local getid=nuts.getid -local getprop=nuts.getprop -local setprop=nuts.setprop -local getfont=nuts.getfont -local getsubtype=nuts.getsubtype -local 
getchar=nuts.getchar -local traverse_id=nuts.traverse_id -local traverse_node_list=nuts.traverse -local end_of_math=nuts.end_of_math -local nodecodes=nodes.nodecodes -local glyph_code=nodecodes.glyph -local disc_code=nodecodes.disc -local math_code=nodecodes.math -local fontdata=fonts.hashes.identifiers -local categories=characters and characters.categories or {} -local otffeatures=fonts.constructors.newfeatures("otf") -local registerotffeature=otffeatures.register -local s_init=1 local s_rphf=7 -local s_medi=2 local s_half=8 -local s_fina=3 local s_pref=9 -local s_isol=4 local s_blwf=10 -local s_mark=5 local s_pstf=11 -local s_rest=6 -local states={ - init=s_init, - medi=s_medi, - fina=s_fina, - isol=s_isol, - mark=s_mark, - rest=s_rest, - rphf=s_rphf, - half=s_half, - pref=s_pref, - blwf=s_blwf, - pstf=s_pstf, -} -local features={ - init=s_init, - medi=s_medi, - fina=s_fina, - isol=s_isol, - rphf=s_rphf, - half=s_half, - pref=s_pref, - blwf=s_blwf, - pstf=s_pstf, -} -analyzers.states=states -analyzers.features=features -function analyzers.setstate(head,font) - local useunicodemarks=analyzers.useunicodemarks - local tfmdata=fontdata[font] - local descriptions=tfmdata.descriptions - local first,last,current,n,done=nil,nil,head,0,false - current=tonut(current) - while current do - local id=getid(current) - if id==glyph_code and getfont(current)==font then - done=true - local char=getchar(current) - local d=descriptions[char] - if d then - if d.class=="mark" or (useunicodemarks and categories[char]=="mn") then - done=true - setprop(current,a_state,s_mark) - elseif n==0 then - first,last,n=current,current,1 - setprop(current,a_state,s_init) - else - last,n=current,n+1 - setprop(current,a_state,s_medi) - end - else - if first and first==last then - setprop(last,a_state,s_isol) - elseif last then - setprop(last,a_state,s_fina) - end - first,last,n=nil,nil,0 - end - elseif id==disc_code then - setprop(current,a_state,s_medi) - last=current - else - if first and first==last then - setprop(last,a_state,s_isol) - elseif last then - setprop(last,a_state,s_fina) - end - first,last,n=nil,nil,0 - if id==math_code then - current=end_of_math(current) - end - end - current=getnext(current) - end - if first and first==last then - setprop(last,a_state,s_isol) - elseif last then - setprop(last,a_state,s_fina) - end - return head,done -end -local function analyzeinitializer(tfmdata,value) - local script,language=otf.scriptandlanguage(tfmdata) - local action=initializers[script] - if not action then - elseif type(action)=="function" then - return action(tfmdata,value) - else - local action=action[language] - if action then - return action(tfmdata,value) - end - end -end -local function analyzeprocessor(head,font,attr) - local tfmdata=fontdata[font] - local script,language=otf.scriptandlanguage(tfmdata,attr) - local action=methods[script] - if not action then - elseif type(action)=="function" then - return action(head,font,attr) - else - action=action[language] - if action then - return action(head,font,attr) - end - end - return head,false -end -registerotffeature { - name="analyze", - description="analysis of character classes", - default=true, - initializers={ - node=analyzeinitializer, - }, - processors={ - position=1, - node=analyzeprocessor, - } -} -methods.latn=analyzers.setstate -local tatweel=0x0640 -local zwnj=0x200C -local zwj=0x200D -local isolated={ - [0x0600]=true,[0x0601]=true,[0x0602]=true,[0x0603]=true, - [0x0604]=true, - [0x0608]=true,[0x060B]=true,[0x0621]=true,[0x0674]=true, - [0x06DD]=true, 
- [0x0856]=true,[0x0858]=true,[0x0857]=true, - [0x07FA]=true, - [zwnj]=true, - [0x08AD]=true, -} -local final={ - [0x0622]=true,[0x0623]=true,[0x0624]=true,[0x0625]=true, - [0x0627]=true,[0x0629]=true,[0x062F]=true,[0x0630]=true, - [0x0631]=true,[0x0632]=true,[0x0648]=true,[0x0671]=true, - [0x0672]=true,[0x0673]=true,[0x0675]=true,[0x0676]=true, - [0x0677]=true,[0x0688]=true,[0x0689]=true,[0x068A]=true, - [0x068B]=true,[0x068C]=true,[0x068D]=true,[0x068E]=true, - [0x068F]=true,[0x0690]=true,[0x0691]=true,[0x0692]=true, - [0x0693]=true,[0x0694]=true,[0x0695]=true,[0x0696]=true, - [0x0697]=true,[0x0698]=true,[0x0699]=true,[0x06C0]=true, - [0x06C3]=true,[0x06C4]=true,[0x06C5]=true,[0x06C6]=true, - [0x06C7]=true,[0x06C8]=true,[0x06C9]=true,[0x06CA]=true, - [0x06CB]=true,[0x06CD]=true,[0x06CF]=true,[0x06D2]=true, - [0x06D3]=true,[0x06D5]=true,[0x06EE]=true,[0x06EF]=true, - [0x0759]=true,[0x075A]=true,[0x075B]=true,[0x076B]=true, - [0x076C]=true,[0x0771]=true,[0x0773]=true,[0x0774]=true, - [0x0778]=true,[0x0779]=true, - [0x08AA]=true,[0x08AB]=true,[0x08AC]=true, - [0xFEF5]=true,[0xFEF7]=true,[0xFEF9]=true,[0xFEFB]=true, - [0x0710]=true,[0x0715]=true,[0x0716]=true,[0x0717]=true, - [0x0718]=true,[0x0719]=true,[0x0728]=true,[0x072A]=true, - [0x072C]=true,[0x071E]=true, - [0x072F]=true,[0x074D]=true, - [0x0840]=true,[0x0849]=true,[0x0854]=true,[0x0846]=true, - [0x084F]=true, - [0x08AE]=true,[0x08B1]=true,[0x08B2]=true, -} -local medial={ - [0x0626]=true,[0x0628]=true,[0x062A]=true,[0x062B]=true, - [0x062C]=true,[0x062D]=true,[0x062E]=true,[0x0633]=true, - [0x0634]=true,[0x0635]=true,[0x0636]=true,[0x0637]=true, - [0x0638]=true,[0x0639]=true,[0x063A]=true,[0x063B]=true, - [0x063C]=true,[0x063D]=true,[0x063E]=true,[0x063F]=true, - [0x0641]=true,[0x0642]=true,[0x0643]=true, - [0x0644]=true,[0x0645]=true,[0x0646]=true,[0x0647]=true, - [0x0649]=true,[0x064A]=true,[0x066E]=true,[0x066F]=true, - [0x0678]=true,[0x0679]=true,[0x067A]=true,[0x067B]=true, - [0x067C]=true,[0x067D]=true,[0x067E]=true,[0x067F]=true, - [0x0680]=true,[0x0681]=true,[0x0682]=true,[0x0683]=true, - [0x0684]=true,[0x0685]=true,[0x0686]=true,[0x0687]=true, - [0x069A]=true,[0x069B]=true,[0x069C]=true,[0x069D]=true, - [0x069E]=true,[0x069F]=true,[0x06A0]=true,[0x06A1]=true, - [0x06A2]=true,[0x06A3]=true,[0x06A4]=true,[0x06A5]=true, - [0x06A6]=true,[0x06A7]=true,[0x06A8]=true,[0x06A9]=true, - [0x06AA]=true,[0x06AB]=true,[0x06AC]=true,[0x06AD]=true, - [0x06AE]=true,[0x06AF]=true,[0x06B0]=true,[0x06B1]=true, - [0x06B2]=true,[0x06B3]=true,[0x06B4]=true,[0x06B5]=true, - [0x06B6]=true,[0x06B7]=true,[0x06B8]=true,[0x06B9]=true, - [0x06BA]=true,[0x06BB]=true,[0x06BC]=true,[0x06BD]=true, - [0x06BE]=true,[0x06BF]=true,[0x06C1]=true,[0x06C2]=true, - [0x06CC]=true,[0x06CE]=true,[0x06D0]=true,[0x06D1]=true, - [0x06FA]=true,[0x06FB]=true,[0x06FC]=true,[0x06FF]=true, - [0x0750]=true,[0x0751]=true,[0x0752]=true,[0x0753]=true, - [0x0754]=true,[0x0755]=true,[0x0756]=true,[0x0757]=true, - [0x0758]=true,[0x075C]=true,[0x075D]=true,[0x075E]=true, - [0x075F]=true,[0x0760]=true,[0x0761]=true,[0x0762]=true, - [0x0763]=true,[0x0764]=true,[0x0765]=true,[0x0766]=true, - [0x0767]=true,[0x0768]=true,[0x0769]=true,[0x076A]=true, - [0x076D]=true,[0x076E]=true,[0x076F]=true,[0x0770]=true, - [0x0772]=true,[0x0775]=true,[0x0776]=true,[0x0777]=true, - [0x077A]=true,[0x077B]=true,[0x077C]=true,[0x077D]=true, - [0x077E]=true,[0x077F]=true, - [0x08A0]=true,[0x08A2]=true,[0x08A4]=true,[0x08A5]=true, - [0x08A6]=true,[0x0620]=true,[0x08A8]=true,[0x08A9]=true, - 
[0x08A7]=true,[0x08A3]=true, - [0x0712]=true,[0x0713]=true,[0x0714]=true,[0x071A]=true, - [0x071B]=true,[0x071C]=true,[0x071D]=true,[0x071F]=true, - [0x0720]=true,[0x0721]=true,[0x0722]=true,[0x0723]=true, - [0x0724]=true,[0x0725]=true,[0x0726]=true,[0x0727]=true, - [0x0729]=true,[0x072B]=true,[0x072D]=true,[0x072E]=true, - [0x074E]=true,[0x074F]=true, - [0x0841]=true,[0x0842]=true,[0x0843]=true,[0x0844]=true, - [0x0845]=true,[0x0847]=true,[0x0848]=true,[0x0855]=true, - [0x0851]=true,[0x084E]=true,[0x084D]=true,[0x084A]=true, - [0x084B]=true,[0x084C]=true,[0x0850]=true,[0x0852]=true, - [0x0853]=true, - [0x07D7]=true,[0x07E8]=true,[0x07D9]=true,[0x07EA]=true, - [0x07CA]=true,[0x07DB]=true,[0x07CC]=true,[0x07DD]=true, - [0x07CE]=true,[0x07DF]=true,[0x07D4]=true,[0x07E5]=true, - [0x07E9]=true,[0x07E7]=true,[0x07E3]=true,[0x07E2]=true, - [0x07E0]=true,[0x07E1]=true,[0x07DE]=true,[0x07DC]=true, - [0x07D1]=true,[0x07DA]=true,[0x07D8]=true,[0x07D6]=true, - [0x07D2]=true,[0x07D0]=true,[0x07CF]=true,[0x07CD]=true, - [0x07CB]=true,[0x07D3]=true,[0x07E4]=true,[0x07D5]=true, - [0x07E6]=true, - [tatweel]=true,[zwj]=true, - [0x08A1]=true,[0x08AF]=true,[0x08B0]=true, -} -local arab_warned={} -local function warning(current,what) - local char=getchar(current) - if not arab_warned[char] then - log.report("analyze","arab: character %C has no %a class",char,what) - arab_warned[char]=true - end -end -local function finish(first,last) - if last then - if first==last then - local fc=getchar(first) - if medial[fc] or final[fc] then - setprop(first,a_state,s_isol) - else - warning(first,"isol") - setprop(first,a_state,s_error) - end - else - local lc=getchar(last) - if medial[lc] or final[lc] then - setprop(last,a_state,s_fina) - else - warning(last,"fina") - setprop(last,a_state,s_error) - end - end - first,last=nil,nil - elseif first then - local fc=getchar(first) - if medial[fc] or final[fc] then - setprop(first,a_state,s_isol) - else - warning(first,"isol") - setprop(first,a_state,s_error) - end - first=nil - end - return first,last -end -function methods.arab(head,font,attr) - local useunicodemarks=analyzers.useunicodemarks - local tfmdata=fontdata[font] - local marks=tfmdata.resources.marks - local first,last,current,done=nil,nil,head,false - current=tonut(current) - while current do - local id=getid(current) - if id==glyph_code and getfont(current)==font and getsubtype(current)<256 and not getprop(current,a_state) then - done=true - local char=getchar(current) - if marks[char] or (useunicodemarks and categories[char]=="mn") then - setprop(current,a_state,s_mark) - elseif isolated[char] then - first,last=finish(first,last) - setprop(current,a_state,s_isol) - first,last=nil,nil - elseif not first then - if medial[char] then - setprop(current,a_state,s_init) - first,last=first or current,current - elseif final[char] then - setprop(current,a_state,s_isol) - first,last=nil,nil - else - first,last=finish(first,last) - end - elseif medial[char] then - first,last=first or current,current - setprop(current,a_state,s_medi) - elseif final[char] then - if getprop(last,a_state)~=s_init then - setprop(last,a_state,s_medi) - end - setprop(current,a_state,s_fina) - first,last=nil,nil - elseif char>=0x0600 and char<=0x06FF then - setprop(current,a_state,s_rest) - first,last=finish(first,last) - else - first,last=finish(first,last) - end - else - if first or last then - first,last=finish(first,last) - end - if id==math_code then - current=end_of_math(current) - end - end - current=getnext(current) - end - if first or last 
then - finish(first,last) - end - return head,done -end -methods.syrc=methods.arab -methods.mand=methods.arab -methods.nko=methods.arab -directives.register("otf.analyze.useunicodemarks",function(v) - analyzers.useunicodemarks=v -end) - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-otn']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files", -} -local concat,insert,remove=table.concat,table.insert,table.remove -local gmatch,gsub,find,match,lower,strip=string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip -local type,next,tonumber,tostring=type,next,tonumber,tostring -local lpegmatch=lpeg.match -local random=math.random -local formatters=string.formatters -local logs,trackers,nodes,attributes=logs,trackers,nodes,attributes -local registertracker=trackers.register -local fonts=fonts -local otf=fonts.handlers.otf -local trace_lookups=false registertracker("otf.lookups",function(v) trace_lookups=v end) -local trace_singles=false registertracker("otf.singles",function(v) trace_singles=v end) -local trace_multiples=false registertracker("otf.multiples",function(v) trace_multiples=v end) -local trace_alternatives=false registertracker("otf.alternatives",function(v) trace_alternatives=v end) -local trace_ligatures=false registertracker("otf.ligatures",function(v) trace_ligatures=v end) -local trace_contexts=false registertracker("otf.contexts",function(v) trace_contexts=v end) -local trace_marks=false registertracker("otf.marks",function(v) trace_marks=v end) -local trace_kerns=false registertracker("otf.kerns",function(v) trace_kerns=v end) -local trace_cursive=false registertracker("otf.cursive",function(v) trace_cursive=v end) -local trace_preparing=false registertracker("otf.preparing",function(v) trace_preparing=v end) -local trace_bugs=false registertracker("otf.bugs",function(v) trace_bugs=v end) -local trace_details=false registertracker("otf.details",function(v) trace_details=v end) -local trace_applied=false registertracker("otf.applied",function(v) trace_applied=v end) -local trace_steps=false registertracker("otf.steps",function(v) trace_steps=v end) -local trace_skips=false registertracker("otf.skips",function(v) trace_skips=v end) -local trace_directions=false registertracker("otf.directions",function(v) trace_directions=v end) -local report_direct=logs.reporter("fonts","otf direct") -local report_subchain=logs.reporter("fonts","otf subchain") -local report_chain=logs.reporter("fonts","otf chain") -local report_process=logs.reporter("fonts","otf process") -local report_prepare=logs.reporter("fonts","otf prepare") -local report_warning=logs.reporter("fonts","otf warning") -registertracker("otf.verbose_chain",function(v) otf.setcontextchain(v and "verbose") end) -registertracker("otf.normal_chain",function(v) otf.setcontextchain(v and "normal") end) -registertracker("otf.replacements","otf.singles,otf.multiples,otf.alternatives,otf.ligatures") -registertracker("otf.positions","otf.marks,otf.kerns,otf.cursive") -registertracker("otf.actions","otf.replacements,otf.positions") -registertracker("otf.injections","nodes.injections") -registertracker("*otf.sample","otf.steps,otf.actions,otf.analyzing") -local nuts=nodes.nuts -local tonode=nuts.tonode -local tonut=nuts.tonut -local getfield=nuts.getfield -local setfield=nuts.setfield 
-local getnext=nuts.getnext -local getprev=nuts.getprev -local getid=nuts.getid -local getattr=nuts.getattr -local setattr=nuts.setattr -local getprop=nuts.getprop -local setprop=nuts.setprop -local getfont=nuts.getfont -local getsubtype=nuts.getsubtype -local getchar=nuts.getchar -local insert_node_after=nuts.insert_after -local delete_node=nuts.delete -local copy_node=nuts.copy -local find_node_tail=nuts.tail -local flush_node_list=nuts.flush_list -local end_of_math=nuts.end_of_math -local setmetatableindex=table.setmetatableindex -local zwnj=0x200C -local zwj=0x200D -local wildcard="*" -local default="dflt" -local nodecodes=nodes.nodecodes -local whatcodes=nodes.whatcodes -local glyphcodes=nodes.glyphcodes -local disccodes=nodes.disccodes -local glyph_code=nodecodes.glyph -local glue_code=nodecodes.glue -local disc_code=nodecodes.disc -local whatsit_code=nodecodes.whatsit -local math_code=nodecodes.math -local dir_code=whatcodes.dir -local localpar_code=whatcodes.localpar -local discretionary_code=disccodes.discretionary -local ligature_code=glyphcodes.ligature -local privateattribute=attributes.private -local a_state=privateattribute('state') -local a_cursbase=privateattribute('cursbase') -local injections=nodes.injections -local setmark=injections.setmark -local setcursive=injections.setcursive -local setkern=injections.setkern -local setpair=injections.setpair -local resetinjection=injections.reset -local setligaindex=injections.setligaindex -local getligaindex=injections.getligaindex -local cursonce=true -local fonthashes=fonts.hashes -local fontdata=fonthashes.identifiers -local otffeatures=fonts.constructors.newfeatures("otf") -local registerotffeature=otffeatures.register -local onetimemessage=fonts.loggers.onetimemessage or function() end -otf.defaultnodealternate="none" -local tfmdata=false -local characters=false -local descriptions=false -local resources=false -local marks=false -local currentfont=false -local lookuptable=false -local anchorlookups=false -local lookuptypes=false -local lookuptags=false -local handlers={} -local rlmode=0 -local featurevalue=false -local checkstep=(nodes and nodes.tracers and nodes.tracers.steppers.check) or function() end -local registerstep=(nodes and nodes.tracers and nodes.tracers.steppers.register) or function() end -local registermessage=(nodes and nodes.tracers and nodes.tracers.steppers.message) or function() end -local function logprocess(...) - if trace_steps then - registermessage(...) - end - report_direct(...) -end -local function logwarning(...) - report_direct(...) 
-end -local f_unicode=formatters["%U"] -local f_uniname=formatters["%U (%s)"] -local f_unilist=formatters["% t (% t)"] -local function gref(n) - if type(n)=="number" then - local description=descriptions[n] - local name=description and description.name - if name then - return f_uniname(n,name) - else - return f_unicode(n) - end - elseif n then - local num,nam={},{} - for i=1,#n do - local ni=n[i] - if tonumber(ni) then - local di=descriptions[ni] - num[i]=f_unicode(ni) - nam[i]=di and di.name or "-" - end - end - return f_unilist(num,nam) - else - return "" - end -end -local function cref(kind,chainname,chainlookupname,lookupname,index) - if index then - return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookuptags[lookupname],index) - elseif lookupname then - return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookuptags[lookupname]) - elseif chainlookupname then - return formatters["feature %a, chain %a, sub %a"](kind,lookuptags[chainname],lookuptags[chainlookupname]) - elseif chainname then - return formatters["feature %a, chain %a"](kind,lookuptags[chainname]) - else - return formatters["feature %a"](kind) - end -end -local function pref(kind,lookupname) - return formatters["feature %a, lookup %a"](kind,lookuptags[lookupname]) -end -local function copy_glyph(g) - local components=getfield(g,"components") - if components then - setfield(g,"components",nil) - local n=copy_node(g) - setfield(g,"components",components) - return n - else - return copy_node(g) - end -end -local function markstoligature(kind,lookupname,head,start,stop,char) - if start==stop and getchar(start)==char then - return head,start - else - local prev=getprev(start) - local next=getnext(stop) - setfield(start,"prev",nil) - setfield(stop,"next",nil) - local base=copy_glyph(start) - if head==start then - head=base - end - resetinjection(base) - setfield(base,"char",char) - setfield(base,"subtype",ligature_code) - setfield(base,"components",start) - if prev then - setfield(prev,"next",base) - end - if next then - setfield(next,"prev",base) - end - setfield(base,"next",next) - setfield(base,"prev",prev) - return head,base - end -end -local function getcomponentindex(start) - if getid(start)~=glyph_code then - return 0 - elseif getsubtype(start)==ligature_code then - local i=0 - local components=getfield(start,"components") - while components do - i=i+getcomponentindex(components) - components=getnext(components) - end - return i - elseif not marks[getchar(start)] then - return 1 - else - return 0 - end -end -local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound) - if start==stop and getchar(start)==char then - resetinjection(start) - setfield(start,"char",char) - return head,start - end - local prev=getprev(start) - local next=getnext(stop) - setfield(start,"prev",nil) - setfield(stop,"next",nil) - local base=copy_glyph(start) - if start==head then - head=base - end - resetinjection(base) - setfield(base,"char",char) - setfield(base,"subtype",ligature_code) - setfield(base,"components",start) - if prev then - setfield(prev,"next",base) - end - if next then - setfield(next,"prev",base) - end - setfield(base,"next",next) - setfield(base,"prev",prev) - if not discfound then - local deletemarks=markflag~="mark" - local components=start - local baseindex=0 - local componentindex=0 - local head=base - local current=base - while start do - local char=getchar(start) - if not marks[char] then - 
baseindex=baseindex+componentindex - componentindex=getcomponentindex(start) - elseif not deletemarks then - setligaindex(start,baseindex+getligaindex(start,componentindex)) - if trace_marks then - logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),getligaindex(start)) - end - head,current=insert_node_after(head,current,copy_node(start)) - elseif trace_marks then - logwarning("%s: delete mark %s",pref(kind,lookupname),gref(char)) - end - start=getnext(start) - end - local start=getnext(current) - while start and getid(start)==glyph_code do - local char=getchar(start) - if marks[char] then - setligaindex(start,baseindex+getligaindex(start,componentindex)) - if trace_marks then - logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),getligaindex(start)) - end - else - break - end - start=getnext(start) - end - end - return head,base -end -function handlers.gsub_single(head,start,kind,lookupname,replacement) - if trace_singles then - logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(getchar(start)),gref(replacement)) - end - resetinjection(start) - setfield(start,"char",replacement) - return head,start,true -end -local function get_alternative_glyph(start,alternatives,value,trace_alternatives) - local n=#alternatives - if value=="random" then - local r=random(1,n) - return alternatives[r],trace_alternatives and formatters["value %a, taking %a"](value,r) - elseif value=="first" then - return alternatives[1],trace_alternatives and formatters["value %a, taking %a"](value,1) - elseif value=="last" then - return alternatives[n],trace_alternatives and formatters["value %a, taking %a"](value,n) - else - value=tonumber(value) - if type(value)~="number" then - return alternatives[1],trace_alternatives and formatters["invalid value %s, taking %a"](value,1) - elseif value>n then - local defaultalt=otf.defaultnodealternate - if defaultalt=="first" then - return alternatives[n],trace_alternatives and formatters["invalid value %s, taking %a"](value,1) - elseif defaultalt=="last" then - return alternatives[1],trace_alternatives and formatters["invalid value %s, taking %a"](value,n) - else - return false,trace_alternatives and formatters["invalid value %a, %s"](value,"out of range") - end - elseif value==0 then - return getchar(start),trace_alternatives and formatters["invalid value %a, %s"](value,"no change") - elseif value<1 then - return alternatives[1],trace_alternatives and formatters["invalid value %a, taking %a"](value,1) - else - return alternatives[value],trace_alternatives and formatters["value %a, taking %a"](value,value) - end - end -end -local function multiple_glyphs(head,start,multiple,ignoremarks) - local nofmultiples=#multiple - if nofmultiples>0 then - resetinjection(start) - setfield(start,"char",multiple[1]) - if nofmultiples>1 then - local sn=getnext(start) - for k=2,nofmultiples do - local n=copy_node(start) - resetinjection(n) - setfield(n,"char",multiple[k]) - setfield(n,"next",sn) - setfield(n,"prev",start) - if sn then - setfield(sn,"prev",n) - end - setfield(start,"next",n) - start=n - end - end - return head,start,true - else - if trace_multiples then - logprocess("no multiple for %s",gref(getchar(start))) - end - return head,start,false - end -end -function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence) - local value=featurevalue==true and tfmdata.shared.features[kind] or featurevalue - local choice,comment=get_alternative_glyph(start,alternative,value,trace_alternatives) - if choice 
then - if trace_alternatives then - logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(getchar(start)),choice,gref(choice),comment) - end - resetinjection(start) - setfield(start,"char",choice) - else - if trace_alternatives then - logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(getchar(start)),comment) - end - end - return head,start,true -end -function handlers.gsub_multiple(head,start,kind,lookupname,multiple,sequence) - if trace_multiples then - logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(getchar(start)),gref(multiple)) - end - return multiple_glyphs(head,start,multiple,sequence.flags[1]) -end -function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence) - local s,stop,discfound=getnext(start),nil,false - local startchar=getchar(start) - if marks[startchar] then - while s do - local id=getid(s) - if id==glyph_code and getfont(s)==currentfont and getsubtype(s)<256 then - local lg=ligature[getchar(s)] - if lg then - stop=s - ligature=lg - s=getnext(s) - else - break - end - else - break - end - end - if stop then - local lig=ligature.ligature - if lig then - if trace_ligatures then - local stopchar=getchar(stop) - head,start=markstoligature(kind,lookupname,head,start,stop,lig) - logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start))) - else - head,start=markstoligature(kind,lookupname,head,start,stop,lig) - end - return head,start,true - else - end - end - else - local skipmark=sequence.flags[1] - while s do - local id=getid(s) - if id==glyph_code and getsubtype(s)<256 then - if getfont(s)==currentfont then - local char=getchar(s) - if skipmark and marks[char] then - s=getnext(s) - else - local lg=ligature[char] - if lg then - stop=s - ligature=lg - s=getnext(s) - else - break - end - end - else - break - end - elseif id==disc_code then - discfound=true - s=getnext(s) - else - break - end - end - local lig=ligature.ligature - if lig then - if stop then - if trace_ligatures then - local stopchar=getchar(stop) - head,start=toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound) - logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start))) - else - head,start=toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound) - end - else - resetinjection(start) - setfield(start,"char",lig) - if trace_ligatures then - logprocess("%s: replacing %s by (no real) ligature %s case 3",pref(kind,lookupname),gref(startchar),gref(lig)) - end - end - return head,start,true - else - end - end - return head,start,false -end -function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence) - local markchar=getchar(start) - if marks[markchar] then - local base=getprev(start) - if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then - local basechar=getchar(base) - if marks[basechar] then - while true do - base=getprev(base) - if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then - basechar=getchar(base) - if not marks[basechar] then - break - end - else - if trace_bugs then - logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar)) - end - return head,start,false - end - end - end - local baseanchors=descriptions[basechar] - if baseanchors then - baseanchors=baseanchors.anchors - end - if baseanchors then - local 
baseanchors=baseanchors['basechar'] - if baseanchors then - local al=anchorlookups[lookupname] - for anchor,ba in next,baseanchors do - if al[anchor] then - local ma=markanchors[anchor] - if ma then - local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) - if trace_marks then - logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)", - pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) - end - return head,start,true - end - end - end - if trace_bugs then - logwarning("%s, no matching anchors for mark %s and base %s",pref(kind,lookupname),gref(markchar),gref(basechar)) - end - end - elseif trace_bugs then - onetimemessage(currentfont,basechar,"no base anchors",report_fonts) - end - elseif trace_bugs then - logwarning("%s: prev node is no char",pref(kind,lookupname)) - end - elseif trace_bugs then - logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar)) - end - return head,start,false -end -function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence) - local markchar=getchar(start) - if marks[markchar] then - local base=getprev(start) - if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then - local basechar=getchar(base) - if marks[basechar] then - while true do - base=getprev(base) - if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then - basechar=getchar(base) - if not marks[basechar] then - break - end - else - if trace_bugs then - logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar)) - end - return head,start,false - end - end - end - local index=getligaindex(start) - local baseanchors=descriptions[basechar] - if baseanchors then - baseanchors=baseanchors.anchors - if baseanchors then - local baseanchors=baseanchors['baselig'] - if baseanchors then - local al=anchorlookups[lookupname] - for anchor,ba in next,baseanchors do - if al[anchor] then - local ma=markanchors[anchor] - if ma then - ba=ba[index] - if ba then - local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) - if trace_marks then - logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)", - pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy) - end - return head,start,true - else - if trace_bugs then - logwarning("%s: no matching anchors for mark %s and baselig %s with index %a",pref(kind,lookupname),gref(markchar),gref(basechar),index) - end - end - end - end - end - if trace_bugs then - logwarning("%s: no matching anchors for mark %s and baselig %s",pref(kind,lookupname),gref(markchar),gref(basechar)) - end - end - end - elseif trace_bugs then - onetimemessage(currentfont,basechar,"no base anchors",report_fonts) - end - elseif trace_bugs then - logwarning("%s: prev node is no char",pref(kind,lookupname)) - end - elseif trace_bugs then - logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar)) - end - return head,start,false -end -function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence) - local markchar=getchar(start) - if marks[markchar] then - local base=getprev(start) - local slc=getligaindex(start) - if slc then - while base do - local blc=getligaindex(base) - if blc and blc~=slc then - base=getprev(base) - else - break - end - end - end - if base and getid(base)==glyph_code and getfont(base)==currentfont and 
getsubtype(base)<256 then - local basechar=getchar(base) - local baseanchors=descriptions[basechar] - if baseanchors then - baseanchors=baseanchors.anchors - if baseanchors then - baseanchors=baseanchors['basemark'] - if baseanchors then - local al=anchorlookups[lookupname] - for anchor,ba in next,baseanchors do - if al[anchor] then - local ma=markanchors[anchor] - if ma then - local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) - if trace_marks then - logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)", - pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) - end - return head,start,true - end - end - end - if trace_bugs then - logwarning("%s: no matching anchors for mark %s and basemark %s",pref(kind,lookupname),gref(markchar),gref(basechar)) - end - end - end - elseif trace_bugs then - onetimemessage(currentfont,basechar,"no base anchors",report_fonts) - end - elseif trace_bugs then - logwarning("%s: prev node is no mark",pref(kind,lookupname)) - end - elseif trace_bugs then - logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar)) - end - return head,start,false -end -function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence) - local alreadydone=cursonce and getprop(start,a_cursbase) - if not alreadydone then - local done=false - local startchar=getchar(start) - if marks[startchar] then - if trace_cursive then - logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar)) - end - else - local nxt=getnext(start) - while not done and nxt and getid(nxt)==glyph_code and getfont(nxt)==currentfont and getsubtype(nxt)<256 do - local nextchar=getchar(nxt) - if marks[nextchar] then - nxt=getnext(nxt) - else - local entryanchors=descriptions[nextchar] - if entryanchors then - entryanchors=entryanchors.anchors - if entryanchors then - entryanchors=entryanchors['centry'] - if entryanchors then - local al=anchorlookups[lookupname] - for anchor,entry in next,entryanchors do - if al[anchor] then - local exit=exitanchors[anchor] - if exit then - local dx,dy,bound=setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar]) - if trace_cursive then - logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode) - end - done=true - break - end - end - end - end - end - elseif trace_bugs then - onetimemessage(currentfont,startchar,"no entry anchors",report_fonts) - end - break - end - end - end - return head,start,done - else - if trace_cursive and trace_details then - logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone) - end - return head,start,false - end -end -function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence) - local startchar=getchar(start) - local dx,dy,w,h=setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar]) - if trace_kerns then - logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h) - end - return head,start,false -end -function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence) - local snext=getnext(start) - if not snext then - return head,start,false - else - local prev,done=start,false - local factor=tfmdata.parameters.factor - local lookuptype=lookuptypes[lookupname] - while snext and 
getid(snext)==glyph_code and getfont(snext)==currentfont and getsubtype(snext)<256 do - local nextchar=getchar(snext) - local krn=kerns[nextchar] - if not krn and marks[nextchar] then - prev=snext - snext=getnext(snext) - else - if not krn then - elseif type(krn)=="table" then - if lookuptype=="pair" then - local a,b=krn[2],krn[3] - if a and #a>0 then - local startchar=getchar(start) - local x,y,w,h=setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar]) - if trace_kerns then - logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h) - end - end - if b and #b>0 then - local startchar=getchar(start) - local x,y,w,h=setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar]) - if trace_kerns then - logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h) - end - end - else - report_process("%s: check this out (old kern stuff)",pref(kind,lookupname)) - end - done=true - elseif krn~=0 then - local k=setkern(snext,factor,rlmode,krn) - if trace_kerns then - logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(getchar(prev)),gref(nextchar)) - end - done=true - end - break - end - end - return head,start,done - end -end -local chainmores={} -local chainprocs={} -local function logprocess(...) - if trace_steps then - registermessage(...) - end - report_subchain(...) -end -local logwarning=report_subchain -local function logprocess(...) - if trace_steps then - registermessage(...) - end - report_chain(...) -end -local logwarning=report_chain -function chainprocs.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname) - logwarning("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname)) - return head,start,false -end -function chainmores.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname,n) - logprocess("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname)) - return head,start,false -end -function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements) - local char=getchar(start) - local replacement=replacements[char] - if replacement then - if trace_singles then - logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement)) - end - resetinjection(start) - setfield(start,"char",replacement) - return head,start,true - else - return head,start,false - end -end -function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex) - local current=start - local subtables=currentlookup.subtables - if #subtables>1 then - logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," ")) - end - while current do - if getid(current)==glyph_code then - local currentchar=getchar(current) - local lookupname=subtables[1] - local replacement=lookuphash[lookupname] - if not replacement then - if trace_bugs then - logwarning("%s: no single hits",cref(kind,chainname,chainlookupname,lookupname,chainindex)) - end - else - replacement=replacement[currentchar] - if not replacement or replacement=="" then - if trace_bugs then - logwarning("%s: no single for %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar)) - end - else - if trace_singles then - logprocess("%s: 
replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement)) - end - resetinjection(current) - setfield(current,"char",replacement) - end - end - return head,start,true - elseif current==stop then - break - else - current=getnext(current) - end - end - return head,start,false -end -chainmores.gsub_single=chainprocs.gsub_single -function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - local startchar=getchar(start) - local subtables=currentlookup.subtables - local lookupname=subtables[1] - local replacements=lookuphash[lookupname] - if not replacements then - if trace_bugs then - logwarning("%s: no multiple hits",cref(kind,chainname,chainlookupname,lookupname)) - end - else - replacements=replacements[startchar] - if not replacements or replacement=="" then - if trace_bugs then - logwarning("%s: no multiple for %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar)) - end - else - if trace_multiples then - logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements)) - end - return multiple_glyphs(head,start,replacements,currentlookup.flags[1]) - end - end - return head,start,false -end -chainmores.gsub_multiple=chainprocs.gsub_multiple -function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - local current=start - local subtables=currentlookup.subtables - local value=featurevalue==true and tfmdata.shared.features[kind] or featurevalue - while current do - if getid(current)==glyph_code then - local currentchar=getchar(current) - local lookupname=subtables[1] - local alternatives=lookuphash[lookupname] - if not alternatives then - if trace_bugs then - logwarning("%s: no alternative hit",cref(kind,chainname,chainlookupname,lookupname)) - end - else - alternatives=alternatives[currentchar] - if alternatives then - local choice,comment=get_alternative_glyph(current,alternatives,value,trace_alternatives) - if choice then - if trace_alternatives then - logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(char),choice,gref(choice),comment) - end - resetinjection(start) - setfield(start,"char",choice) - else - if trace_alternatives then - logwarning("%s: no variant %a for %s, %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(char),comment) - end - end - elseif trace_bugs then - logwarning("%s: no alternative for %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar),comment) - end - end - return head,start,true - elseif current==stop then - break - else - current=getnext(current) - end - end - return head,start,false -end -chainmores.gsub_alternate=chainprocs.gsub_alternate -function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex) - local startchar=getchar(start) - local subtables=currentlookup.subtables - local lookupname=subtables[1] - local ligatures=lookuphash[lookupname] - if not ligatures then - if trace_bugs then - logwarning("%s: no ligature hits",cref(kind,chainname,chainlookupname,lookupname,chainindex)) - end - else - ligatures=ligatures[startchar] - if not ligatures then - if trace_bugs then - logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar)) - end - else - local 
s=getnext(start) - local discfound=false - local last=stop - local nofreplacements=0 - local skipmark=currentlookup.flags[1] - while s do - local id=getid(s) - if id==disc_code then - s=getnext(s) - discfound=true - else - local schar=getchar(s) - if skipmark and marks[schar] then - s=getnext(s) - else - local lg=ligatures[schar] - if lg then - ligatures,last,nofreplacements=lg,s,nofreplacements+1 - if s==stop then - break - else - s=getnext(s) - end - else - break - end - end - end - end - local l2=ligatures.ligature - if l2 then - if chainindex then - stop=last - end - if trace_ligatures then - if start==stop then - logprocess("%s: replacing character %s by ligature %s case 3",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2)) - else - logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop)),gref(l2)) - end - end - head,start=toligature(kind,lookupname,head,start,stop,l2,currentlookup.flags[1],discfound) - return head,start,true,nofreplacements - elseif trace_bugs then - if start==stop then - logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar)) - else - logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop))) - end - end - end - end - return head,start,false,0 -end -chainmores.gsub_ligature=chainprocs.gsub_ligature -function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - local markchar=getchar(start) - if marks[markchar] then - local subtables=currentlookup.subtables - local lookupname=subtables[1] - local markanchors=lookuphash[lookupname] - if markanchors then - markanchors=markanchors[markchar] - end - if markanchors then - local base=getprev(start) - if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then - local basechar=getchar(base) - if marks[basechar] then - while true do - base=getprev(base) - if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then - basechar=getchar(base) - if not marks[basechar] then - break - end - else - if trace_bugs then - logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar)) - end - return head,start,false - end - end - end - local baseanchors=descriptions[basechar].anchors - if baseanchors then - local baseanchors=baseanchors['basechar'] - if baseanchors then - local al=anchorlookups[lookupname] - for anchor,ba in next,baseanchors do - if al[anchor] then - local ma=markanchors[anchor] - if ma then - local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) - if trace_marks then - logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)", - cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) - end - return head,start,true - end - end - end - if trace_bugs then - logwarning("%s, no matching anchors for mark %s and base %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar)) - end - end - end - elseif trace_bugs then - logwarning("%s: prev node is no char",cref(kind,chainname,chainlookupname,lookupname)) - end - elseif trace_bugs then - logwarning("%s: mark %s has no 
anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar)) - end - elseif trace_bugs then - logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar)) - end - return head,start,false -end -function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - local markchar=getchar(start) - if marks[markchar] then - local subtables=currentlookup.subtables - local lookupname=subtables[1] - local markanchors=lookuphash[lookupname] - if markanchors then - markanchors=markanchors[markchar] - end - if markanchors then - local base=getprev(start) - if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then - local basechar=getchar(base) - if marks[basechar] then - while true do - base=getprev(base) - if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then - basechar=getchar(base) - if not marks[basechar] then - break - end - else - if trace_bugs then - logwarning("%s: no base for mark %s",cref(kind,chainname,chainlookupname,lookupname),markchar) - end - return head,start,false - end - end - end - local index=getligaindex(start) - local baseanchors=descriptions[basechar].anchors - if baseanchors then - local baseanchors=baseanchors['baselig'] - if baseanchors then - local al=anchorlookups[lookupname] - for anchor,ba in next,baseanchors do - if al[anchor] then - local ma=markanchors[anchor] - if ma then - ba=ba[index] - if ba then - local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) - if trace_marks then - logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)", - cref(kind,chainname,chainlookupname,lookupname),anchor,a or bound,gref(markchar),gref(basechar),index,dx,dy) - end - return head,start,true - end - end - end - end - if trace_bugs then - logwarning("%s: no matching anchors for mark %s and baselig %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar)) - end - end - end - elseif trace_bugs then - logwarning("feature %s, lookup %s: prev node is no char",kind,lookupname) - end - elseif trace_bugs then - logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar)) - end - elseif trace_bugs then - logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar)) - end - return head,start,false -end -function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - local markchar=getchar(start) - if marks[markchar] then - local subtables=currentlookup.subtables - local lookupname=subtables[1] - local markanchors=lookuphash[lookupname] - if markanchors then - markanchors=markanchors[markchar] - end - if markanchors then - local base=getprev(start) - local slc=getligaindex(start) - if slc then - while base do - local blc=getligaindex(base) - if blc and blc~=slc then - base=getprev(base) - else - break - end - end - end - if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then - local basechar=getchar(base) - local baseanchors=descriptions[basechar].anchors - if baseanchors then - baseanchors=baseanchors['basemark'] - if baseanchors then - local al=anchorlookups[lookupname] - for anchor,ba in next,baseanchors do - if al[anchor] then - local ma=markanchors[anchor] - if ma then - local 
dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) - if trace_marks then - logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)", - cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) - end - return head,start,true - end - end - end - if trace_bugs then - logwarning("%s: no matching anchors for mark %s and basemark %s",gref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar)) - end - end - end - elseif trace_bugs then - logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname)) - end - elseif trace_bugs then - logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar)) - end - elseif trace_bugs then - logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar)) - end - return head,start,false -end -function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - local alreadydone=cursonce and getprop(start,a_cursbase) - if not alreadydone then - local startchar=getchar(start) - local subtables=currentlookup.subtables - local lookupname=subtables[1] - local exitanchors=lookuphash[lookupname] - if exitanchors then - exitanchors=exitanchors[startchar] - end - if exitanchors then - local done=false - if marks[startchar] then - if trace_cursive then - logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar)) - end - else - local nxt=getnext(start) - while not done and nxt and getid(nxt)==glyph_code and getfont(nxt)==currentfont and getsubtype(nxt)<256 do - local nextchar=getchar(nxt) - if marks[nextchar] then - nxt=getnext(nxt) - else - local entryanchors=descriptions[nextchar] - if entryanchors then - entryanchors=entryanchors.anchors - if entryanchors then - entryanchors=entryanchors['centry'] - if entryanchors then - local al=anchorlookups[lookupname] - for anchor,entry in next,entryanchors do - if al[anchor] then - local exit=exitanchors[anchor] - if exit then - local dx,dy,bound=setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar]) - if trace_cursive then - logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode) - end - done=true - break - end - end - end - end - end - elseif trace_bugs then - onetimemessage(currentfont,startchar,"no entry anchors",report_fonts) - end - break - end - end - end - return head,start,done - else - if trace_cursive and trace_details then - logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone) - end - return head,start,false - end - end - return head,start,false -end -function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence) - local startchar=getchar(start) - local subtables=currentlookup.subtables - local lookupname=subtables[1] - local kerns=lookuphash[lookupname] - if kerns then - kerns=kerns[startchar] - if kerns then - local dx,dy,w,h=setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar]) - if trace_kerns then - logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h) - end - end - end - return head,start,false -end 
-chainmores.gpos_single=chainprocs.gpos_single -function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence) - local snext=getnext(start) - if snext then - local startchar=getchar(start) - local subtables=currentlookup.subtables - local lookupname=subtables[1] - local kerns=lookuphash[lookupname] - if kerns then - kerns=kerns[startchar] - if kerns then - local lookuptype=lookuptypes[lookupname] - local prev,done=start,false - local factor=tfmdata.parameters.factor - while snext and getid(snext)==glyph_code and getfont(snext)==currentfont and getsubtype(snext)<256 do - local nextchar=getchar(snext) - local krn=kerns[nextchar] - if not krn and marks[nextchar] then - prev=snext - snext=getnext(snext) - else - if not krn then - elseif type(krn)=="table" then - if lookuptype=="pair" then - local a,b=krn[2],krn[3] - if a and #a>0 then - local startchar=getchar(start) - local x,y,w,h=setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar]) - if trace_kerns then - logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h) - end - end - if b and #b>0 then - local startchar=getchar(start) - local x,y,w,h=setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar]) - if trace_kerns then - logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h) - end - end - else - report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname)) - local a,b=krn[2],krn[6] - if a and a~=0 then - local k=setkern(snext,factor,rlmode,a) - if trace_kerns then - logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar)) - end - end - if b and b~=0 then - logwarning("%s: ignoring second kern xoff %s",cref(kind,chainname,chainlookupname),b*factor) - end - end - done=true - elseif krn~=0 then - local k=setkern(snext,factor,rlmode,krn) - if trace_kerns then - logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar)) - end - done=true - end - break - end - end - return head,start,done - end - end - end - return head,start,false -end -chainmores.gpos_pair=chainprocs.gpos_pair -local function show_skip(kind,chainname,char,ck,class) - if ck[9] then - logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a, %a => %a",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10]) - else - logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a",cref(kind,chainname),gref(char),class,ck[1],ck[2]) - end -end -local quit_on_no_replacement=true -directives.register("otf.chain.quitonnoreplacement",function(value) - quit_on_no_replacement=value -end) -local function normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash) - local flags=sequence.flags - local done=false - local skipmark=flags[1] - local skipligature=flags[2] - local skipbase=flags[3] - local someskip=skipmark or skipligature or skipbase - local markclass=sequence.markclass - local skipped=false - for k=1,#contexts do - local match=true - local current=start - local last=start - local ck=contexts[k] - local seq=ck[3] - local s=#seq - if s==1 then - match=getid(current)==glyph_code and getfont(current)==currentfont and getsubtype(current)<256 and 
seq[1][getchar(current)] - else - local f,l=ck[4],ck[5] - if f==1 and f==l then - else - if f==l then - else - local n=f+1 - last=getnext(last) - while n<=l do - if last then - local id=getid(last) - if id==glyph_code then - if getfont(last)==currentfont and getsubtype(last)<256 then - local char=getchar(last) - local ccd=descriptions[char] - if ccd then - local class=ccd.class - if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then - skipped=true - if trace_skips then - show_skip(kind,chainname,char,ck,class) - end - last=getnext(last) - elseif seq[n][char] then - if n1 then - local prev=getprev(start) - if prev then - local n=f-1 - while n>=1 do - if prev then - local id=getid(prev) - if id==glyph_code then - if getfont(prev)==currentfont and getsubtype(prev)<256 then - local char=getchar(prev) - local ccd=descriptions[char] - if ccd then - local class=ccd.class - if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then - skipped=true - if trace_skips then - show_skip(kind,chainname,char,ck,class) - end - elseif seq[n][char] then - n=n -1 - else - match=false - break - end - else - match=false - break - end - else - match=false - break - end - elseif id==disc_code then - elseif seq[n][32] then - n=n -1 - else - match=false - break - end - prev=getprev(prev) - elseif seq[n][32] then - n=n -1 - else - match=false - break - end - end - elseif f==2 then - match=seq[1][32] - else - for n=f-1,1 do - if not seq[n][32] then - match=false - break - end - end - end - end - if match and s>l then - local current=last and getnext(last) - if current then - local n=l+1 - while n<=s do - if current then - local id=getid(current) - if id==glyph_code then - if getfont(current)==currentfont and getsubtype(current)<256 then - local char=getchar(current) - local ccd=descriptions[char] - if ccd then - local class=ccd.class - if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then - skipped=true - if trace_skips then - show_skip(kind,chainname,char,ck,class) - end - elseif seq[n][char] then - n=n+1 - else - match=false - break - end - else - match=false - break - end - else - match=false - break - end - elseif id==disc_code then - elseif seq[n][32] then - n=n+1 - else - match=false - break - end - current=getnext(current) - elseif seq[n][32] then - n=n+1 - else - match=false - break - end - end - elseif s-l==1 then - match=seq[s][32] - else - for n=l+1,s do - if not seq[n][32] then - match=false - break - end - end - end - end - end - if match then - if trace_contexts then - local rule,lookuptype,f,l=ck[1],ck[2],ck[4],ck[5] - local char=getchar(start) - if ck[9] then - logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a, %a => %a", - cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10]) - else - logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a", - cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype) - end - end - local chainlookups=ck[6] - if chainlookups then - local nofchainlookups=#chainlookups - if nofchainlookups==1 then - local chainlookupname=chainlookups[1] - local chainlookup=lookuptable[chainlookupname] - if chainlookup then - local cp=chainprocs[chainlookup.type] - if cp then - local ok - head,start,ok=cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence) - if ok then - 
done=true - end - else - logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type) - end - else - logprocess("%s is not yet supported",cref(kind,chainname,chainlookupname)) - end - else - local i=1 - while true do - if skipped then - while true do - local char=getchar(start) - local ccd=descriptions[char] - if ccd then - local class=ccd.class - if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then - start=getnext(start) - else - break - end - else - break - end - end - end - local chainlookupname=chainlookups[i] - local chainlookup=lookuptable[chainlookupname] - if not chainlookup then - i=i+1 - else - local cp=chainmores[chainlookup.type] - if not cp then - logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type) - i=i+1 - else - local ok,n - head,start,ok,n=cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence) - if ok then - done=true - i=i+(n or 1) - else - i=i+1 - end - end - end - if i>nofchainlookups then - break - elseif start then - start=getnext(start) - else - end - end - end - else - local replacements=ck[7] - if replacements then - head,start,done=chainprocs.reversesub(head,start,last,kind,chainname,ck,lookuphash,replacements) - else - done=quit_on_no_replacement - if trace_contexts then - logprocess("%s: skipping match",cref(kind,chainname)) - end - end - end - end - end - return head,start,done -end -local verbose_handle_contextchain=function(font,...) - logwarning("no verbose handler installed, reverting to 'normal'") - otf.setcontextchain() - return normal_handle_contextchain(...) -end -otf.chainhandlers={ - normal=normal_handle_contextchain, - verbose=verbose_handle_contextchain, -} -function otf.setcontextchain(method) - if not method or method=="normal" or not otf.chainhandlers[method] then - if handlers.contextchain then - logwarning("installing normal contextchain handler") - end - handlers.contextchain=normal_handle_contextchain - else - logwarning("installing contextchain handler %a",method) - local handler=otf.chainhandlers[method] - handlers.contextchain=function(...) - return handler(currentfont,...) - end - end - handlers.gsub_context=handlers.contextchain - handlers.gsub_contextchain=handlers.contextchain - handlers.gsub_reversecontextchain=handlers.contextchain - handlers.gpos_contextchain=handlers.contextchain - handlers.gpos_context=handlers.contextchain -end -otf.setcontextchain() -local missing={} -local function logprocess(...) - if trace_steps then - registermessage(...) - end - report_process(...) 
-end -local logwarning=report_process -local function report_missing_cache(typ,lookup) - local f=missing[currentfont] if not f then f={} missing[currentfont]=f end - local t=f[typ] if not t then t={} f[typ]=t end - if not t[lookup] then - t[lookup]=true - logwarning("missing cache for lookup %a, type %a, font %a, name %a",lookup,typ,currentfont,tfmdata.properties.fullname) - end -end -local resolved={} -local lookuphashes={} -setmetatableindex(lookuphashes,function(t,font) - local lookuphash=fontdata[font].resources.lookuphash - if not lookuphash or not next(lookuphash) then - lookuphash=false - end - t[font]=lookuphash - return lookuphash -end) -local autofeatures=fonts.analyzers.features -local function initialize(sequence,script,language,enabled) - local features=sequence.features - if features then - local order=sequence.order - if order then - for i=1,#order do - local kind=order[i] - local valid=enabled[kind] - if valid then - local scripts=features[kind] - local languages=scripts[script] or scripts[wildcard] - if languages and (languages[language] or languages[wildcard]) then - return { valid,autofeatures[kind] or false,sequence.chain or 0,kind,sequence } - end - end - end - else - end - end - return false -end -function otf.dataset(tfmdata,font) - local shared=tfmdata.shared - local properties=tfmdata.properties - local language=properties.language or "dflt" - local script=properties.script or "dflt" - local enabled=shared.features - local res=resolved[font] - if not res then - res={} - resolved[font]=res - end - local rs=res[script] - if not rs then - rs={} - res[script]=rs - end - local rl=rs[language] - if not rl then - rl={ - } - rs[language]=rl - local sequences=tfmdata.resources.sequences - for s=1,#sequences do - local v=enabled and initialize(sequences[s],script,language,enabled) - if v then - rl[#rl+1]=v - end - end - end - return rl -end -local function featuresprocessor(head,font,attr) - local lookuphash=lookuphashes[font] - if not lookuphash then - return head,false - end - head=tonut(head) - if trace_steps then - checkstep(head) - end - tfmdata=fontdata[font] - descriptions=tfmdata.descriptions - characters=tfmdata.characters - resources=tfmdata.resources - marks=resources.marks - anchorlookups=resources.lookup_to_anchor - lookuptable=resources.lookups - lookuptypes=resources.lookuptypes - lookuptags=resources.lookuptags - currentfont=font - rlmode=0 - local sequences=resources.sequences - local done=false - local datasets=otf.dataset(tfmdata,font,attr) - local dirstack={} - for s=1,#datasets do - local dataset=datasets[s] - featurevalue=dataset[1] - local sequence=dataset[5] - local rlparmode=0 - local topstack=0 - local success=false - local attribute=dataset[2] - local chain=dataset[3] - local typ=sequence.type - local subtables=sequence.subtables - if chain<0 then - local handler=handlers[typ] - local start=find_node_tail(head) - while start do - local id=getid(start) - if id==glyph_code then - if getfont(start)==font and getsubtype(start)<256 then - local a=getattr(start,0) - if a then - a=a==attr - else - a=true - end - if a then - for i=1,#subtables do - local lookupname=subtables[i] - local lookupcache=lookuphash[lookupname] - if lookupcache then - local lookupmatch=lookupcache[getchar(start)] - if lookupmatch then - head,start,success=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) - if success then - break - end - end - else - report_missing_cache(typ,lookupname) - end - end - if start then start=getprev(start) end - else - 
start=getprev(start) - end - else - start=getprev(start) - end - else - start=getprev(start) - end - end - else - local handler=handlers[typ] - local ns=#subtables - local start=head - rlmode=0 - if ns==1 then - local lookupname=subtables[1] - local lookupcache=lookuphash[lookupname] - if not lookupcache then - report_missing_cache(typ,lookupname) - else - local function subrun(start) - local head=start - local done=false - while start do - local id=getid(start) - if id==glyph_code and getfont(start)==font and getsubtype(start)<256 then - local a=getattr(start,0) - if a then - a=(a==attr) and (not attribute or getprop(start,a_state)==attribute) - else - a=not attribute or getprop(start,a_state)==attribute - end - if a then - local lookupmatch=lookupcache[getchar(start)] - if lookupmatch then - local ok - head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) - if ok then - done=true - end - end - if start then start=getnext(start) end - else - start=getnext(start) - end - else - start=getnext(start) - end - end - if done then - success=true - return head - end - end - local function kerndisc(disc) - local prev=getprev(disc) - local next=getnext(disc) - if prev and next then - setfield(prev,"next",next) - local a=getattr(prev,0) - if a then - a=(a==attr) and (not attribute or getprop(prev,a_state)==attribute) - else - a=not attribute or getprop(prev,a_state)==attribute - end - if a then - local lookupmatch=lookupcache[getchar(prev)] - if lookupmatch then - local h,d,ok=handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) - if ok then - done=true - success=true - end - end - end - setfield(prev,"next",disc) - end - return next - end - while start do - local id=getid(start) - if id==glyph_code then - if getfont(start)==font and getsubtype(start)<256 then - local a=getattr(start,0) - if a then - a=(a==attr) and (not attribute or getprop(start,a_state)==attribute) - else - a=not attribute or getprop(start,a_state)==attribute - end - if a then - local lookupmatch=lookupcache[getchar(start)] - if lookupmatch then - local ok - head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) - if ok then - success=true - end - end - if start then start=getnext(start) end - else - start=getnext(start) - end - else - start=getnext(start) - end - elseif id==disc_code then - if getsubtype(start)==discretionary_code then - local pre=getfield(start,"pre") - if pre then - local new=subrun(pre) - if new then setfield(start,"pre",new) end - end - local post=getfield(start,"post") - if post then - local new=subrun(post) - if new then setfield(start,"post",new) end - end - local replace=getfield(start,"replace") - if replace then - local new=subrun(replace) - if new then setfield(start,"replace",new) end - end -elseif typ=="gpos_single" or typ=="gpos_pair" then - kerndisc(start) - end - start=getnext(start) - elseif id==whatsit_code then - local subtype=getsubtype(start) - if subtype==dir_code then - local dir=getfield(start,"dir") - if dir=="+TRT" or dir=="+TLT" then - topstack=topstack+1 - dirstack[topstack]=dir - elseif dir=="-TRT" or dir=="-TLT" then - topstack=topstack-1 - end - local newdir=dirstack[topstack] - if newdir=="+TRT" then - rlmode=-1 - elseif newdir=="+TLT" then - rlmode=1 - else - rlmode=rlparmode - end - if trace_directions then - report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir) - end - elseif subtype==localpar_code then - local 
dir=getfield(start,"dir") - if dir=="TRT" then - rlparmode=-1 - elseif dir=="TLT" then - rlparmode=1 - else - rlparmode=0 - end - rlmode=rlparmode - if trace_directions then - report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode) - end - end - start=getnext(start) - elseif id==math_code then - start=getnext(end_of_math(start)) - else - start=getnext(start) - end - end - end - else - local function subrun(start) - local head=start - local done=false - while start do - local id=getid(start) - if id==glyph_code and getfont(start)==font and getsubtype(start)<256 then - local a=getattr(start,0) - if a then - a=(a==attr) and (not attribute or getprop(start,a_state)==attribute) - else - a=not attribute or getprop(start,a_state)==attribute - end - if a then - for i=1,ns do - local lookupname=subtables[i] - local lookupcache=lookuphash[lookupname] - if lookupcache then - local lookupmatch=lookupcache[getchar(start)] - if lookupmatch then - local ok - head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) - if ok then - done=true - break - elseif not start then - break - end - end - else - report_missing_cache(typ,lookupname) - end - end - if start then start=getnext(start) end - else - start=getnext(start) - end - else - start=getnext(start) - end - end - if done then - success=true - return head - end - end - local function kerndisc(disc) - local prev=getprev(disc) - local next=getnext(disc) - if prev and next then - setfield(prev,"next",next) - local a=getattr(prev,0) - if a then - a=(a==attr) and (not attribute or getprop(prev,a_state)==attribute) - else - a=not attribute or getprop(prev,a_state)==attribute - end - if a then - for i=1,ns do - local lookupname=subtables[i] - local lookupcache=lookuphash[lookupname] - if lookupcache then - local lookupmatch=lookupcache[getchar(prev)] - if lookupmatch then - local h,d,ok=handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) - if ok then - done=true - break - end - end - else - report_missing_cache(typ,lookupname) - end - end - end - setfield(prev,"next",disc) - end - return next - end - while start do - local id=getid(start) - if id==glyph_code then - if getfont(start)==font and getsubtype(start)<256 then - local a=getattr(start,0) - if a then - a=(a==attr) and (not attribute or getprop(start,a_state)==attribute) - else - a=not attribute or getprop(start,a_state)==attribute - end - if a then - for i=1,ns do - local lookupname=subtables[i] - local lookupcache=lookuphash[lookupname] - if lookupcache then - local lookupmatch=lookupcache[getchar(start)] - if lookupmatch then - local ok - head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) - if ok then - success=true - break - elseif not start then - break - end - end - else - report_missing_cache(typ,lookupname) - end - end - if start then start=getnext(start) end - else - start=getnext(start) - end - else - start=getnext(start) - end - elseif id==disc_code then - if getsubtype(start)==discretionary_code then - local pre=getfield(start,"pre") - if pre then - local new=subrun(pre) - if new then setfield(start,"pre",new) end - end - local post=getfield(start,"post") - if post then - local new=subrun(post) - if new then setfield(start,"post",new) end - end - local replace=getfield(start,"replace") - if replace then - local new=subrun(replace) - if new then setfield(start,"replace",new) end - end -elseif typ=="gpos_single" or typ=="gpos_pair" then - kerndisc(start) - end - 
start=getnext(start) - elseif id==whatsit_code then - local subtype=getsubtype(start) - if subtype==dir_code then - local dir=getfield(start,"dir") - if dir=="+TRT" or dir=="+TLT" then - topstack=topstack+1 - dirstack[topstack]=dir - elseif dir=="-TRT" or dir=="-TLT" then - topstack=topstack-1 - end - local newdir=dirstack[topstack] - if newdir=="+TRT" then - rlmode=-1 - elseif newdir=="+TLT" then - rlmode=1 - else - rlmode=rlparmode - end - if trace_directions then - report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir) - end - elseif subtype==localpar_code then - local dir=getfield(start,"dir") - if dir=="TRT" then - rlparmode=-1 - elseif dir=="TLT" then - rlparmode=1 - else - rlparmode=0 - end - rlmode=rlparmode - if trace_directions then - report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode) - end - end - start=getnext(start) - elseif id==math_code then - start=getnext(end_of_math(start)) - else - start=getnext(start) - end - end - end - end - if success then - done=true - end - if trace_steps then - registerstep(head) - end - end - head=tonode(head) - return head,done -end -local function generic(lookupdata,lookupname,unicode,lookuphash) - local target=lookuphash[lookupname] - if target then - target[unicode]=lookupdata - else - lookuphash[lookupname]={ [unicode]=lookupdata } - end -end -local action={ - substitution=generic, - multiple=generic, - alternate=generic, - position=generic, - ligature=function(lookupdata,lookupname,unicode,lookuphash) - local target=lookuphash[lookupname] - if not target then - target={} - lookuphash[lookupname]=target - end - for i=1,#lookupdata do - local li=lookupdata[i] - local tu=target[li] - if not tu then - tu={} - target[li]=tu - end - target=tu - end - target.ligature=unicode - end, - pair=function(lookupdata,lookupname,unicode,lookuphash) - local target=lookuphash[lookupname] - if not target then - target={} - lookuphash[lookupname]=target - end - local others=target[unicode] - local paired=lookupdata[1] - if others then - others[paired]=lookupdata - else - others={ [paired]=lookupdata } - target[unicode]=others - end - end, -} -local function prepare_lookups(tfmdata) - local rawdata=tfmdata.shared.rawdata - local resources=rawdata.resources - local lookuphash=resources.lookuphash - local anchor_to_lookup=resources.anchor_to_lookup - local lookup_to_anchor=resources.lookup_to_anchor - local lookuptypes=resources.lookuptypes - local characters=tfmdata.characters - local descriptions=tfmdata.descriptions - for unicode,character in next,characters do - local description=descriptions[unicode] - if description then - local lookups=description.slookups - if lookups then - for lookupname,lookupdata in next,lookups do - action[lookuptypes[lookupname]](lookupdata,lookupname,unicode,lookuphash) - end - end - local lookups=description.mlookups - if lookups then - for lookupname,lookuplist in next,lookups do - local lookuptype=lookuptypes[lookupname] - for l=1,#lookuplist do - local lookupdata=lookuplist[l] - action[lookuptype](lookupdata,lookupname,unicode,lookuphash) - end - end - end - local list=description.kerns - if list then - for lookup,krn in next,list do - local target=lookuphash[lookup] - if target then - target[unicode]=krn - else - lookuphash[lookup]={ [unicode]=krn } - end - end - end - local list=description.anchors - if list then - for typ,anchors in next,list do - if typ=="mark" or typ=="cexit" then - for name,anchor in 
next,anchors do - local lookups=anchor_to_lookup[name] - if lookups then - for lookup,_ in next,lookups do - local target=lookuphash[lookup] - if target then - target[unicode]=anchors - else - lookuphash[lookup]={ [unicode]=anchors } - end - end - end - end - end - end - end - end - end -end -local function split(replacement,original) - local result={} - for i=1,#replacement do - result[original[i]]=replacement[i] - end - return result -end -local valid={ - coverage={ chainsub=true,chainpos=true,contextsub=true }, - reversecoverage={ reversesub=true }, - glyphs={ chainsub=true,chainpos=true }, -} -local function prepare_contextchains(tfmdata) - local rawdata=tfmdata.shared.rawdata - local resources=rawdata.resources - local lookuphash=resources.lookuphash - local lookuptags=resources.lookuptags - local lookups=rawdata.lookups - if lookups then - for lookupname,lookupdata in next,rawdata.lookups do - local lookuptype=lookupdata.type - if lookuptype then - local rules=lookupdata.rules - if rules then - local format=lookupdata.format - local validformat=valid[format] - if not validformat then - report_prepare("unsupported format %a",format) - elseif not validformat[lookuptype] then - report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookuptags[lookupname]) - else - local contexts=lookuphash[lookupname] - if not contexts then - contexts={} - lookuphash[lookupname]=contexts - end - local t,nt={},0 - for nofrules=1,#rules do - local rule=rules[nofrules] - local current=rule.current - local before=rule.before - local after=rule.after - local replacements=rule.replacements - local sequence={} - local nofsequences=0 - if before then - for n=1,#before do - nofsequences=nofsequences+1 - sequence[nofsequences]=before[n] - end - end - local start=nofsequences+1 - for n=1,#current do - nofsequences=nofsequences+1 - sequence[nofsequences]=current[n] - end - local stop=nofsequences - if after then - for n=1,#after do - nofsequences=nofsequences+1 - sequence[nofsequences]=after[n] - end - end - if sequence[1] then - nt=nt+1 - t[nt]={ nofrules,lookuptype,sequence,start,stop,rule.lookups,replacements } - for unic,_ in next,sequence[start] do - local cu=contexts[unic] - if not cu then - contexts[unic]=t - end - end - end - end - end - else - end - else - report_prepare("missing lookuptype for lookupname %a",lookuptags[lookupname]) - end - end - end -end -local function featuresinitializer(tfmdata,value) - if true then - local rawdata=tfmdata.shared.rawdata - local properties=rawdata.properties - if not properties.initialized then - local starttime=trace_preparing and os.clock() - local resources=rawdata.resources - resources.lookuphash=resources.lookuphash or {} - prepare_contextchains(tfmdata) - prepare_lookups(tfmdata) - properties.initialized=true - if trace_preparing then - report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,tfmdata.properties.fullname) - end - end - end -end -registerotffeature { - name="features", - description="features", - default=true, - initializers={ - position=1, - node=featuresinitializer, - }, - processors={ - node=featuresprocessor, - } -} -otf.handlers=handlers - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-otp']={ - version=1.001, - comment="companion to font-otf.lua (packing)", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} 
-local next,type=next,type -local sort,concat=table.sort,table.concat -local sortedhash=table.sortedhash -local trace_packing=false trackers.register("otf.packing",function(v) trace_packing=v end) -local trace_loading=false trackers.register("otf.loading",function(v) trace_loading=v end) -local report_otf=logs.reporter("fonts","otf loading") -fonts=fonts or {} -local handlers=fonts.handlers or {} -fonts.handlers=handlers -local otf=handlers.otf or {} -handlers.otf=otf -local enhancers=otf.enhancers or {} -otf.enhancers=enhancers -local glists=otf.glists or { "gsub","gpos" } -otf.glists=glists -local criterium=1 -local threshold=0 -local function tabstr_normal(t) - local s={} - local n=0 - for k,v in next,t do - n=n+1 - if type(v)=="table" then - s[n]=k..">"..tabstr_normal(v) - elseif v==true then - s[n]=k.."+" - elseif v then - s[n]=k.."="..v - else - s[n]=k.."-" - end - end - if n==0 then - return "" - elseif n==1 then - return s[1] - else - sort(s) - return concat(s,",") - end -end -local function tabstr_flat(t) - local s={} - local n=0 - for k,v in next,t do - n=n+1 - s[n]=k.."="..v - end - if n==0 then - return "" - elseif n==1 then - return s[1] - else - sort(s) - return concat(s,",") - end -end -local function tabstr_mixed(t) - local s={} - local n=#t - if n==0 then - return "" - elseif n==1 then - local k=t[1] - if k==true then - return "++" - elseif k==false then - return "--" - else - return tostring(k) - end - else - for i=1,n do - local k=t[i] - if k==true then - s[i]="++" - elseif k==false then - s[i]="--" - else - s[i]=k - end - end - return concat(s,",") - end -end -local function tabstr_boolean(t) - local s={} - local n=0 - for k,v in next,t do - n=n+1 - if v then - s[n]=k.."+" - else - s[n]=k.."-" - end - end - if n==0 then - return "" - elseif n==1 then - return s[1] - else - sort(s) - return concat(s,",") - end -end -local function packdata(data) - if data then - local h,t,c={},{},{} - local hh,tt,cc={},{},{} - local nt,ntt=0,0 - local function pack_normal(v) - local tag=tabstr_normal(v) - local ht=h[tag] - if ht then - c[ht]=c[ht]+1 - return ht - else - nt=nt+1 - t[nt]=v - h[tag]=nt - c[nt]=1 - return nt - end - end - local function pack_flat(v) - local tag=tabstr_flat(v) - local ht=h[tag] - if ht then - c[ht]=c[ht]+1 - return ht - else - nt=nt+1 - t[nt]=v - h[tag]=nt - c[nt]=1 - return nt - end - end - local function pack_boolean(v) - local tag=tabstr_boolean(v) - local ht=h[tag] - if ht then - c[ht]=c[ht]+1 - return ht - else - nt=nt+1 - t[nt]=v - h[tag]=nt - c[nt]=1 - return nt - end - end - local function pack_indexed(v) - local tag=concat(v," ") - local ht=h[tag] - if ht then - c[ht]=c[ht]+1 - return ht - else - nt=nt+1 - t[nt]=v - h[tag]=nt - c[nt]=1 - return nt - end - end - local function pack_mixed(v) - local tag=tabstr_mixed(v) - local ht=h[tag] - if ht then - c[ht]=c[ht]+1 - return ht - else - nt=nt+1 - t[nt]=v - h[tag]=nt - c[nt]=1 - return nt - end - end - local function pack_final(v) - if c[v]<=criterium then - return t[v] - else - local hv=hh[v] - if hv then - return hv - else - ntt=ntt+1 - tt[ntt]=t[v] - hh[v]=ntt - cc[ntt]=c[v] - return ntt - end - end - end - local function success(stage,pass) - if nt==0 then - if trace_loading or trace_packing then - report_otf("pack quality: nothing to pack") - end - return false - elseif nt>=threshold then - local one,two,rest=0,0,0 - if pass==1 then - for k,v in next,c do - if v==1 then - one=one+1 - elseif v==2 then - two=two+1 - else - rest=rest+1 - end - end - else - for k,v in next,cc do - if v>20 then - 
rest=rest+1 - elseif v>10 then - two=two+1 - else - one=one+1 - end - end - data.tables=tt - end - if trace_loading or trace_packing then - report_otf("pack quality: stage %s, pass %s, %s packed, 1-10:%s, 11-20:%s, rest:%s (criterium: %s)",stage,pass,one+two+rest,one,two,rest,criterium) - end - return true - else - if trace_loading or trace_packing then - report_otf("pack quality: stage %s, pass %s, %s packed, aborting pack (threshold: %s)",stage,pass,nt,threshold) - end - return false - end - end - local function packers(pass) - if pass==1 then - return pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed - else - return pack_final,pack_final,pack_final,pack_final,pack_final - end - end - local resources=data.resources - local lookuptypes=resources.lookuptypes - for pass=1,2 do - if trace_packing then - report_otf("start packing: stage 1, pass %s",pass) - end - local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass) - for unicode,description in next,data.descriptions do - local boundingbox=description.boundingbox - if boundingbox then - description.boundingbox=pack_indexed(boundingbox) - end - local slookups=description.slookups - if slookups then - for tag,slookup in next,slookups do - local what=lookuptypes[tag] - if what=="pair" then - local t=slookup[2] if t then slookup[2]=pack_indexed(t) end - local t=slookup[3] if t then slookup[3]=pack_indexed(t) end - elseif what~="substitution" then - slookups[tag]=pack_indexed(slookup) - end - end - end - local mlookups=description.mlookups - if mlookups then - for tag,mlookup in next,mlookups do - local what=lookuptypes[tag] - if what=="pair" then - for i=1,#mlookup do - local lookup=mlookup[i] - local t=lookup[2] if t then lookup[2]=pack_indexed(t) end - local t=lookup[3] if t then lookup[3]=pack_indexed(t) end - end - elseif what~="substitution" then - for i=1,#mlookup do - mlookup[i]=pack_indexed(mlookup[i]) - end - end - end - end - local kerns=description.kerns - if kerns then - for tag,kern in next,kerns do - kerns[tag]=pack_flat(kern) - end - end - local math=description.math - if math then - local kerns=math.kerns - if kerns then - for tag,kern in next,kerns do - kerns[tag]=pack_normal(kern) - end - end - end - local anchors=description.anchors - if anchors then - for what,anchor in next,anchors do - if what=="baselig" then - for _,a in next,anchor do - for k=1,#a do - a[k]=pack_indexed(a[k]) - end - end - else - for k,v in next,anchor do - anchor[k]=pack_indexed(v) - end - end - end - end - local altuni=description.altuni - if altuni then - for i=1,#altuni do - altuni[i]=pack_flat(altuni[i]) - end - end - end - local lookups=data.lookups - if lookups then - for _,lookup in next,lookups do - local rules=lookup.rules - if rules then - for i=1,#rules do - local rule=rules[i] - local r=rule.before if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end - local r=rule.after if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end - local r=rule.current if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end - local r=rule.replacements if r then rule.replacements=pack_flat (r) end - local r=rule.lookups if r then rule.lookups=pack_indexed(r) end - end - end - end - end - local anchor_to_lookup=resources.anchor_to_lookup - if anchor_to_lookup then - for anchor,lookup in next,anchor_to_lookup do - anchor_to_lookup[anchor]=pack_normal(lookup) - end - end - local lookup_to_anchor=resources.lookup_to_anchor - if lookup_to_anchor then - for lookup,anchor in next,lookup_to_anchor do - 
lookup_to_anchor[lookup]=pack_normal(anchor) - end - end - local sequences=resources.sequences - if sequences then - for feature,sequence in next,sequences do - local flags=sequence.flags - if flags then - sequence.flags=pack_normal(flags) - end - local subtables=sequence.subtables - if subtables then - sequence.subtables=pack_normal(subtables) - end - local features=sequence.features - if features then - for script,feature in next,features do - features[script]=pack_normal(feature) - end - end - local order=sequence.order - if order then - sequence.order=pack_indexed(order) - end - local markclass=sequence.markclass - if markclass then - sequence.markclass=pack_boolean(markclass) - end - end - end - local lookups=resources.lookups - if lookups then - for name,lookup in next,lookups do - local flags=lookup.flags - if flags then - lookup.flags=pack_normal(flags) - end - local subtables=lookup.subtables - if subtables then - lookup.subtables=pack_normal(subtables) - end - end - end - local features=resources.features - if features then - for _,what in next,glists do - local list=features[what] - if list then - for feature,spec in next,list do - list[feature]=pack_normal(spec) - end - end - end - end - if not success(1,pass) then - return - end - end - if nt>0 then - for pass=1,2 do - if trace_packing then - report_otf("start packing: stage 2, pass %s",pass) - end - local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass) - for unicode,description in next,data.descriptions do - local kerns=description.kerns - if kerns then - description.kerns=pack_normal(kerns) - end - local math=description.math - if math then - local kerns=math.kerns - if kerns then - math.kerns=pack_normal(kerns) - end - end - local anchors=description.anchors - if anchors then - description.anchors=pack_normal(anchors) - end - local mlookups=description.mlookups - if mlookups then - for tag,mlookup in next,mlookups do - mlookups[tag]=pack_normal(mlookup) - end - end - local altuni=description.altuni - if altuni then - description.altuni=pack_normal(altuni) - end - end - local lookups=data.lookups - if lookups then - for _,lookup in next,lookups do - local rules=lookup.rules - if rules then - for i=1,#rules do - local rule=rules[i] - local r=rule.before if r then rule.before=pack_normal(r) end - local r=rule.after if r then rule.after=pack_normal(r) end - local r=rule.current if r then rule.current=pack_normal(r) end - end - end - end - end - local sequences=resources.sequences - if sequences then - for feature,sequence in next,sequences do - sequence.features=pack_normal(sequence.features) - end - end - if not success(2,pass) then - end - end - for pass=1,2 do - local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass) - for unicode,description in next,data.descriptions do - local slookups=description.slookups - if slookups then - description.slookups=pack_normal(slookups) - end - local mlookups=description.mlookups - if mlookups then - description.mlookups=pack_normal(mlookups) - end - end - end - end - end -end -local unpacked_mt={ - __index=function(t,k) - t[k]=false - return k - end -} -local function unpackdata(data) - if data then - local tables=data.tables - if tables then - local resources=data.resources - local lookuptypes=resources.lookuptypes - local unpacked={} - setmetatable(unpacked,unpacked_mt) - for unicode,description in next,data.descriptions do - local tv=tables[description.boundingbox] - if tv then - description.boundingbox=tv - end - local 
slookups=description.slookups - if slookups then - local tv=tables[slookups] - if tv then - description.slookups=tv - slookups=unpacked[tv] - end - if slookups then - for tag,lookup in next,slookups do - local what=lookuptypes[tag] - if what=="pair" then - local tv=tables[lookup[2]] - if tv then - lookup[2]=tv - end - local tv=tables[lookup[3]] - if tv then - lookup[3]=tv - end - elseif what~="substitution" then - local tv=tables[lookup] - if tv then - slookups[tag]=tv - end - end - end - end - end - local mlookups=description.mlookups - if mlookups then - local tv=tables[mlookups] - if tv then - description.mlookups=tv - mlookups=unpacked[tv] - end - if mlookups then - for tag,list in next,mlookups do - local tv=tables[list] - if tv then - mlookups[tag]=tv - list=unpacked[tv] - end - if list then - local what=lookuptypes[tag] - if what=="pair" then - for i=1,#list do - local lookup=list[i] - local tv=tables[lookup[2]] - if tv then - lookup[2]=tv - end - local tv=tables[lookup[3]] - if tv then - lookup[3]=tv - end - end - elseif what~="substitution" then - for i=1,#list do - local tv=tables[list[i]] - if tv then - list[i]=tv - end - end - end - end - end - end - end - local kerns=description.kerns - if kerns then - local tm=tables[kerns] - if tm then - description.kerns=tm - kerns=unpacked[tm] - end - if kerns then - for k,kern in next,kerns do - local tv=tables[kern] - if tv then - kerns[k]=tv - end - end - end - end - local math=description.math - if math then - local kerns=math.kerns - if kerns then - local tm=tables[kerns] - if tm then - math.kerns=tm - kerns=unpacked[tm] - end - if kerns then - for k,kern in next,kerns do - local tv=tables[kern] - if tv then - kerns[k]=tv - end - end - end - end - end - local anchors=description.anchors - if anchors then - local ta=tables[anchors] - if ta then - description.anchors=ta - anchors=unpacked[ta] - end - if anchors then - for tag,anchor in next,anchors do - if tag=="baselig" then - for _,list in next,anchor do - for i=1,#list do - local tv=tables[list[i]] - if tv then - list[i]=tv - end - end - end - else - for a,data in next,anchor do - local tv=tables[data] - if tv then - anchor[a]=tv - end - end - end - end - end - end - local altuni=description.altuni - if altuni then - local altuni=tables[altuni] - if altuni then - description.altuni=altuni - for i=1,#altuni do - local tv=tables[altuni[i]] - if tv then - altuni[i]=tv - end - end - end - end - end - local lookups=data.lookups - if lookups then - for _,lookup in next,lookups do - local rules=lookup.rules - if rules then - for i=1,#rules do - local rule=rules[i] - local before=rule.before - if before then - local tv=tables[before] - if tv then - rule.before=tv - before=unpacked[tv] - end - if before then - for i=1,#before do - local tv=tables[before[i]] - if tv then - before[i]=tv - end - end - end - end - local after=rule.after - if after then - local tv=tables[after] - if tv then - rule.after=tv - after=unpacked[tv] - end - if after then - for i=1,#after do - local tv=tables[after[i]] - if tv then - after[i]=tv - end - end - end - end - local current=rule.current - if current then - local tv=tables[current] - if tv then - rule.current=tv - current=unpacked[tv] - end - if current then - for i=1,#current do - local tv=tables[current[i]] - if tv then - current[i]=tv - end - end - end - end - local replacements=rule.replacements - if replacements then - local tv=tables[replacements] - if tv then - rule.replacements=tv - end - end - local lookups=rule.lookups - if lookups then - local 
tv=tables[lookups] - if tv then - rule.lookups=tv - end - end - end - end - end - end - local anchor_to_lookup=resources.anchor_to_lookup - if anchor_to_lookup then - for anchor,lookup in next,anchor_to_lookup do - local tv=tables[lookup] - if tv then - anchor_to_lookup[anchor]=tv - end - end - end - local lookup_to_anchor=resources.lookup_to_anchor - if lookup_to_anchor then - for lookup,anchor in next,lookup_to_anchor do - local tv=tables[anchor] - if tv then - lookup_to_anchor[lookup]=tv - end - end - end - local ls=resources.sequences - if ls then - for _,feature in next,ls do - local flags=feature.flags - if flags then - local tv=tables[flags] - if tv then - feature.flags=tv - end - end - local subtables=feature.subtables - if subtables then - local tv=tables[subtables] - if tv then - feature.subtables=tv - end - end - local features=feature.features - if features then - local tv=tables[features] - if tv then - feature.features=tv - features=unpacked[tv] - end - if features then - for script,data in next,features do - local tv=tables[data] - if tv then - features[script]=tv - end - end - end - end - local order=feature.order - if order then - local tv=tables[order] - if tv then - feature.order=tv - end - end - local markclass=feature.markclass - if markclass then - local tv=tables[markclass] - if tv then - feature.markclass=tv - end - end - end - end - local lookups=resources.lookups - if lookups then - for _,lookup in next,lookups do - local flags=lookup.flags - if flags then - local tv=tables[flags] - if tv then - lookup.flags=tv - end - end - local subtables=lookup.subtables - if subtables then - local tv=tables[subtables] - if tv then - lookup.subtables=tv - end - end - end - end - local features=resources.features - if features then - for _,what in next,glists do - local feature=features[what] - if feature then - for tag,spec in next,feature do - local tv=tables[spec] - if tv then - feature[tag]=tv - end - end - end - end - end - data.tables=nil - end - end -end -if otf.enhancers.register then - otf.enhancers.register("pack",packdata) - otf.enhancers.register("unpack",unpackdata) -end -otf.enhancers.unpack=unpackdata -otf.enhancers.pack=packdata - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['luatex-fonts-lua']={ - version=1.001, - comment="companion to luatex-*.tex", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end -local fonts=fonts -fonts.formats.lua="lua" -function fonts.readers.lua(specification) - local fullname=specification.filename or "" - if fullname=="" then - local forced=specification.forced or "" - if forced~="" then - fullname=specification.name.."."..forced - else - fullname=specification.name - end - end - local fullname=resolvers.findfile(fullname) or "" - if fullname~="" then - local loader=loadfile(fullname) - loader=loader and loader() - return loader and loader(specification) - end -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-def']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local 
format,gmatch,match,find,lower,gsub=string.format,string.gmatch,string.match,string.find,string.lower,string.gsub -local tostring,next=tostring,next -local lpegmatch=lpeg.match -local suffixonly,removesuffix=file.suffix,file.removesuffix -local allocate=utilities.storage.allocate -local trace_defining=false trackers .register("fonts.defining",function(v) trace_defining=v end) -local directive_embedall=false directives.register("fonts.embedall",function(v) directive_embedall=v end) -trackers.register("fonts.loading","fonts.defining","otf.loading","afm.loading","tfm.loading") -trackers.register("fonts.all","fonts.*","otf.*","afm.*","tfm.*") -local report_defining=logs.reporter("fonts","defining") -local fonts=fonts -local fontdata=fonts.hashes.identifiers -local readers=fonts.readers -local definers=fonts.definers -local specifiers=fonts.specifiers -local constructors=fonts.constructors -local fontgoodies=fonts.goodies -readers.sequence=allocate { 'otf','ttf','afm','tfm','lua' } -local variants=allocate() -specifiers.variants=variants -definers.methods=definers.methods or {} -local internalized=allocate() -local lastdefined=nil -local loadedfonts=constructors.loadedfonts -local designsizes=constructors.designsizes -local resolvefile=fontgoodies and fontgoodies.filenames and fontgoodies.filenames.resolve or function(s) return s end -local splitter,splitspecifiers=nil,"" -local P,C,S,Cc=lpeg.P,lpeg.C,lpeg.S,lpeg.Cc -local left=P("(") -local right=P(")") -local colon=P(":") -local space=P(" ") -definers.defaultlookup="file" -local prefixpattern=P(false) -local function addspecifier(symbol) - splitspecifiers=splitspecifiers..symbol - local method=S(splitspecifiers) - local lookup=C(prefixpattern)*colon - local sub=left*C(P(1-left-right-method)^1)*right - local specification=C(method)*C(P(1)^1) - local name=C((1-sub-specification)^1) - splitter=P((lookup+Cc(""))*name*(sub+Cc(""))*(specification+Cc(""))) -end -local function addlookup(str,default) - prefixpattern=prefixpattern+P(str) -end -definers.addlookup=addlookup -addlookup("file") -addlookup("name") -addlookup("spec") -local function getspecification(str) - return lpegmatch(splitter,str or "") -end -definers.getspecification=getspecification -function definers.registersplit(symbol,action,verbosename) - addspecifier(symbol) - variants[symbol]=action - if verbosename then - variants[verbosename]=action - end -end -local function makespecification(specification,lookup,name,sub,method,detail,size) - size=size or 655360 - if not lookup or lookup=="" then - lookup=definers.defaultlookup - end - if trace_defining then - report_defining("specification %a, lookup %a, name %a, sub %a, method %a, detail %a", - specification,lookup,name,sub,method,detail) - end - local t={ - lookup=lookup, - specification=specification, - size=size, - name=name, - sub=sub, - method=method, - detail=detail, - resolved="", - forced="", - features={}, - } - return t -end -definers.makespecification=makespecification -function definers.analyze(specification,size) - local lookup,name,sub,method,detail=getspecification(specification or "") - return makespecification(specification,lookup,name,sub,method,detail,size) -end -definers.resolvers=definers.resolvers or {} -local resolvers=definers.resolvers -function resolvers.file(specification) - local name=resolvefile(specification.name) - local suffix=lower(suffixonly(name)) - if fonts.formats[suffix] then - specification.forced=suffix - specification.forcedname=name - specification.name=removesuffix(name) - else - 
specification.name=name - end -end -function resolvers.name(specification) - local resolve=fonts.names.resolve - if resolve then - local resolved,sub=resolve(specification.name,specification.sub,specification) - if resolved then - specification.resolved=resolved - specification.sub=sub - local suffix=lower(suffixonly(resolved)) - if fonts.formats[suffix] then - specification.forced=suffix - specification.forcedname=resolved - specification.name=removesuffix(resolved) - else - specification.name=resolved - end - end - else - resolvers.file(specification) - end -end -function resolvers.spec(specification) - local resolvespec=fonts.names.resolvespec - if resolvespec then - local resolved,sub=resolvespec(specification.name,specification.sub,specification) - if resolved then - specification.resolved=resolved - specification.sub=sub - specification.forced=lower(suffixonly(resolved)) - specification.forcedname=resolved - specification.name=removesuffix(resolved) - end - else - resolvers.name(specification) - end -end -function definers.resolve(specification) - if not specification.resolved or specification.resolved=="" then - local r=resolvers[specification.lookup] - if r then - r(specification) - end - end - if specification.forced=="" then - specification.forced=nil - specification.forcedname=nil - end - specification.hash=lower(specification.name..' @ '..constructors.hashfeatures(specification)) - if specification.sub and specification.sub~="" then - specification.hash=specification.sub..' @ '..specification.hash - end - return specification -end -function definers.applypostprocessors(tfmdata) - local postprocessors=tfmdata.postprocessors - if postprocessors then - local properties=tfmdata.properties - for i=1,#postprocessors do - local extrahash=postprocessors[i](tfmdata) - if type(extrahash)=="string" and extrahash~="" then - extrahash=gsub(lower(extrahash),"[^a-z]","-") - properties.fullname=format("%s-%s",properties.fullname,extrahash) - end - end - end - return tfmdata -end -local function checkembedding(tfmdata) - local properties=tfmdata.properties - local embedding - if directive_embedall then - embedding="full" - elseif properties and properties.filename and constructors.dontembed[properties.filename] then - embedding="no" - else - embedding="subset" - end - if properties then - properties.embedding=embedding - else - tfmdata.properties={ embedding=embedding } - end - tfmdata.embedding=embedding -end -function definers.loadfont(specification) - local hash=constructors.hashinstance(specification) - local tfmdata=loadedfonts[hash] - if not tfmdata then - local forced=specification.forced or "" - if forced~="" then - local reader=readers[lower(forced)] - tfmdata=reader and reader(specification) - if not tfmdata then - report_defining("forced type %a of %a not found",forced,specification.name) - end - else - local sequence=readers.sequence - for s=1,#sequence do - local reader=sequence[s] - if readers[reader] then - if trace_defining then - report_defining("trying (reader sequence driven) type %a for %a with file %a",reader,specification.name,specification.filename) - end - tfmdata=readers[reader](specification) - if tfmdata then - break - else - specification.filename=nil - end - end - end - end - if tfmdata then - tfmdata=definers.applypostprocessors(tfmdata) - checkembedding(tfmdata) - loadedfonts[hash]=tfmdata - designsizes[specification.hash]=tfmdata.parameters.designsize - end - end - if not tfmdata then - report_defining("font with asked name %a is not found using lookup 
%a",specification.name,specification.lookup) - end - return tfmdata -end -function constructors.checkvirtualids() -end -function constructors.readanddefine(name,size) - local specification=definers.analyze(name,size) - local method=specification.method - if method and variants[method] then - specification=variants[method](specification) - end - specification=definers.resolve(specification) - local hash=constructors.hashinstance(specification) - local id=definers.registered(hash) - if not id then - local tfmdata=definers.loadfont(specification) - if tfmdata then - tfmdata.properties.hash=hash - constructors.checkvirtualids(tfmdata) - id=font.define(tfmdata) - definers.register(tfmdata,id) - else - id=0 - end - end - return fontdata[id],id -end -function definers.current() - return lastdefined -end -function definers.registered(hash) - local id=internalized[hash] - return id,id and fontdata[id] -end -function definers.register(tfmdata,id) - if tfmdata and id then - local hash=tfmdata.properties.hash - if not hash then - report_defining("registering font, id %a, name %a, invalid hash",id,tfmdata.properties.filename or "?") - elseif not internalized[hash] then - internalized[hash]=id - if trace_defining then - report_defining("registering font, id %s, hash %a",id,hash) - end - fontdata[id]=tfmdata - end - end -end -function definers.read(specification,size,id) - statistics.starttiming(fonts) - if type(specification)=="string" then - specification=definers.analyze(specification,size) - end - local method=specification.method - if method and variants[method] then - specification=variants[method](specification) - end - specification=definers.resolve(specification) - local hash=constructors.hashinstance(specification) - local tfmdata=definers.registered(hash) - if tfmdata then - if trace_defining then - report_defining("already hashed: %s",hash) - end - else - tfmdata=definers.loadfont(specification) - if tfmdata then - if trace_defining then - report_defining("loaded and hashed: %s",hash) - end - tfmdata.properties.hash=hash - if id then - definers.register(tfmdata,id) - end - else - if trace_defining then - report_defining("not loaded and hashed: %s",hash) - end - end - end - lastdefined=tfmdata or id - if not tfmdata then - report_defining("unknown font %a, loading aborted",specification.name) - elseif trace_defining and type(tfmdata)=="table" then - local properties=tfmdata.properties or {} - local parameters=tfmdata.parameters or {} - report_defining("using %a font with id %a, name %a, size %a, bytes %a, encoding %a, fullname %a, filename %a", - properties.format or "unknown",id,properties.name,parameters.size,properties.encodingbytes, - properties.encodingname,properties.fullname,file.basename(properties.filename)) - end - statistics.stoptiming(fonts) - return tfmdata -end -function font.getfont(id) - return fontdata[id] -end -callbacks.register('define_font',definers.read,"definition of fonts (tfmdata preparation)") - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['luatex-font-def']={ - version=1.001, - comment="companion to luatex-*.tex", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end -local fonts=fonts -fonts.constructors.namemode="specification" -function fonts.definers.getspecification(str) - return 
"",str,"",":",str -end -local list={} -local function issome () list.lookup='name' end -local function isfile () list.lookup='file' end -local function isname () list.lookup='name' end -local function thename(s) list.name=s end -local function issub (v) list.sub=v end -local function iscrap (s) list.crap=string.lower(s) end -local function iskey (k,v) list[k]=v end -local function istrue (s) list[s]=true end -local function isfalse(s) list[s]=false end -local P,S,R,C=lpeg.P,lpeg.S,lpeg.R,lpeg.C -local spaces=P(" ")^0 -local namespec=(1-S("/:("))^0 -local crapspec=spaces*P("/")*(((1-P(":"))^0)/iscrap)*spaces -local filename_1=P("file:")/isfile*(namespec/thename) -local filename_2=P("[")*P(true)/isname*(((1-P("]"))^0)/thename)*P("]") -local fontname_1=P("name:")/isname*(namespec/thename) -local fontname_2=P(true)/issome*(namespec/thename) -local sometext=(R("az","AZ","09")+S("+-."))^1 -local truevalue=P("+")*spaces*(sometext/istrue) -local falsevalue=P("-")*spaces*(sometext/isfalse) -local keyvalue=(C(sometext)*spaces*P("=")*spaces*C(sometext))/iskey -local somevalue=sometext/istrue -local subvalue=P("(")*(C(P(1-S("()"))^1)/issub)*P(")") -local option=spaces*(keyvalue+falsevalue+truevalue+somevalue)*spaces -local options=P(":")*spaces*(P(";")^0*option)^0 -local pattern=(filename_1+filename_2+fontname_1+fontname_2)*subvalue^0*crapspec^0*options^0 -local function colonized(specification) - list={} - lpeg.match(pattern,specification.specification) - list.crap=nil - if list.name then - specification.name=list.name - list.name=nil - end - if list.lookup then - specification.lookup=list.lookup - list.lookup=nil - end - if list.sub then - specification.sub=list.sub - list.sub=nil - end - specification.features.normal=fonts.handlers.otf.features.normalize(list) - return specification -end -fonts.definers.registersplit(":",colonized,"cryptic") -fonts.definers.registersplit("",colonized,"more cryptic") -function fonts.definers.applypostprocessors(tfmdata) - local postprocessors=tfmdata.postprocessors - if postprocessors then - for i=1,#postprocessors do - local extrahash=postprocessors[i](tfmdata) - if type(extrahash)=="string" and extrahash~="" then - extrahash=string.gsub(lower(extrahash),"[^a-z]","-") - tfmdata.properties.fullname=format("%s-%s",tfmdata.properties.fullname,extrahash) - end - end - end - return tfmdata -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['luatex-fonts-ext']={ - version=1.001, - comment="companion to luatex-*.tex", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end -local fonts=fonts -local otffeatures=fonts.constructors.newfeatures("otf") -local function initializeitlc(tfmdata,value) - if value then - local parameters=tfmdata.parameters - local italicangle=parameters.italicangle - if italicangle and italicangle~=0 then - local properties=tfmdata.properties - local factor=tonumber(value) or 1 - properties.hasitalics=true - properties.autoitalicamount=factor*(parameters.uwidth or 40)/2 - end - end -end -otffeatures.register { - name="itlc", - description="italic correction", - initializers={ - base=initializeitlc, - node=initializeitlc, - } -} -local function initializeslant(tfmdata,value) - value=tonumber(value) - if not value then - value=0 - elseif value>1 then - value=1 - elseif value<-1 then 
- value=-1 - end - tfmdata.parameters.slantfactor=value -end -otffeatures.register { - name="slant", - description="slant glyphs", - initializers={ - base=initializeslant, - node=initializeslant, - } -} -local function initializeextend(tfmdata,value) - value=tonumber(value) - if not value then - value=0 - elseif value>10 then - value=10 - elseif value<-10 then - value=-10 - end - tfmdata.parameters.extendfactor=value -end -otffeatures.register { - name="extend", - description="scale glyphs horizontally", - initializers={ - base=initializeextend, - node=initializeextend, - } -} -fonts.protrusions=fonts.protrusions or {} -fonts.protrusions.setups=fonts.protrusions.setups or {} -local setups=fonts.protrusions.setups -local function initializeprotrusion(tfmdata,value) - if value then - local setup=setups[value] - if setup then - local factor,left,right=setup.factor or 1,setup.left or 1,setup.right or 1 - local emwidth=tfmdata.parameters.quad - tfmdata.parameters.protrusion={ - auto=true, - } - for i,chr in next,tfmdata.characters do - local v,pl,pr=setup[i],nil,nil - if v then - pl,pr=v[1],v[2] - end - if pl and pl~=0 then chr.left_protruding=left*pl*factor end - if pr and pr~=0 then chr.right_protruding=right*pr*factor end - end - end - end -end -otffeatures.register { - name="protrusion", - description="shift characters into the left and or right margin", - initializers={ - base=initializeprotrusion, - node=initializeprotrusion, - } -} -fonts.expansions=fonts.expansions or {} -fonts.expansions.setups=fonts.expansions.setups or {} -local setups=fonts.expansions.setups -local function initializeexpansion(tfmdata,value) - if value then - local setup=setups[value] - if setup then - local factor=setup.factor or 1 - tfmdata.parameters.expansion={ - stretch=10*(setup.stretch or 0), - shrink=10*(setup.shrink or 0), - step=10*(setup.step or 0), - auto=true, - } - for i,chr in next,tfmdata.characters do - local v=setup[i] - if v and v~=0 then - chr.expansion_factor=v*factor - else - chr.expansion_factor=factor - end - end - end - end -end -otffeatures.register { - name="expansion", - description="apply hz optimization", - initializers={ - base=initializeexpansion, - node=initializeexpansion, - } -} -function fonts.loggers.onetimemessage() end -local byte=string.byte -fonts.expansions.setups['default']={ - stretch=2,shrink=2,step=.5,factor=1, - [byte('A')]=0.5,[byte('B')]=0.7,[byte('C')]=0.7,[byte('D')]=0.5,[byte('E')]=0.7, - [byte('F')]=0.7,[byte('G')]=0.5,[byte('H')]=0.7,[byte('K')]=0.7,[byte('M')]=0.7, - [byte('N')]=0.7,[byte('O')]=0.5,[byte('P')]=0.7,[byte('Q')]=0.5,[byte('R')]=0.7, - [byte('S')]=0.7,[byte('U')]=0.7,[byte('W')]=0.7,[byte('Z')]=0.7, - [byte('a')]=0.7,[byte('b')]=0.7,[byte('c')]=0.7,[byte('d')]=0.7,[byte('e')]=0.7, - [byte('g')]=0.7,[byte('h')]=0.7,[byte('k')]=0.7,[byte('m')]=0.7,[byte('n')]=0.7, - [byte('o')]=0.7,[byte('p')]=0.7,[byte('q')]=0.7,[byte('s')]=0.7,[byte('u')]=0.7, - [byte('w')]=0.7,[byte('z')]=0.7, - [byte('2')]=0.7,[byte('3')]=0.7,[byte('6')]=0.7,[byte('8')]=0.7,[byte('9')]=0.7, -} -fonts.protrusions.setups['default']={ - factor=1,left=1,right=1, - [0x002C]={ 0,1 }, - [0x002E]={ 0,1 }, - [0x003A]={ 0,1 }, - [0x003B]={ 0,1 }, - [0x002D]={ 0,1 }, - [0x2013]={ 0,0.50 }, - [0x2014]={ 0,0.33 }, - [0x3001]={ 0,1 }, - [0x3002]={ 0,1 }, - [0x060C]={ 0,1 }, - [0x061B]={ 0,1 }, - [0x06D4]={ 0,1 }, -} -fonts.handlers.otf.features.normalize=function(t) - if t.rand then - t.rand="random" - end - return t -end -function fonts.helpers.nametoslot(name) - local t=type(name) - if 
t=="string" then - local tfmdata=fonts.hashes.identifiers[currentfont()] - local shared=tfmdata and tfmdata.shared - local fntdata=shared and shared.rawdata - return fntdata and fntdata.resources.unicodes[name] - elseif t=="number" then - return n - end -end -fonts.encodings=fonts.encodings or {} -local reencodings={} -fonts.encodings.reencodings=reencodings -local function specialreencode(tfmdata,value) - local encoding=value and reencodings[value] - if encoding then - local temp={} - local char=tfmdata.characters - for k,v in next,encoding do - temp[k]=char[v] - end - for k,v in next,temp do - char[k]=temp[k] - end - return string.format("reencoded:%s",value) - end -end -local function reencode(tfmdata,value) - tfmdata.postprocessors=tfmdata.postprocessors or {} - table.insert(tfmdata.postprocessors, - function(tfmdata) - return specialreencode(tfmdata,value) - end - ) -end -otffeatures.register { - name="reencode", - description="reencode characters", - manipulators={ - base=reencode, - node=reencode, - } -} - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['luatex-fonts-cbk']={ - version=1.001, - comment="companion to luatex-*.tex", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end -local fonts=fonts -local nodes=nodes -local traverse_id=node.traverse_id -local glyph_code=nodes.nodecodes.glyph -local ligaturing=node.ligaturing -local kerning=node.kerning -function node.ligaturing() texio.write_nl("warning: node.ligaturing is already applied") end -function node.kerning () texio.write_nl("warning: node.kerning is already applied") end -function nodes.handlers.characters(head) - local fontdata=fonts.hashes.identifiers - if fontdata then - local usedfonts,basefonts,prevfont,basefont={},{},nil,nil - for n in traverse_id(glyph_code,head) do - local font=n.font - if font~=prevfont then - if basefont then - basefont[2]=n.prev - end - prevfont=font - local used=usedfonts[font] - if not used then - local tfmdata=fontdata[font] - if tfmdata then - local shared=tfmdata.shared - if shared then - local processors=shared.processes - if processors and #processors>0 then - usedfonts[font]=processors - else - basefont={ n,nil } - basefonts[#basefonts+1]=basefont - end - end - end - end - end - end - if next(usedfonts) then - for font,processors in next,usedfonts do - for i=1,#processors do - head=processors[i](head,font,0) or head - end - end - end - if #basefonts>0 then - for i=1,#basefonts do - local range=basefonts[i] - local start,stop=range[1],range[2] - if stop then - ligaturing(start,stop) - kerning(start,stop) - else - ligaturing(start) - kerning(start) - end - end - end - return head,true - else - return head,false - end -end -function nodes.simple_font_handler(head) - head=nodes.handlers.characters(head) - nodes.injections.handler(head) - nodes.handlers.protectglyphs(head) - return head -end - -end -- closure diff --git a/src/fontloader/fontloader-fonts-cbk.lua b/src/fontloader/fontloader-fonts-cbk.lua deleted file mode 100644 index 9db94f6..0000000 --- a/src/fontloader/fontloader-fonts-cbk.lua +++ /dev/null @@ -1,68 +0,0 @@ -if not modules then modules = { } end modules ['luatex-fonts-cbk'] = { - version = 1.001, - comment = "companion to luatex-*.tex", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = 
"PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end - -local fonts = fonts -local nodes = nodes - --- Fonts: (might move to node-gef.lua) - -local traverse_id = node.traverse_id -local glyph_code = nodes.nodecodes.glyph - -function nodes.handlers.characters(head) - local fontdata = fonts.hashes.identifiers - if fontdata then - local usedfonts, done, prevfont = { }, false, nil - for n in traverse_id(glyph_code,head) do - local font = n.font - if font ~= prevfont then - prevfont = font - local used = usedfonts[font] - if not used then - local tfmdata = fontdata[font] -- - if tfmdata then - local shared = tfmdata.shared -- we need to check shared, only when same features - if shared then - local processors = shared.processes - if processors and #processors > 0 then - usedfonts[font] = processors - done = true - end - end - end - end - end - end - if done then - for font, processors in next, usedfonts do - for i=1,#processors do - local h, d = processors[i](head,font,0) - head, done = h or head, done or d - end - end - end - return head, true - else - return head, false - end -end - -function nodes.simple_font_handler(head) --- lang.hyphenate(head) - head = nodes.handlers.characters(head) - nodes.injections.handler(head) - nodes.handlers.protectglyphs(head) - head = node.ligaturing(head) - head = node.kerning(head) - return head -end diff --git a/src/fontloader/fontloader-fonts-def.lua b/src/fontloader/fontloader-fonts-def.lua deleted file mode 100644 index 0c2f0db..0000000 --- a/src/fontloader/fontloader-fonts-def.lua +++ /dev/null @@ -1,97 +0,0 @@ -if not modules then modules = { } end modules ['luatex-font-def'] = { - version = 1.001, - comment = "companion to luatex-*.tex", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end - -local fonts = fonts - --- A bit of tuning for definitions. - -fonts.constructors.namemode = "specification" -- somehow latex needs this (changed name!) => will change into an overload - --- tricky: we sort of bypass the parser and directly feed all into --- the sub parser - -function fonts.definers.getspecification(str) - return "", str, "", ":", str -end - --- the generic name parser (different from context!) - -local list = { } - -local function issome () list.lookup = 'name' end -- xetex mode prefers name (not in context!) 
-local function isfile () list.lookup = 'file' end -local function isname () list.lookup = 'name' end -local function thename(s) list.name = s end -local function issub (v) list.sub = v end -local function iscrap (s) list.crap = string.lower(s) end -local function iskey (k,v) list[k] = v end -local function istrue (s) list[s] = true end -local function isfalse(s) list[s] = false end - -local P, S, R, C = lpeg.P, lpeg.S, lpeg.R, lpeg.C - -local spaces = P(" ")^0 -local namespec = (1-S("/:("))^0 -- was: (1-S("/: ("))^0 -local crapspec = spaces * P("/") * (((1-P(":"))^0)/iscrap) * spaces -local filename_1 = P("file:")/isfile * (namespec/thename) -local filename_2 = P("[") * P(true)/isname * (((1-P("]"))^0)/thename) * P("]") -local fontname_1 = P("name:")/isname * (namespec/thename) -local fontname_2 = P(true)/issome * (namespec/thename) -local sometext = (R("az","AZ","09") + S("+-."))^1 -local truevalue = P("+") * spaces * (sometext/istrue) -local falsevalue = P("-") * spaces * (sometext/isfalse) -local keyvalue = (C(sometext) * spaces * P("=") * spaces * C(sometext))/iskey -local somevalue = sometext/istrue -local subvalue = P("(") * (C(P(1-S("()"))^1)/issub) * P(")") -- for Kim -local option = spaces * (keyvalue + falsevalue + truevalue + somevalue) * spaces -local options = P(":") * spaces * (P(";")^0 * option)^0 - -local pattern = (filename_1 + filename_2 + fontname_1 + fontname_2) * subvalue^0 * crapspec^0 * options^0 - -local function colonized(specification) -- xetex mode - list = { } - lpeg.match(pattern,specification.specification) - list.crap = nil -- style not supported, maybe some day - if list.name then - specification.name = list.name - list.name = nil - end - if list.lookup then - specification.lookup = list.lookup - list.lookup = nil - end - if list.sub then - specification.sub = list.sub - list.sub = nil - end - specification.features.normal = fonts.handlers.otf.features.normalize(list) - return specification -end - -fonts.definers.registersplit(":",colonized,"cryptic") -fonts.definers.registersplit("", colonized,"more cryptic") -- catches \font\text=[names] - -function fonts.definers.applypostprocessors(tfmdata) - local postprocessors = tfmdata.postprocessors - if postprocessors then - for i=1,#postprocessors do - local extrahash = postprocessors[i](tfmdata) -- after scaling etc - if type(extrahash) == "string" and extrahash ~= "" then - -- e.g. 
a reencoding needs this - extrahash = string.gsub(lower(extrahash),"[^a-z]","-") - tfmdata.properties.fullname = format("%s-%s",tfmdata.properties.fullname,extrahash) - end - end - end - return tfmdata -end diff --git a/src/fontloader/fontloader-fonts-enc.lua b/src/fontloader/fontloader-fonts-enc.lua deleted file mode 100644 index e20c3a0..0000000 --- a/src/fontloader/fontloader-fonts-enc.lua +++ /dev/null @@ -1,28 +0,0 @@ -if not modules then modules = { } end modules ['luatex-font-enc'] = { - version = 1.001, - comment = "companion to luatex-*.tex", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end - -local fonts = fonts -fonts.encodings = { } -fonts.encodings.agl = { } - -setmetatable(fonts.encodings.agl, { __index = function(t,k) - if k == "unicodes" then - texio.write(" ") - local unicodes = dofile(resolvers.findfile("font-age.lua")) - fonts.encodings.agl = { unicodes = unicodes } - return unicodes - else - return nil - end -end }) - diff --git a/src/fontloader/fontloader-fonts-ext.lua b/src/fontloader/fontloader-fonts-ext.lua deleted file mode 100644 index b60d045..0000000 --- a/src/fontloader/fontloader-fonts-ext.lua +++ /dev/null @@ -1,272 +0,0 @@ -if not modules then modules = { } end modules ['luatex-fonts-ext'] = { - version = 1.001, - comment = "companion to luatex-*.tex", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end - -local fonts = fonts -local otffeatures = fonts.constructors.newfeatures("otf") - --- A few generic extensions. 
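(Editorial aside, not part of the patch.) The generic extensions that follow in this hunk (itlc, slant, extend, protrusion, expansion) are plain feature initializers, so they are reached through the same xetex-style request syntax that the generic name parser in the fontloader-fonts-def.lua hunk above accepts: an optional file: or name: prefix, a font name, and a colon-separated list of +feature, -feature and key=value options. Below is a minimal sketch of issuing such a request from the Lua end; it assumes a plain LuaTeX run with this generic fontloader active, the font name and values are examples only, and the expected outcomes are inferred from the initializers rather than taken from the patch.

-- Hypothetical illustration: define a font through the generic definer so that
-- the slant/extend/protrusion initializers shown in this hunk get exercised.
local size = 10 * 65536  -- 10pt expressed in scaled points
local tfmdata, id = fonts.constructors.readanddefine(
  "file:lmroman10-regular:+liga;slant=0.2;extend=1.1;protrusion=default", size)
-- If the request succeeds, the initializers are expected to have set
--   tfmdata.parameters.slantfactor  to 0.2 (clamped to [-1, 1]),
--   tfmdata.parameters.extendfactor to 1.1 (clamped to [-10, 10]),
--   tfmdata.parameters.protrusion   to { auto = true }, with per-character
--   factors taken from fonts.protrusions.setups['default'].

In a document the same request would normally be written as a \font assignment rather than issued from Lua.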
- -local function initializeitlc(tfmdata,value) - if value then - -- the magic 40 and it formula come from Dohyun Kim but we might need another guess - local parameters = tfmdata.parameters - local italicangle = parameters.italicangle - if italicangle and italicangle ~= 0 then - local properties = tfmdata.properties - local factor = tonumber(value) or 1 - properties.hasitalics = true - properties.autoitalicamount = factor * (parameters.uwidth or 40)/2 - end - end -end - -otffeatures.register { - name = "itlc", - description = "italic correction", - initializers = { - base = initializeitlc, - node = initializeitlc, - } -} - --- slant and extend - -local function initializeslant(tfmdata,value) - value = tonumber(value) - if not value then - value = 0 - elseif value > 1 then - value = 1 - elseif value < -1 then - value = -1 - end - tfmdata.parameters.slantfactor = value -end - -otffeatures.register { - name = "slant", - description = "slant glyphs", - initializers = { - base = initializeslant, - node = initializeslant, - } -} - -local function initializeextend(tfmdata,value) - value = tonumber(value) - if not value then - value = 0 - elseif value > 10 then - value = 10 - elseif value < -10 then - value = -10 - end - tfmdata.parameters.extendfactor = value -end - -otffeatures.register { - name = "extend", - description = "scale glyphs horizontally", - initializers = { - base = initializeextend, - node = initializeextend, - } -} - --- expansion and protrusion - -fonts.protrusions = fonts.protrusions or { } -fonts.protrusions.setups = fonts.protrusions.setups or { } - -local setups = fonts.protrusions.setups - -local function initializeprotrusion(tfmdata,value) - if value then - local setup = setups[value] - if setup then - local factor, left, right = setup.factor or 1, setup.left or 1, setup.right or 1 - local emwidth = tfmdata.parameters.quad - tfmdata.parameters.protrusion = { - auto = true, - } - for i, chr in next, tfmdata.characters do - local v, pl, pr = setup[i], nil, nil - if v then - pl, pr = v[1], v[2] - end - if pl and pl ~= 0 then chr.left_protruding = left *pl*factor end - if pr and pr ~= 0 then chr.right_protruding = right*pr*factor end - end - end - end -end - -otffeatures.register { - name = "protrusion", - description = "shift characters into the left and or right margin", - initializers = { - base = initializeprotrusion, - node = initializeprotrusion, - } -} - -fonts.expansions = fonts.expansions or { } -fonts.expansions.setups = fonts.expansions.setups or { } - -local setups = fonts.expansions.setups - -local function initializeexpansion(tfmdata,value) - if value then - local setup = setups[value] - if setup then - local factor = setup.factor or 1 - tfmdata.parameters.expansion = { - stretch = 10 * (setup.stretch or 0), - shrink = 10 * (setup.shrink or 0), - step = 10 * (setup.step or 0), - auto = true, - } - for i, chr in next, tfmdata.characters do - local v = setup[i] - if v and v ~= 0 then - chr.expansion_factor = v*factor - else -- can be option - chr.expansion_factor = factor - end - end - end - end -end - -otffeatures.register { - name = "expansion", - description = "apply hz optimization", - initializers = { - base = initializeexpansion, - node = initializeexpansion, - } -} - --- left over - -function fonts.loggers.onetimemessage() end - --- example vectors - -local byte = string.byte - -fonts.expansions.setups['default'] = { - - stretch = 2, shrink = 2, step = .5, factor = 1, - - [byte('A')] = 0.5, [byte('B')] = 0.7, [byte('C')] = 0.7, [byte('D')] = 0.5, [byte('E')] 
= 0.7, - [byte('F')] = 0.7, [byte('G')] = 0.5, [byte('H')] = 0.7, [byte('K')] = 0.7, [byte('M')] = 0.7, - [byte('N')] = 0.7, [byte('O')] = 0.5, [byte('P')] = 0.7, [byte('Q')] = 0.5, [byte('R')] = 0.7, - [byte('S')] = 0.7, [byte('U')] = 0.7, [byte('W')] = 0.7, [byte('Z')] = 0.7, - [byte('a')] = 0.7, [byte('b')] = 0.7, [byte('c')] = 0.7, [byte('d')] = 0.7, [byte('e')] = 0.7, - [byte('g')] = 0.7, [byte('h')] = 0.7, [byte('k')] = 0.7, [byte('m')] = 0.7, [byte('n')] = 0.7, - [byte('o')] = 0.7, [byte('p')] = 0.7, [byte('q')] = 0.7, [byte('s')] = 0.7, [byte('u')] = 0.7, - [byte('w')] = 0.7, [byte('z')] = 0.7, - [byte('2')] = 0.7, [byte('3')] = 0.7, [byte('6')] = 0.7, [byte('8')] = 0.7, [byte('9')] = 0.7, -} - -fonts.protrusions.setups['default'] = { - - factor = 1, left = 1, right = 1, - - [0x002C] = { 0, 1 }, -- comma - [0x002E] = { 0, 1 }, -- period - [0x003A] = { 0, 1 }, -- colon - [0x003B] = { 0, 1 }, -- semicolon - [0x002D] = { 0, 1 }, -- hyphen - [0x2013] = { 0, 0.50 }, -- endash - [0x2014] = { 0, 0.33 }, -- emdash - [0x3001] = { 0, 1 }, -- ideographic comma 、 - [0x3002] = { 0, 1 }, -- ideographic full stop 。 - [0x060C] = { 0, 1 }, -- arabic comma ، - [0x061B] = { 0, 1 }, -- arabic semicolon ؛ - [0x06D4] = { 0, 1 }, -- arabic full stop ۔ - -} - --- normalizer - -fonts.handlers.otf.features.normalize = function(t) - if t.rand then - t.rand = "random" - end - return t -end - --- bonus - -function fonts.helpers.nametoslot(name) - local t = type(name) - if t == "string" then - local tfmdata = fonts.hashes.identifiers[currentfont()] - local shared = tfmdata and tfmdata.shared - local fntdata = shared and shared.rawdata - return fntdata and fntdata.resources.unicodes[name] - elseif t == "number" then - return n - end -end - --- \font\test=file:somefont:reencode=mymessup --- --- fonts.encodings.reencodings.mymessup = { --- [109] = 110, -- m --- [110] = 109, -- n --- } - -fonts.encodings = fonts.encodings or { } -local reencodings = { } -fonts.encodings.reencodings = reencodings - -local function specialreencode(tfmdata,value) - -- we forget about kerns as we assume symbols and we - -- could issue a message if ther are kerns but it's - -- a hack anyway so we odn't care too much here - local encoding = value and reencodings[value] - if encoding then - local temp = { } - local char = tfmdata.characters - for k, v in next, encoding do - temp[k] = char[v] - end - for k, v in next, temp do - char[k] = temp[k] - end - -- if we use the font otherwise luatex gets confused so - -- we return an additional hash component for fullname - return string.format("reencoded:%s",value) - end -end - -local function reencode(tfmdata,value) - tfmdata.postprocessors = tfmdata.postprocessors or { } - table.insert(tfmdata.postprocessors, - function(tfmdata) - return specialreencode(tfmdata,value) - end - ) -end - -otffeatures.register { - name = "reencode", - description = "reencode characters", - manipulators = { - base = reencode, - node = reencode, - } -} diff --git a/src/fontloader/fontloader-fonts-inj.lua b/src/fontloader/fontloader-fonts-inj.lua deleted file mode 100644 index ae48150..0000000 --- a/src/fontloader/fontloader-fonts-inj.lua +++ /dev/null @@ -1,526 +0,0 @@ -if not modules then modules = { } end modules ['node-inj'] = { - version = 1.001, - comment = "companion to node-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files", -} - --- This is very experimental (this will change when we have luatex > 
.50 and --- a few pending thingies are available. Also, Idris needs to make a few more --- test fonts. Btw, future versions of luatex will have extended glyph properties --- that can be of help. Some optimizations can go away when we have faster machines. - --- todo: make a special one for context - -local next = next -local utfchar = utf.char - -local trace_injections = false trackers.register("nodes.injections", function(v) trace_injections = v end) - -local report_injections = logs.reporter("nodes","injections") - -local attributes, nodes, node = attributes, nodes, node - -fonts = fonts -local fontdata = fonts.hashes.identifiers - -nodes.injections = nodes.injections or { } -local injections = nodes.injections - -local nodecodes = nodes.nodecodes -local glyph_code = nodecodes.glyph -local kern_code = nodecodes.kern -local nodepool = nodes.pool -local newkern = nodepool.kern - -local traverse_id = node.traverse_id -local insert_node_before = node.insert_before -local insert_node_after = node.insert_after - -local a_kernpair = attributes.private('kernpair') -local a_ligacomp = attributes.private('ligacomp') -local a_markbase = attributes.private('markbase') -local a_markmark = attributes.private('markmark') -local a_markdone = attributes.private('markdone') -local a_cursbase = attributes.private('cursbase') -local a_curscurs = attributes.private('curscurs') -local a_cursdone = attributes.private('cursdone') - --- This injector has been tested by Idris Samawi Hamid (several arabic fonts as well as --- the rather demanding Husayni font), Khaled Hosny (latin and arabic) and Kaj Eigner --- (arabic, hebrew and thai) and myself (whatever font I come across). I'm pretty sure --- that this code is not 100% okay but examples are needed to figure things out. - -function injections.installnewkern(nk) - newkern = nk or newkern -end - -local cursives = { } -local marks = { } -local kerns = { } - --- Currently we do gpos/kern in a bit inofficial way but when we have the extra fields in --- glyphnodes to manipulate ht/dp/wd explicitly I will provide an alternative; also, we --- can share tables. - --- For the moment we pass the r2l key ... volt/arabtype tests .. idris: this needs --- checking with husayni (volt and fontforge). 
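(Editorial aside, not part of the patch.) The comments above describe the overall shape of the injector: while the OpenType feature code runs, the set* functions that follow only record cursive anchors, mark positions and kern amounts in the shared cursives, marks and kerns tables and tag the affected glyph nodes with private attributes; the real kern nodes and x/y offsets are produced later, in one pass, by injections.handler. A rough sketch of driving the two phases is given below; it assumes it runs inside LuaTeX with this module loaded, and apply_single_kern with its arguments is illustrative rather than part of the module.

-- Hypothetical driver, for illustration only: record one kern for a glyph node
-- and then let the injector materialize it.
local injections = nodes.injections

local function apply_single_kern(head, glyph, amount, factor, rlmode)
  -- phase 1: remember the kern; this only tags `glyph` with the kernpair
  -- attribute and stores the scaled amount in the module's kerns table
  injections.setkern(glyph, factor, rlmode, amount, nil)
  -- phase 2: once all lookups have been applied, turn the recorded kerns
  -- (and any marks or cursives) into real kern nodes and glyph offsets
  return injections.handler(head, "illustration", false)
end

This roughly mirrors what the font-otn processors and nodes.simple_font_handler do between them in the files touched by this patch.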
- -function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext) - local dx, dy = factor*(exit[1]-entry[1]), factor*(exit[2]-entry[2]) - local ws, wn = tfmstart.width, tfmnext.width - local bound = #cursives + 1 - start[a_cursbase] = bound - nxt[a_curscurs] = bound - cursives[bound] = { rlmode, dx, dy, ws, wn } - return dx, dy, bound -end - -function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr) - local x, y, w, h = factor*spec[1], factor*spec[2], factor*spec[3], factor*spec[4] - -- dy = y - h - if x ~= 0 or w ~= 0 or y ~= 0 or h ~= 0 then - local bound = current[a_kernpair] - if bound then - local kb = kerns[bound] - -- inefficient but singles have less, but weird anyway, needs checking - kb[2], kb[3], kb[4], kb[5] = (kb[2] or 0) + x, (kb[3] or 0) + y, (kb[4] or 0)+ w, (kb[5] or 0) + h - else - bound = #kerns + 1 - current[a_kernpair] = bound - kerns[bound] = { rlmode, x, y, w, h, r2lflag, tfmchr.width } - end - return x, y, w, h, bound - end - return x, y, w, h -- no bound -end - -function injections.setkern(current,factor,rlmode,x,tfmchr) - local dx = factor*x - if dx ~= 0 then - local bound = #kerns + 1 - current[a_kernpair] = bound - kerns[bound] = { rlmode, dx } - return dx, bound - else - return 0, 0 - end -end - -function injections.setmark(start,base,factor,rlmode,ba,ma,index,baseismark) -- ba=baseanchor, ma=markanchor - local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2]) -- the index argument is no longer used but when this - local bound = base[a_markbase] -- fails again we should pass it - local index = 1 - if bound then - local mb = marks[bound] - if mb then - -- if not index then index = #mb + 1 end - index = #mb + 1 - mb[index] = { dx, dy, rlmode } - start[a_markmark] = bound - start[a_markdone] = index - return dx, dy, bound - else - report_injections("possible problem, %U is base mark without data (id %a)",base.char,bound) - end - end --- index = index or 1 - index = index or 1 - bound = #marks + 1 - base[a_markbase] = bound - start[a_markmark] = bound - start[a_markdone] = index - marks[bound] = { [index] = { dx, dy, rlmode, baseismark } } - return dx, dy, bound -end - -local function dir(n) - return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset" -end - -local function trace(head) - report_injections("begin run") - for n in traverse_id(glyph_code,head) do - if n.subtype < 256 then - local kp = n[a_kernpair] - local mb = n[a_markbase] - local mm = n[a_markmark] - local md = n[a_markdone] - local cb = n[a_cursbase] - local cc = n[a_curscurs] - local char = n.char - report_injections("font %s, char %U, glyph %c",n.font,char,char) - if kp then - local k = kerns[kp] - if k[3] then - report_injections(" pairkern: dir %a, x %p, y %p, w %p, h %p",dir(k[1]),k[2],k[3],k[4],k[5]) - else - report_injections(" kern: dir %a, dx %p",dir(k[1]),k[2]) - end - end - if mb then - report_injections(" markbase: bound %a",mb) - end - if mm then - local m = marks[mm] - if mb then - local m = m[mb] - if m then - report_injections(" markmark: bound %a, index %a, dx %p, dy %p",mm,md,m[1],m[2]) - else - report_injections(" markmark: bound %a, missing index",mm) - end - else - m = m[1] - report_injections(" markmark: bound %a, dx %p, dy %p",mm,m and m[1],m and m[2]) - end - end - if cb then - report_injections(" cursbase: bound %a",cb) - end - if cc then - local c = cursives[cc] - report_injections(" curscurs: bound %a, dir %a, dx %p, dy %p",cc,dir(c[1]),c[2],c[3]) - end - end - end - report_injections("end run") -end - --- todo: reuse 
tables (i.e. no collection), but will be extra fields anyway --- todo: check for attribute - --- We can have a fast test on a font being processed, so we can check faster for marks etc --- but I'll make a context variant anyway. - -local function show_result(head) - local current = head - local skipping = false - while current do - local id = current.id - if id == glyph_code then - report_injections("char: %C, width %p, xoffset %p, yoffset %p",current.char,current.width,current.xoffset,current.yoffset) - skipping = false - elseif id == kern_code then - report_injections("kern: %p",current.kern) - skipping = false - elseif not skipping then - report_injections() - skipping = true - end - current = current.next - end -end - -function injections.handler(head,where,keep) - local has_marks, has_cursives, has_kerns = next(marks), next(cursives), next(kerns) - if has_marks or has_cursives then - if trace_injections then - trace(head) - end - -- in the future variant we will not copy items but refs to tables - local done, ky, rl, valid, cx, wx, mk, nofvalid = false, { }, { }, { }, { }, { }, { }, 0 - if has_kerns then -- move outside loop - local nf, tm = nil, nil - for n in traverse_id(glyph_code,head) do -- only needed for relevant fonts - if n.subtype < 256 then - nofvalid = nofvalid + 1 - valid[nofvalid] = n - if n.font ~= nf then - nf = n.font - tm = fontdata[nf].resources.marks - end - if tm then - mk[n] = tm[n.char] - end - local k = n[a_kernpair] - if k then - local kk = kerns[k] - if kk then - local x, y, w, h = kk[2] or 0, kk[3] or 0, kk[4] or 0, kk[5] or 0 - local dy = y - h - if dy ~= 0 then - ky[n] = dy - end - if w ~= 0 or x ~= 0 then - wx[n] = kk - end - rl[n] = kk[1] -- could move in test - end - end - end - end - else - local nf, tm = nil, nil - for n in traverse_id(glyph_code,head) do - if n.subtype < 256 then - nofvalid = nofvalid + 1 - valid[nofvalid] = n - if n.font ~= nf then - nf = n.font - tm = fontdata[nf].resources.marks - end - if tm then - mk[n] = tm[n.char] - end - end - end - end - if nofvalid > 0 then - -- we can assume done == true because we have cursives and marks - local cx = { } - if has_kerns and next(ky) then - for n, k in next, ky do - n.yoffset = k - end - end - -- todo: reuse t and use maxt - if has_cursives then - local p_cursbase, p = nil, nil - -- since we need valid[n+1] we can also use a "while true do" - local t, d, maxt = { }, { }, 0 - for i=1,nofvalid do -- valid == glyphs - local n = valid[i] - if not mk[n] then - local n_cursbase = n[a_cursbase] - if p_cursbase then - local n_curscurs = n[a_curscurs] - if p_cursbase == n_curscurs then - local c = cursives[n_curscurs] - if c then - local rlmode, dx, dy, ws, wn = c[1], c[2], c[3], c[4], c[5] - if rlmode >= 0 then - dx = dx - ws - else - dx = dx + wn - end - if dx ~= 0 then - cx[n] = dx - rl[n] = rlmode - end - -- if rlmode and rlmode < 0 then - dy = -dy - -- end - maxt = maxt + 1 - t[maxt] = p - d[maxt] = dy - else - maxt = 0 - end - end - elseif maxt > 0 then - local ny = n.yoffset - for i=maxt,1,-1 do - ny = ny + d[i] - local ti = t[i] - ti.yoffset = ti.yoffset + ny - end - maxt = 0 - end - if not n_cursbase and maxt > 0 then - local ny = n.yoffset - for i=maxt,1,-1 do - ny = ny + d[i] - local ti = t[i] - ti.yoffset = ny - end - maxt = 0 - end - p_cursbase, p = n_cursbase, n - end - end - if maxt > 0 then - local ny = n.yoffset - for i=maxt,1,-1 do - ny = ny + d[i] - local ti = t[i] - ti.yoffset = ny - end - maxt = 0 - end - if not keep then - cursives = { } - end - end - if has_marks then - for 
i=1,nofvalid do - local p = valid[i] - local p_markbase = p[a_markbase] - if p_markbase then - local mrks = marks[p_markbase] - local nofmarks = #mrks - for n in traverse_id(glyph_code,p.next) do - local n_markmark = n[a_markmark] - if p_markbase == n_markmark then - local index = n[a_markdone] or 1 - local d = mrks[index] - if d then - local rlmode = d[3] - -- - local k = wx[p] - if k then - local x = k[2] - local w = k[4] - if w then - if rlmode and rlmode >= 0 then - -- kern(x) glyph(p) kern(w-x) mark(n) - n.xoffset = p.xoffset - p.width + d[1] - (w-x) - else - -- kern(w-x) glyph(p) kern(x) mark(n) - n.xoffset = p.xoffset - d[1] - x - end - else - if rlmode and rlmode >= 0 then - -- okay for husayni - n.xoffset = p.xoffset - p.width + d[1] - else - -- needs checking: is x ok here? - n.xoffset = p.xoffset - d[1] - x - end - end - else - if rlmode and rlmode >= 0 then - n.xoffset = p.xoffset - p.width + d[1] - else - n.xoffset = p.xoffset - d[1] - end - local w = n.width - if w ~= 0 then - insert_node_before(head,n,newkern(-w/2)) - insert_node_after(head,n,newkern(-w/2)) - end - end - -- -- - if mk[p] then - n.yoffset = p.yoffset + d[2] - else - n.yoffset = n.yoffset + p.yoffset + d[2] - end - -- - if nofmarks == 1 then - break - else - nofmarks = nofmarks - 1 - end - end - else - -- KE: there can be sequences in ligatures - end - end - end - end - if not keep then - marks = { } - end - end - -- todo : combine - if next(wx) then - for n, k in next, wx do - -- only w can be nil (kernclasses), can be sped up when w == nil - local x = k[2] - local w = k[4] - if w then - local rl = k[1] -- r2l = k[6] - local wx = w - x - if rl < 0 then -- KE: don't use r2l here - if wx ~= 0 then - insert_node_before(head,n,newkern(wx)) -- type 0/2 - end - if x ~= 0 then - insert_node_after (head,n,newkern(x)) -- type 0/2 - end - else - if x ~= 0 then - insert_node_before(head,n,newkern(x)) -- type 0/2 - end - if wx ~= 0 then - insert_node_after (head,n,newkern(wx)) -- type 0/2 - end - end - elseif x ~= 0 then - -- this needs checking for rl < 0 but it is unlikely that a r2l script - -- uses kernclasses between glyphs so we're probably safe (KE has a - -- problematic font where marks interfere with rl < 0 in the previous - -- case) - insert_node_before(head,n,newkern(x)) -- a real font kern, type 0 - end - end - end - if next(cx) then - for n, k in next, cx do - if k ~= 0 then - local rln = rl[n] - if rln and rln < 0 then - insert_node_before(head,n,newkern(-k)) -- type 0/2 - else - insert_node_before(head,n,newkern(k)) -- type 0/2 - end - end - end - end - if not keep then - kerns = { } - end - -- if trace_injections then - -- show_result(head) - -- end - return head, true - elseif not keep then - kerns, cursives, marks = { }, { }, { } - end - elseif has_kerns then - if trace_injections then - trace(head) - end - for n in traverse_id(glyph_code,head) do - if n.subtype < 256 then - local k = n[a_kernpair] - if k then - local kk = kerns[k] - if kk then - local rl, x, y, w = kk[1], kk[2] or 0, kk[3], kk[4] - if y and y ~= 0 then - n.yoffset = y -- todo: h ? - end - if w then - -- copied from above - -- local r2l = kk[6] - local wx = w - x - if rl < 0 then -- KE: don't use r2l here - if wx ~= 0 then - insert_node_before(head,n,newkern(wx)) - end - if x ~= 0 then - insert_node_after (head,n,newkern(x)) - end - else - if x ~= 0 then - insert_node_before(head,n,newkern(x)) - end - if wx ~= 0 then - insert_node_after(head,n,newkern(wx)) - end - end - else - -- simple (e.g. 
kernclass kerns) - if x ~= 0 then - insert_node_before(head,n,newkern(x)) - end - end - end - end - end - end - if not keep then - kerns = { } - end - -- if trace_injections then - -- show_result(head) - -- end - return head, true - else - -- no tracing needed - end - return head, false -end diff --git a/src/fontloader/fontloader-fonts-lua.lua b/src/fontloader/fontloader-fonts-lua.lua deleted file mode 100644 index ec3fe38..0000000 --- a/src/fontloader/fontloader-fonts-lua.lua +++ /dev/null @@ -1,33 +0,0 @@ -if not modules then modules = { } end modules ['luatex-fonts-lua'] = { - version = 1.001, - comment = "companion to luatex-*.tex", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end - -local fonts = fonts -fonts.formats.lua = "lua" - -function fonts.readers.lua(specification) - local fullname = specification.filename or "" - if fullname == "" then - local forced = specification.forced or "" - if forced ~= "" then - fullname = specification.name .. "." .. forced - else - fullname = specification.name - end - end - local fullname = resolvers.findfile(fullname) or "" - if fullname ~= "" then - local loader = loadfile(fullname) - loader = loader and loader() - return loader and loader(specification) - end -end diff --git a/src/fontloader/fontloader-fonts-otn.lua b/src/fontloader/fontloader-fonts-otn.lua deleted file mode 100644 index c57be5f..0000000 --- a/src/fontloader/fontloader-fonts-otn.lua +++ /dev/null @@ -1,2848 +0,0 @@ -if not modules then modules = { } end modules ['font-otn'] = { - version = 1.001, - comment = "companion to font-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files", -} - --- preprocessors = { "nodes" } - --- this is still somewhat preliminary and it will get better in due time; --- much functionality could only be implemented thanks to the husayni font --- of Idris Samawi Hamid to who we dedicate this module. - --- in retrospect it always looks easy but believe it or not, it took a lot --- of work to get proper open type support done: buggy fonts, fuzzy specs, --- special made testfonts, many skype sessions between taco, idris and me, --- torture tests etc etc ... unfortunately the code does not show how much --- time it took ... - --- todo: --- --- kerning is probably not yet ok for latin around dics nodes (interesting challenge) --- extension infrastructure (for usage out of context) --- sorting features according to vendors/renderers --- alternative loop quitters --- check cursive and r2l --- find out where ignore-mark-classes went --- default features (per language, script) --- handle positions (we need example fonts) --- handle gpos_single (we might want an extra width field in glyph nodes because adding kerns might interfere) --- mark (to mark) code is still not what it should be (too messy but we need some more extreem husayni tests) --- remove some optimizations (when I have a faster machine) --- --- maybe redo the lot some way (more context specific) - ---[[ldx-- -

This module is a bit more split up than I'd like, but since we also want to test -with plain TeX it has to be so. This module is part of ConTeXt, -and discussion about improvements and functionality mostly happens on the -ConTeXt mailing list.

- -

The specification of OpenType is kind of vague. Apart from the lack of a proper -free specification there's also the problem that Microsoft and Adobe -may have their own interpretation of how and in what order to apply features. -In general the Microsoft website has more detailed specifications and is a -better reference. There is also some information in the FontForge help files.

- -

Because so much is possible, fonts might contain bugs and/or be made to -work with certain renderers. These renderers may evolve over time, which can have the side -effect that fonts suddenly behave differently.

- -

After a lot of experiments (mostly by Taco, me and Idris) we're now at yet another -implementation. Of course all errors are mine and of course the code can be -improved. There are quite a few optimizations going on here and processing speed -is currently acceptable. Not all functions are implemented yet, often because I -lack the fonts for testing. Many scripts are not yet supported either, but I will -look into them as soon as users ask for them.

- -

Because there are different interpretations possible, I will extend the code -with more (configurable) variants. I can also add hooks for users so that they can -write their own extensions.

- -

Glyphs are indexed not by unicode but in their own way. This is because there is no -relationship with unicode at all, apart from the fact that a font might cover certain -ranges of characters. One character can have multiple shapes. However, at the -TeX end we use Unicode, so all extra glyphs are mapped into a private -space. This is needed because we need to access them and have to include -them in the output eventually.
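As an aside, a minimal standalone Lua sketch of that private-space mapping (not part of this patch or the loader's actual API; the 0xF0000 offset and the field names are assumptions for illustration only):

-- standalone sketch: map glyphs that have no unicode of their own into a private range
local private = 0xF0000            -- assumed start of the private-use range
local glyphs  = {                  -- toy font data: glyph index -> description
  [1] = { name = "a",     unicode = 0x61 },
  [2] = { name = "a.alt", unicode = nil  },   -- variant shape, no unicode
  [3] = { name = "f_i",   unicode = nil  },   -- ligature glyph, no unicode
}

local characters = { }             -- what the TeX end would address by code point
for index, glyph in pairs(glyphs) do
  local u = glyph.unicode
  if not u then
    u = private                    -- park the extra shape in private space
    private = private + 1
  end
  characters[u] = { index = index, name = glyph.name }
end

for u, c in pairs(characters) do
  print(string.format("U+%05X -> glyph %d (%s)", u, c.index, c.name))
end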

- -

The raw table as it comes from FontForge gets reorganized to fit our needs. -In ConTeXt that table is packed (similar tables are shared) and cached on disk, -so that successive runs can use the optimized table (after loading, the table is -unpacked). The flattening code used later is a prelude to an even more compact table -format (and as such it keeps evolving).
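A minimal sketch of the packing idea (hypothetical helpers, not the loader's actual packing code): identical subtables are stored once in a pool and referenced by index, which is what keeps the cached table small.

-- sketch: share identical flat kern tables via a pool keyed on their content
local pool, index = { }, { }

local function key(t)                         -- deterministic key for a flat table
  local s = { }
  for k, v in pairs(t) do
    s[#s+1] = tostring(k) .. "=" .. tostring(v)
  end
  table.sort(s)
  return table.concat(s, ",")
end

local function share(t)
  local k = key(t)
  local i = index[k]
  if not i then                               -- first time we see this content
    i = #pool + 1
    pool[i] = t
    index[k] = i
  end
  return i                                    -- store this index instead of the table
end

local kerns_a = { [0x41] = -30, [0x56] = -50 }
local kerns_b = { [0x56] = -50, [0x41] = -30 }  -- same content, different table
print(share(kerns_a), share(kerns_b))           -- both map to pool slot 1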

- -

This module is sparsely documented because it is a moving target. The table format -of the reader changes and we experiment a lot with different methods for supporting -features.

- -

As with the AFM code, we may decide to store more information in the -OTF table.

- -

Incrementing the version number will force a re-cache. We bump the number by one -when there's a fix in the FontForge library or Lua code that -results in different tables.
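A small sketch of the version test that triggers a re-cache (the file name and handling here are made up; the real code goes through the caching layer):

-- sketch: only reuse a cached table whose version matches the current code
local currentversion = 1.001                  -- bumped when the table layout changes

local function loadcached(filename)
  local loader = loadfile(filename)           -- a cache file is just a lua chunk returning a table
  local data   = loader and loader()
  if data and data.version == currentversion then
    return data                               -- safe to reuse
  end
  return nil                                  -- missing or stale: caller re-caches
end

local data = loadcached("demo-cache.lua")
if not data then
  data = { version = currentversion }         -- rebuild (and, in real code, save again)
end
print(data.version)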

---ldx]]-- - --- action handler chainproc chainmore comment --- --- gsub_single ok ok ok --- gsub_multiple ok ok not implemented yet --- gsub_alternate ok ok not implemented yet --- gsub_ligature ok ok ok --- gsub_context ok -- --- gsub_contextchain ok -- --- gsub_reversecontextchain ok -- --- chainsub -- ok --- reversesub -- ok --- gpos_mark2base ok ok --- gpos_mark2ligature ok ok --- gpos_mark2mark ok ok --- gpos_cursive ok untested --- gpos_single ok ok --- gpos_pair ok ok --- gpos_context ok -- --- gpos_contextchain ok -- --- --- todo: contextpos and contextsub and class stuff --- --- actions: --- --- handler : actions triggered by lookup --- chainproc : actions triggered by contextual lookup --- chainmore : multiple substitutions triggered by contextual lookup (e.g. fij -> f + ij) --- --- remark: the 'not implemented yet' variants will be done when we have fonts that use them --- remark: we need to check what to do with discretionaries - --- We used to have independent hashes for lookups but as the tags are unique --- we now use only one hash. If needed we can have multiple again but in that --- case I will probably prefix (i.e. rename) the lookups in the cached font file. - --- Todo: make plugin feature that operates on char/glyphnode arrays - -local concat, insert, remove = table.concat, table.insert, table.remove -local gmatch, gsub, find, match, lower, strip = string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip -local type, next, tonumber, tostring = type, next, tonumber, tostring -local lpegmatch = lpeg.match -local random = math.random -local formatters = string.formatters - -local logs, trackers, nodes, attributes = logs, trackers, nodes, attributes - -local registertracker = trackers.register - -local fonts = fonts -local otf = fonts.handlers.otf - -local trace_lookups = false registertracker("otf.lookups", function(v) trace_lookups = v end) -local trace_singles = false registertracker("otf.singles", function(v) trace_singles = v end) -local trace_multiples = false registertracker("otf.multiples", function(v) trace_multiples = v end) -local trace_alternatives = false registertracker("otf.alternatives", function(v) trace_alternatives = v end) -local trace_ligatures = false registertracker("otf.ligatures", function(v) trace_ligatures = v end) -local trace_contexts = false registertracker("otf.contexts", function(v) trace_contexts = v end) -local trace_marks = false registertracker("otf.marks", function(v) trace_marks = v end) -local trace_kerns = false registertracker("otf.kerns", function(v) trace_kerns = v end) -local trace_cursive = false registertracker("otf.cursive", function(v) trace_cursive = v end) -local trace_preparing = false registertracker("otf.preparing", function(v) trace_preparing = v end) -local trace_bugs = false registertracker("otf.bugs", function(v) trace_bugs = v end) -local trace_details = false registertracker("otf.details", function(v) trace_details = v end) -local trace_applied = false registertracker("otf.applied", function(v) trace_applied = v end) -local trace_steps = false registertracker("otf.steps", function(v) trace_steps = v end) -local trace_skips = false registertracker("otf.skips", function(v) trace_skips = v end) -local trace_directions = false registertracker("otf.directions", function(v) trace_directions = v end) - -local report_direct = logs.reporter("fonts","otf direct") -local report_subchain = logs.reporter("fonts","otf subchain") -local report_chain = logs.reporter("fonts","otf chain") -local 
report_process = logs.reporter("fonts","otf process") -local report_prepare = logs.reporter("fonts","otf prepare") -local report_warning = logs.reporter("fonts","otf warning") - -registertracker("otf.verbose_chain", function(v) otf.setcontextchain(v and "verbose") end) -registertracker("otf.normal_chain", function(v) otf.setcontextchain(v and "normal") end) - -registertracker("otf.replacements", "otf.singles,otf.multiples,otf.alternatives,otf.ligatures") -registertracker("otf.positions","otf.marks,otf.kerns,otf.cursive") -registertracker("otf.actions","otf.replacements,otf.positions") -registertracker("otf.injections","nodes.injections") - -registertracker("*otf.sample","otf.steps,otf.actions,otf.analyzing") - -local insert_node_after = node.insert_after -local delete_node = nodes.delete -local copy_node = node.copy -local find_node_tail = node.tail or node.slide -local flush_node_list = node.flush_list -local end_of_math = node.end_of_math - -local setmetatableindex = table.setmetatableindex - -local zwnj = 0x200C -local zwj = 0x200D -local wildcard = "*" -local default = "dflt" - -local nodecodes = nodes.nodecodes -local whatcodes = nodes.whatcodes -local glyphcodes = nodes.glyphcodes -local disccodes = nodes.disccodes - -local glyph_code = nodecodes.glyph -local glue_code = nodecodes.glue -local disc_code = nodecodes.disc -local whatsit_code = nodecodes.whatsit -local math_code = nodecodes.math - -local dir_code = whatcodes.dir -local localpar_code = whatcodes.localpar - -local discretionary_code = disccodes.discretionary - -local ligature_code = glyphcodes.ligature - -local privateattribute = attributes.private - --- Something is messed up: we have two mark / ligature indices, one at the injection --- end and one here ... this is bases in KE's patches but there is something fishy --- there as I'm pretty sure that for husayni we need some connection (as it's much --- more complex than an average font) but I need proper examples of all cases, not --- of only some. 
- -local a_state = privateattribute('state') -local a_markbase = privateattribute('markbase') -local a_markmark = privateattribute('markmark') -local a_markdone = privateattribute('markdone') -- assigned at the injection end -local a_cursbase = privateattribute('cursbase') -local a_curscurs = privateattribute('curscurs') -local a_cursdone = privateattribute('cursdone') -local a_kernpair = privateattribute('kernpair') -local a_ligacomp = privateattribute('ligacomp') -- assigned here (ideally it should be combined) - -local injections = nodes.injections -local setmark = injections.setmark -local setcursive = injections.setcursive -local setkern = injections.setkern -local setpair = injections.setpair - -local markonce = true -local cursonce = true -local kernonce = true - -local fonthashes = fonts.hashes -local fontdata = fonthashes.identifiers - -local otffeatures = fonts.constructors.newfeatures("otf") -local registerotffeature = otffeatures.register - -local onetimemessage = fonts.loggers.onetimemessage or function() end - -otf.defaultnodealternate = "none" -- first last - --- we share some vars here, after all, we have no nested lookups and less code - -local tfmdata = false -local characters = false -local descriptions = false -local resources = false -local marks = false -local currentfont = false -local lookuptable = false -local anchorlookups = false -local lookuptypes = false -local handlers = { } -local rlmode = 0 -local featurevalue = false - --- head is always a whatsit so we can safely assume that head is not changed - --- we use this for special testing and documentation - -local checkstep = (nodes and nodes.tracers and nodes.tracers.steppers.check) or function() end -local registerstep = (nodes and nodes.tracers and nodes.tracers.steppers.register) or function() end -local registermessage = (nodes and nodes.tracers and nodes.tracers.steppers.message) or function() end - -local function logprocess(...) - if trace_steps then - registermessage(...) - end - report_direct(...) -end - -local function logwarning(...) - report_direct(...) -end - -local f_unicode = formatters["%U"] -local f_uniname = formatters["%U (%s)"] -local f_unilist = formatters["% t (% t)"] - -local function gref(n) -- currently the same as in font-otb - if type(n) == "number" then - local description = descriptions[n] - local name = description and description.name - if name then - return f_uniname(n,name) - else - return f_unicode(n) - end - elseif n then - local num, nam = { }, { } - for i=1,#n do - local ni = n[i] - if tonumber(ni) then -- later we will start at 2 - local di = descriptions[ni] - num[i] = f_unicode(ni) - nam[i] = di and di.name or "-" - end - end - return f_unilist(num,nam) - else - return "" - end -end - -local function cref(kind,chainname,chainlookupname,lookupname,index) -- not in the mood to alias f_ - if index then - return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookupname,index) - elseif lookupname then - return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookupname) - elseif chainlookupname then - return formatters["feature %a, chain %a, sub %a"](kind,chainname,chainlookupname) - elseif chainname then - return formatters["feature %a, chain %a"](kind,chainname) - else - return formatters["feature %a"](kind) - end -end - -local function pref(kind,lookupname) - return formatters["feature %a, lookup %a"](kind,lookupname) -end - --- We can assume that languages that use marks are not hyphenated. 
We can also assume --- that at most one discretionary is present. - --- We do need components in funny kerning mode but maybe I can better reconstruct then --- as we do have the font components info available; removing components makes the --- previous code much simpler. Also, later on copying and freeing becomes easier. --- However, for arabic we need to keep them around for the sake of mark placement --- and indices. - -local function copy_glyph(g) -- next and prev are untouched ! - local components = g.components - if components then - g.components = nil - local n = copy_node(g) - g.components = components - return n - else - return copy_node(g) - end -end - --- start is a mark and we need to keep that one - -local function markstoligature(kind,lookupname,head,start,stop,char) - if start == stop and start.char == char then - return head, start - else - local prev = start.prev - local next = stop.next - start.prev = nil - stop.next = nil - local base = copy_glyph(start) - if head == start then - head = base - end - base.char = char - base.subtype = ligature_code - base.components = start - if prev then - prev.next = base - end - if next then - next.prev = base - end - base.next = next - base.prev = prev - return head, base - end -end - --- The next code is somewhat complicated by the fact that some fonts can have ligatures made --- from ligatures that themselves have marks. This was identified by Kai in for instance --- arabtype: KAF LAM SHADDA ALEF FATHA (0x0643 0x0644 0x0651 0x0627 0x064E). This becomes --- KAF LAM-ALEF with a SHADDA on the first and a FATHA op de second component. In a next --- iteration this becomes a KAF-LAM-ALEF with a SHADDA on the second and a FATHA on the --- third component. - -local function getcomponentindex(start) - if start.id ~= glyph_code then - return 0 - elseif start.subtype == ligature_code then - local i = 0 - local components = start.components - while components do - i = i + getcomponentindex(components) - components = components.next - end - return i - elseif not marks[start.char] then - return 1 - else - return 0 - end -end - --- eventually we will do positioning in an other way (needs addional w/h/d fields) - -local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound) -- brr head - if start == stop and start.char == char then - start.char = char - return head, start - end - local prev = start.prev - local next = stop.next - start.prev = nil - stop.next = nil - local base = copy_glyph(start) - if start == head then - head = base - end - base.char = char - base.subtype = ligature_code - base.components = start -- start can have components - if prev then - prev.next = base - end - if next then - next.prev = base - end - base.next = next - base.prev = prev - if not discfound then - local deletemarks = markflag ~= "mark" - local components = start - local baseindex = 0 - local componentindex = 0 - local head = base - local current = base - -- first we loop over the glyphs in start .. 
stop - while start do - local char = start.char - if not marks[char] then - baseindex = baseindex + componentindex - componentindex = getcomponentindex(start) - elseif not deletemarks then -- quite fishy - start[a_ligacomp] = baseindex + (start[a_ligacomp] or componentindex) - if trace_marks then - logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp]) - end - head, current = insert_node_after(head,current,copy_node(start)) -- unlikely that mark has components - elseif trace_marks then - logwarning("%s: delete mark %s",pref(kind,lookupname),gref(char)) - end - start = start.next - end - -- we can have one accent as part of a lookup and another following - -- local start = components -- was wrong (component scanning was introduced when more complex ligs in devanagari was added) - local start = current.next - while start and start.id == glyph_code do - local char = start.char - if marks[char] then - start[a_ligacomp] = baseindex + (start[a_ligacomp] or componentindex) - if trace_marks then - logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp]) - end - else - break - end - start = start.next - end - end - return head, base -end - -function handlers.gsub_single(head,start,kind,lookupname,replacement) - if trace_singles then - logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(start.char),gref(replacement)) - end - start.char = replacement - return head, start, true -end - -local function get_alternative_glyph(start,alternatives,value,trace_alternatives) - local n = #alternatives - if value == "random" then - local r = random(1,n) - return alternatives[r], trace_alternatives and formatters["value %a, taking %a"](value,r) - elseif value == "first" then - return alternatives[1], trace_alternatives and formatters["value %a, taking %a"](value,1) - elseif value == "last" then - return alternatives[n], trace_alternatives and formatters["value %a, taking %a"](value,n) - else - value = tonumber(value) - if type(value) ~= "number" then - return alternatives[1], trace_alternatives and formatters["invalid value %s, taking %a"](value,1) - elseif value > n then - local defaultalt = otf.defaultnodealternate - if defaultalt == "first" then - return alternatives[n], trace_alternatives and formatters["invalid value %s, taking %a"](value,1) - elseif defaultalt == "last" then - return alternatives[1], trace_alternatives and formatters["invalid value %s, taking %a"](value,n) - else - return false, trace_alternatives and formatters["invalid value %a, %s"](value,"out of range") - end - elseif value == 0 then - return start.char, trace_alternatives and formatters["invalid value %a, %s"](value,"no change") - elseif value < 1 then - return alternatives[1], trace_alternatives and formatters["invalid value %a, taking %a"](value,1) - else - return alternatives[value], trace_alternatives and formatters["value %a, taking %a"](value,value) - end - end -end - -local function multiple_glyphs(head,start,multiple,ignoremarks) - local nofmultiples = #multiple - if nofmultiples > 0 then - start.char = multiple[1] - if nofmultiples > 1 then - local sn = start.next - for k=2,nofmultiples do -- todo: use insert_node --- untested: --- --- while ignoremarks and marks[sn.char] then --- local sn = sn.next --- end - local n = copy_node(start) -- ignore components - n.char = multiple[k] - n.next = sn - n.prev = start - if sn then - sn.prev = n - end - start.next = n - start = n - end - end - return head, start, true - else - if 
trace_multiples then - logprocess("no multiple for %s",gref(start.char)) - end - return head, start, false - end -end - -function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence) - local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue - local choice, comment = get_alternative_glyph(start,alternative,value,trace_alternatives) - if choice then - if trace_alternatives then - logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(start.char),choice,gref(choice),comment) - end - start.char = choice - else - if trace_alternatives then - logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(start.char),comment) - end - end - return head, start, true -end - -function handlers.gsub_multiple(head,start,kind,lookupname,multiple,sequence) - if trace_multiples then - logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(start.char),gref(multiple)) - end - return multiple_glyphs(head,start,multiple,sequence.flags[1]) -end - -function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence) - local s, stop, discfound = start.next, nil, false - local startchar = start.char - if marks[startchar] then - while s do - local id = s.id - if id == glyph_code and s.font == currentfont and s.subtype<256 then - local lg = ligature[s.char] - if lg then - stop = s - ligature = lg - s = s.next - else - break - end - else - break - end - end - if stop then - local lig = ligature.ligature - if lig then - if trace_ligatures then - local stopchar = stop.char - head, start = markstoligature(kind,lookupname,head,start,stop,lig) - logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char)) - else - head, start = markstoligature(kind,lookupname,head,start,stop,lig) - end - return head, start, true - else - -- ok, goto next lookup - end - end - else - local skipmark = sequence.flags[1] - while s do - local id = s.id - if id == glyph_code and s.subtype<256 then - if s.font == currentfont then - local char = s.char - if skipmark and marks[char] then - s = s.next - else - local lg = ligature[char] - if lg then - stop = s - ligature = lg - s = s.next - else - break - end - end - else - break - end - elseif id == disc_code then - discfound = true - s = s.next - else - break - end - end - local lig = ligature.ligature - if lig then - if stop then - if trace_ligatures then - local stopchar = stop.char - head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound) - logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char)) - else - head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound) - end - return head, start, true - else - -- weird but happens (in some arabic font) - start.char = lig - if trace_ligatures then - logprocess("%s: replacing %s by (no real) ligature %s case 3",pref(kind,lookupname),gref(startchar),gref(lig)) - end - return head, start, true - end - else - -- weird but happens - end - end - return head, start, false -end - ---[[ldx-- -

We get hits on a mark, but we're not sure if it has to be applied, so -we need to explicitly test for basechar, baselig and basemark entries.
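A simplified sketch of that test (field names reduced to the essentials; the real handlers below work on node lists and anchor lookups):

-- sketch: pick the anchor set that applies to the glyph a mark attaches to
local function anchorsfor(description, ismark, isligature)
  local anchors = description and description.anchors
  if not anchors then
    return nil
  elseif ismark then
    return anchors.basemark          -- mark attaches to another mark
  elseif isligature then
    return anchors.baselig           -- mark attaches to a ligature component
  else
    return anchors.basechar          -- plain base glyph
  end
end

local demo = { anchors = { basechar = { top = { 120, 800 } } } }
print(anchorsfor(demo, false, false).top[1])   --> 120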

---ldx]]-- - -function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence) - local markchar = start.char - if marks[markchar] then - local base = start.prev -- [glyph] [start=mark] - if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then - local basechar = base.char - if marks[basechar] then - while true do - base = base.prev - if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then - basechar = base.char - if not marks[basechar] then - break - end - else - if trace_bugs then - logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar)) - end - return head, start, false - end - end - end - local baseanchors = descriptions[basechar] - if baseanchors then - baseanchors = baseanchors.anchors - end - if baseanchors then - local baseanchors = baseanchors['basechar'] - if baseanchors then - local al = anchorlookups[lookupname] - for anchor,ba in next, baseanchors do - if al[anchor] then - local ma = markanchors[anchor] - if ma then - local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) - if trace_marks then - logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)", - pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) - end - return head, start, true - end - end - end - if trace_bugs then - logwarning("%s, no matching anchors for mark %s and base %s",pref(kind,lookupname),gref(markchar),gref(basechar)) - end - end - elseif trace_bugs then - -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar)) - onetimemessage(currentfont,basechar,"no base anchors",report_fonts) - end - elseif trace_bugs then - logwarning("%s: prev node is no char",pref(kind,lookupname)) - end - elseif trace_bugs then - logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar)) - end - return head, start, false -end - -function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence) - -- check chainpos variant - local markchar = start.char - if marks[markchar] then - local base = start.prev -- [glyph] [optional marks] [start=mark] - if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then - local basechar = base.char - if marks[basechar] then - while true do - base = base.prev - if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then - basechar = base.char - if not marks[basechar] then - break - end - else - if trace_bugs then - logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar)) - end - return head, start, false - end - end - end - local index = start[a_ligacomp] - local baseanchors = descriptions[basechar] - if baseanchors then - baseanchors = baseanchors.anchors - if baseanchors then - local baseanchors = baseanchors['baselig'] - if baseanchors then - local al = anchorlookups[lookupname] - for anchor, ba in next, baseanchors do - if al[anchor] then - local ma = markanchors[anchor] - if ma then - ba = ba[index] - if ba then - local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) -- index - if trace_marks then - logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)", - pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy) - end - return head, start, true - else - if trace_bugs then - logwarning("%s: no matching anchors for mark %s and baselig %s with index 
%a",pref(kind,lookupname),gref(markchar),gref(basechar),index) - end - end - end - end - end - if trace_bugs then - logwarning("%s: no matching anchors for mark %s and baselig %s",pref(kind,lookupname),gref(markchar),gref(basechar)) - end - end - end - elseif trace_bugs then - -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar)) - onetimemessage(currentfont,basechar,"no base anchors",report_fonts) - end - elseif trace_bugs then - logwarning("%s: prev node is no char",pref(kind,lookupname)) - end - elseif trace_bugs then - logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar)) - end - return head, start, false -end - -function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence) - local markchar = start.char - if marks[markchar] then - local base = start.prev -- [glyph] [basemark] [start=mark] - local slc = start[a_ligacomp] - if slc then -- a rather messy loop ... needs checking with husayni - while base do - local blc = base[a_ligacomp] - if blc and blc ~= slc then - base = base.prev - else - break - end - end - end - if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then -- subtype test can go - local basechar = base.char - local baseanchors = descriptions[basechar] - if baseanchors then - baseanchors = baseanchors.anchors - if baseanchors then - baseanchors = baseanchors['basemark'] - if baseanchors then - local al = anchorlookups[lookupname] - for anchor,ba in next, baseanchors do - if al[anchor] then - local ma = markanchors[anchor] - if ma then - local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true) - if trace_marks then - logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)", - pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) - end - return head, start, true - end - end - end - if trace_bugs then - logwarning("%s: no matching anchors for mark %s and basemark %s",pref(kind,lookupname),gref(markchar),gref(basechar)) - end - end - end - elseif trace_bugs then - -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar)) - onetimemessage(currentfont,basechar,"no base anchors",report_fonts) - end - elseif trace_bugs then - logwarning("%s: prev node is no mark",pref(kind,lookupname)) - end - elseif trace_bugs then - logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar)) - end - return head, start, false -end - -function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence) -- to be checked - local alreadydone = cursonce and start[a_cursbase] - if not alreadydone then - local done = false - local startchar = start.char - if marks[startchar] then - if trace_cursive then - logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar)) - end - else - local nxt = start.next - while not done and nxt and nxt.id == glyph_code and nxt.font == currentfont and nxt.subtype<256 do - local nextchar = nxt.char - if marks[nextchar] then - -- should not happen (maybe warning) - nxt = nxt.next - else - local entryanchors = descriptions[nextchar] - if entryanchors then - entryanchors = entryanchors.anchors - if entryanchors then - entryanchors = entryanchors['centry'] - if entryanchors then - local al = anchorlookups[lookupname] - for anchor, entry in next, entryanchors do - if al[anchor] then - local exit = exitanchors[anchor] - if exit then - local dx, dy, bound = 
setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar]) - if trace_cursive then - logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode) - end - done = true - break - end - end - end - end - end - elseif trace_bugs then - -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar)) - onetimemessage(currentfont,startchar,"no entry anchors",report_fonts) - end - break - end - end - end - return head, start, done - else - if trace_cursive and trace_details then - logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone) - end - return head, start, false - end -end - -function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence) - local startchar = start.char - local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar]) - if trace_kerns then - logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h) - end - return head, start, false -end - -function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence) - -- todo: kerns in disc nodes: pre, post, replace -> loop over disc too - -- todo: kerns in components of ligatures - local snext = start.next - if not snext then - return head, start, false - else - local prev, done = start, false - local factor = tfmdata.parameters.factor - local lookuptype = lookuptypes[lookupname] - while snext and snext.id == glyph_code and snext.font == currentfont and snext.subtype<256 do - local nextchar = snext.char - local krn = kerns[nextchar] - if not krn and marks[nextchar] then - prev = snext - snext = snext.next - else - if not krn then - -- skip - elseif type(krn) == "table" then - if lookuptype == "pair" then -- probably not needed - local a, b = krn[2], krn[3] - if a and #a > 0 then - local startchar = start.char - local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar]) - if trace_kerns then - logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h) - end - end - if b and #b > 0 then - local startchar = start.char - local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar]) - if trace_kerns then - logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h) - end - end - else -- wrong ... position has different entries - report_process("%s: check this out (old kern stuff)",pref(kind,lookupname)) - -- local a, b = krn[2], krn[6] - -- if a and a ~= 0 then - -- local k = setkern(snext,factor,rlmode,a) - -- if trace_kerns then - -- logprocess("%s: inserting first kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar)) - -- end - -- end - -- if b and b ~= 0 then - -- logwarning("%s: ignoring second kern xoff %s",pref(kind,lookupname),b*factor) - -- end - end - done = true - elseif krn ~= 0 then - local k = setkern(snext,factor,rlmode,krn) - if trace_kerns then - logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar)) - end - done = true - end - break - end - end - return head, start, done - end -end - ---[[ldx-- -

I will implement multiple chain replacements once I run into a font that uses -them. It's not that complex to handle.

---ldx]]-- - -local chainmores = { } -local chainprocs = { } - -local function logprocess(...) - if trace_steps then - registermessage(...) - end - report_subchain(...) -end - -local logwarning = report_subchain - -local function logprocess(...) - if trace_steps then - registermessage(...) - end - report_chain(...) -end - -local logwarning = report_chain - --- We could share functions but that would lead to extra function calls with many --- arguments, redundant tests and confusing messages. - -function chainprocs.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname) - logwarning("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname)) - return head, start, false -end - -function chainmores.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname,n) - logprocess("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname)) - return head, start, false -end - --- The reversesub is a special case, which is why we need to store the replacements --- in a bit weird way. There is no lookup and the replacement comes from the lookup --- itself. It is meant mostly for dealing with Urdu. - -function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements) - local char = start.char - local replacement = replacements[char] - if replacement then - if trace_singles then - logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement)) - end - start.char = replacement - return head, start, true - else - return head, start, false - end -end - ---[[ldx-- -

This chain stuff is somewhat tricky since we can have a sequence of actions to be -applied: single, alternate, multiple or ligature, where the ligature can be an invalid -one in the sense that it replaces multiple glyphs by one, but not necessarily one that -looks like the combination (i.e. it is then the counterpart of multiple). For -example, the following is valid:

- - -xxxabcdexxx [single a->A][multiple b->BCD][ligature cde->E] xxxABCDExxx - - -

Therefore we don't really do the replacement here yet unless we have the -single lookup case. The efficiency of the replacements can be improved by deleting -as little as needed, but that would also make the code even messier.
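A toy, string-level sketch of the example above (purely illustrative; the real code below works on node lists, not strings):

-- toy version of the chain from the example: a->A, b->BCD, cde->E
local text  = "xxxabcdexxx"
local steps = {
  { pattern = "a",   replacement = "A"   },   -- single
  { pattern = "b",   replacement = "BCD" },   -- multiple
  { pattern = "cde", replacement = "E"   },   -- ligature
}

for i = 1, #steps do
  local step = steps[i]
  text = string.gsub(text, step.pattern, step.replacement, 1)
end

print(text)   --> xxxABCDExxx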

---ldx]]-- - --- local function delete_till_stop(head,start,stop,ignoremarks) -- keeps start --- local n = 1 --- if start == stop then --- -- done --- elseif ignoremarks then --- repeat -- start x x m x x stop => start m --- local next = start.next --- if not marks[next.char] then --- local components = next.components --- if components then -- probably not needed --- flush_node_list(components) --- end --- head = delete_node(head,next) --- end --- n = n + 1 --- until next == stop --- else -- start x x x stop => start --- repeat --- local next = start.next --- local components = next.components --- if components then -- probably not needed --- flush_node_list(components) --- end --- head = delete_node(head,next) --- n = n + 1 --- until next == stop --- end --- return head, n --- end - ---[[ldx-- -

Here we replace start by a single variant. First we delete the rest of the -match.

---ldx]]-- - -function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex) - -- todo: marks ? - local current = start - local subtables = currentlookup.subtables - if #subtables > 1 then - logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," ")) - end - while current do - if current.id == glyph_code then - local currentchar = current.char - local lookupname = subtables[1] -- only 1 - local replacement = lookuphash[lookupname] - if not replacement then - if trace_bugs then - logwarning("%s: no single hits",cref(kind,chainname,chainlookupname,lookupname,chainindex)) - end - else - replacement = replacement[currentchar] - if not replacement or replacement == "" then - if trace_bugs then - logwarning("%s: no single for %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar)) - end - else - if trace_singles then - logprocess("%s: replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement)) - end - current.char = replacement - end - end - return head, start, true - elseif current == stop then - break - else - current = current.next - end - end - return head, start, false -end - -chainmores.gsub_single = chainprocs.gsub_single - ---[[ldx-- -

Here we replace start by a sequence of new glyphs. First we delete the rest of -the match.

---ldx]]-- - -function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - -- local head, n = delete_till_stop(head,start,stop) - local startchar = start.char - local subtables = currentlookup.subtables - local lookupname = subtables[1] - local replacements = lookuphash[lookupname] - if not replacements then - if trace_bugs then - logwarning("%s: no multiple hits",cref(kind,chainname,chainlookupname,lookupname)) - end - else - replacements = replacements[startchar] - if not replacements or replacement == "" then - if trace_bugs then - logwarning("%s: no multiple for %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar)) - end - else - if trace_multiples then - logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements)) - end - return multiple_glyphs(head,start,replacements,currentlookup.flags[1]) - end - end - return head, start, false -end - -chainmores.gsub_multiple = chainprocs.gsub_multiple - ---[[ldx-- -

Here we replace start by a new glyph. First we delete the rest of the match.

---ldx]]-- - --- char_1 mark_1 -> char_x mark_1 (ignore marks) --- char_1 mark_1 -> char_x - --- to be checked: do we always have just one glyph? --- we can also have alternates for marks --- marks come last anyway --- are there cases where we need to delete the mark - -function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - local current = start - local subtables = currentlookup.subtables - local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue - while current do - if current.id == glyph_code then -- is this check needed? - local currentchar = current.char - local lookupname = subtables[1] - local alternatives = lookuphash[lookupname] - if not alternatives then - if trace_bugs then - logwarning("%s: no alternative hit",cref(kind,chainname,chainlookupname,lookupname)) - end - else - alternatives = alternatives[currentchar] - if alternatives then - local choice, comment = get_alternative_glyph(current,alternatives,value,trace_alternatives) - if choice then - if trace_alternatives then - logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(char),choice,gref(choice),comment) - end - start.char = choice - else - if trace_alternatives then - logwarning("%s: no variant %a for %s, %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(char),comment) - end - end - elseif trace_bugs then - logwarning("%s: no alternative for %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar),comment) - end - end - return head, start, true - elseif current == stop then - break - else - current = current.next - end - end - return head, start, false -end - -chainmores.gsub_alternate = chainprocs.gsub_alternate - ---[[ldx-- -

When we replace ligatures we use a helper that handles the marks. I might change -this function (move code inline and handle the marks by a separate function). We -assume rather stupid ligatures (no complex disc nodes).

---ldx]]-- - -function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex) - local startchar = start.char - local subtables = currentlookup.subtables - local lookupname = subtables[1] - local ligatures = lookuphash[lookupname] - if not ligatures then - if trace_bugs then - logwarning("%s: no ligature hits",cref(kind,chainname,chainlookupname,lookupname,chainindex)) - end - else - ligatures = ligatures[startchar] - if not ligatures then - if trace_bugs then - logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar)) - end - else - local s = start.next - local discfound = false - local last = stop - local nofreplacements = 0 - local skipmark = currentlookup.flags[1] - while s do - local id = s.id - if id == disc_code then - s = s.next - discfound = true - else - local schar = s.char - if skipmark and marks[schar] then -- marks - s = s.next - else - local lg = ligatures[schar] - if lg then - ligatures, last, nofreplacements = lg, s, nofreplacements + 1 - if s == stop then - break - else - s = s.next - end - else - break - end - end - end - end - local l2 = ligatures.ligature - if l2 then - if chainindex then - stop = last - end - if trace_ligatures then - if start == stop then - logprocess("%s: replacing character %s by ligature %s case 3",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2)) - else - logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char),gref(l2)) - end - end - head, start = toligature(kind,lookupname,head,start,stop,l2,currentlookup.flags[1],discfound) - return head, start, true, nofreplacements - elseif trace_bugs then - if start == stop then - logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar)) - else - logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char)) - end - end - end - end - return head, start, false, 0 -end - -chainmores.gsub_ligature = chainprocs.gsub_ligature - -function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - local markchar = start.char - if marks[markchar] then - local subtables = currentlookup.subtables - local lookupname = subtables[1] - local markanchors = lookuphash[lookupname] - if markanchors then - markanchors = markanchors[markchar] - end - if markanchors then - local base = start.prev -- [glyph] [start=mark] - if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then - local basechar = base.char - if marks[basechar] then - while true do - base = base.prev - if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then - basechar = base.char - if not marks[basechar] then - break - end - else - if trace_bugs then - logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar)) - end - return head, start, false - end - end - end - local baseanchors = descriptions[basechar].anchors - if baseanchors then - local baseanchors = baseanchors['basechar'] - if baseanchors then - local al = anchorlookups[lookupname] - for anchor,ba in next, baseanchors do - if al[anchor] then - local ma = markanchors[anchor] - if ma then - local dx, dy, bound = 
setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) - if trace_marks then - logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)", - cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) - end - return head, start, true - end - end - end - if trace_bugs then - logwarning("%s, no matching anchors for mark %s and base %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar)) - end - end - end - elseif trace_bugs then - logwarning("%s: prev node is no char",cref(kind,chainname,chainlookupname,lookupname)) - end - elseif trace_bugs then - logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar)) - end - elseif trace_bugs then - logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar)) - end - return head, start, false -end - -function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - local markchar = start.char - if marks[markchar] then - local subtables = currentlookup.subtables - local lookupname = subtables[1] - local markanchors = lookuphash[lookupname] - if markanchors then - markanchors = markanchors[markchar] - end - if markanchors then - local base = start.prev -- [glyph] [optional marks] [start=mark] - if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then - local basechar = base.char - if marks[basechar] then - while true do - base = base.prev - if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then - basechar = base.char - if not marks[basechar] then - break - end - else - if trace_bugs then - logwarning("%s: no base for mark %s",cref(kind,chainname,chainlookupname,lookupname),markchar) - end - return head, start, false - end - end - end - -- todo: like marks a ligatures hash - local index = start[a_ligacomp] - local baseanchors = descriptions[basechar].anchors - if baseanchors then - local baseanchors = baseanchors['baselig'] - if baseanchors then - local al = anchorlookups[lookupname] - for anchor,ba in next, baseanchors do - if al[anchor] then - local ma = markanchors[anchor] - if ma then - ba = ba[index] - if ba then - local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) -- index - if trace_marks then - logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)", - cref(kind,chainname,chainlookupname,lookupname),anchor,a or bound,gref(markchar),gref(basechar),index,dx,dy) - end - return head, start, true - end - end - end - end - if trace_bugs then - logwarning("%s: no matching anchors for mark %s and baselig %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar)) - end - end - end - elseif trace_bugs then - logwarning("feature %s, lookup %s: prev node is no char",kind,lookupname) - end - elseif trace_bugs then - logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar)) - end - elseif trace_bugs then - logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar)) - end - return head, start, false -end - -function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - local markchar = start.char - if marks[markchar] then - -- local alreadydone = markonce and start[a_markmark] - -- if not alreadydone then - -- local markanchors = 
descriptions[markchar].anchors markanchors = markanchors and markanchors.mark - local subtables = currentlookup.subtables - local lookupname = subtables[1] - local markanchors = lookuphash[lookupname] - if markanchors then - markanchors = markanchors[markchar] - end - if markanchors then - local base = start.prev -- [glyph] [basemark] [start=mark] - local slc = start[a_ligacomp] - if slc then -- a rather messy loop ... needs checking with husayni - while base do - local blc = base[a_ligacomp] - if blc and blc ~= slc then - base = base.prev - else - break - end - end - end - if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then -- subtype test can go - local basechar = base.char - local baseanchors = descriptions[basechar].anchors - if baseanchors then - baseanchors = baseanchors['basemark'] - if baseanchors then - local al = anchorlookups[lookupname] - for anchor,ba in next, baseanchors do - if al[anchor] then - local ma = markanchors[anchor] - if ma then - local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true) - if trace_marks then - logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)", - cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) - end - return head, start, true - end - end - end - if trace_bugs then - logwarning("%s: no matching anchors for mark %s and basemark %s",gref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar)) - end - end - end - elseif trace_bugs then - logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname)) - end - elseif trace_bugs then - logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar)) - end - -- elseif trace_marks and trace_details then - -- logprocess("%s, mark %s is already bound (n=%s), ignoring mark2mark",pref(kind,lookupname),gref(markchar),alreadydone) - -- end - elseif trace_bugs then - logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar)) - end - return head, start, false -end - -function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) - local alreadydone = cursonce and start[a_cursbase] - if not alreadydone then - local startchar = start.char - local subtables = currentlookup.subtables - local lookupname = subtables[1] - local exitanchors = lookuphash[lookupname] - if exitanchors then - exitanchors = exitanchors[startchar] - end - if exitanchors then - local done = false - if marks[startchar] then - if trace_cursive then - logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar)) - end - else - local nxt = start.next - while not done and nxt and nxt.id == glyph_code and nxt.font == currentfont and nxt.subtype<256 do - local nextchar = nxt.char - if marks[nextchar] then - -- should not happen (maybe warning) - nxt = nxt.next - else - local entryanchors = descriptions[nextchar] - if entryanchors then - entryanchors = entryanchors.anchors - if entryanchors then - entryanchors = entryanchors['centry'] - if entryanchors then - local al = anchorlookups[lookupname] - for anchor, entry in next, entryanchors do - if al[anchor] then - local exit = exitanchors[anchor] - if exit then - local dx, dy, bound = setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar]) - if trace_cursive then - logprocess("%s: moving %s to %s cursive (%p,%p) using 
anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode) - end - done = true - break - end - end - end - end - end - elseif trace_bugs then - -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar)) - onetimemessage(currentfont,startchar,"no entry anchors",report_fonts) - end - break - end - end - end - return head, start, done - else - if trace_cursive and trace_details then - logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone) - end - return head, start, false - end - end - return head, start, false -end - -function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence) - -- untested .. needs checking for the new model - local startchar = start.char - local subtables = currentlookup.subtables - local lookupname = subtables[1] - local kerns = lookuphash[lookupname] - if kerns then - kerns = kerns[startchar] -- needed ? - if kerns then - local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar]) - if trace_kerns then - logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h) - end - end - end - return head, start, false -end - -chainmores.gpos_single = chainprocs.gpos_single -- okay? - --- when machines become faster i will make a shared function - -function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence) - local snext = start.next - if snext then - local startchar = start.char - local subtables = currentlookup.subtables - local lookupname = subtables[1] - local kerns = lookuphash[lookupname] - if kerns then - kerns = kerns[startchar] - if kerns then - local lookuptype = lookuptypes[lookupname] - local prev, done = start, false - local factor = tfmdata.parameters.factor - while snext and snext.id == glyph_code and snext.font == currentfont and snext.subtype<256 do - local nextchar = snext.char - local krn = kerns[nextchar] - if not krn and marks[nextchar] then - prev = snext - snext = snext.next - else - if not krn then - -- skip - elseif type(krn) == "table" then - if lookuptype == "pair" then - local a, b = krn[2], krn[3] - if a and #a > 0 then - local startchar = start.char - local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar]) - if trace_kerns then - logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h) - end - end - if b and #b > 0 then - local startchar = start.char - local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar]) - if trace_kerns then - logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h) - end - end - else - report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname)) - local a, b = krn[2], krn[6] - if a and a ~= 0 then - local k = setkern(snext,factor,rlmode,a) - if trace_kerns then - logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar)) - end - end - if b and b ~= 0 then - logwarning("%s: ignoring second kern xoff %s",cref(kind,chainname,chainlookupname),b*factor) - end - 
end - done = true - elseif krn ~= 0 then - local k = setkern(snext,factor,rlmode,krn) - if trace_kerns then - logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar)) - end - done = true - end - break - end - end - return head, start, done - end - end - end - return head, start, false -end - -chainmores.gpos_pair = chainprocs.gpos_pair -- okay? - --- what pointer to return, spec says stop --- to be discussed ... is bidi changer a space? --- elseif char == zwnj and sequence[n][32] then -- brrr - --- somehow l or f is global --- we don't need to pass the currentcontext, saves a bit --- make a slow variant then can be activated but with more tracing - -local function show_skip(kind,chainname,char,ck,class) - if ck[9] then - logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a, %a => %a",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10]) - else - logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a",cref(kind,chainname),gref(char),class,ck[1],ck[2]) - end -end - -local function normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash) - -- local rule, lookuptype, sequence, f, l, lookups = ck[1], ck[2] ,ck[3], ck[4], ck[5], ck[6] - local flags = sequence.flags - local done = false - local skipmark = flags[1] - local skipligature = flags[2] - local skipbase = flags[3] - local someskip = skipmark or skipligature or skipbase -- could be stored in flags for a fast test (hm, flags could be false !) - local markclass = sequence.markclass -- todo, first we need a proper test - local skipped = false - for k=1,#contexts do - local match = true - local current = start - local last = start - local ck = contexts[k] - local seq = ck[3] - local s = #seq - -- f..l = mid string - if s == 1 then - -- never happens - match = current.id == glyph_code and current.font == currentfont and current.subtype<256 and seq[1][current.char] - else - -- maybe we need a better space check (maybe check for glue or category or combination) - -- we cannot optimize for n=2 because there can be disc nodes - local f, l = ck[4], ck[5] - -- current match - if f == 1 and f == l then -- current only - -- already a hit - -- match = true - else -- before/current/after | before/current | current/after - -- no need to test first hit (to be optimized) - if f == l then -- new, else last out of sync (f is > 1) - -- match = true - else - local n = f + 1 - last = last.next - while n <= l do - if last then - local id = last.id - if id == glyph_code then - if last.font == currentfont and last.subtype<256 then - local char = last.char - local ccd = descriptions[char] - if ccd then - local class = ccd.class - if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then - skipped = true - if trace_skips then - show_skip(kind,chainname,char,ck,class) - end - last = last.next - elseif seq[n][char] then - if n < l then - last = last.next - end - n = n + 1 - else - match = false - break - end - else - match = false - break - end - else - match = false - break - end - elseif id == disc_code then - last = last.next - else - match = false - break - end - else - match = false - break - end - end - end - end - -- before - if match and f > 1 then - local prev = start.prev - if prev then - local n = f-1 - while n >= 1 do - if prev then - local id = prev.id - if id == glyph_code then - if prev.font == currentfont and prev.subtype<256 then -- normal char - local 
char = prev.char - local ccd = descriptions[char] - if ccd then - local class = ccd.class - if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then - skipped = true - if trace_skips then - show_skip(kind,chainname,char,ck,class) - end - elseif seq[n][char] then - n = n -1 - else - match = false - break - end - else - match = false - break - end - else - match = false - break - end - elseif id == disc_code then - -- skip 'm - elseif seq[n][32] then - n = n -1 - else - match = false - break - end - prev = prev.prev - elseif seq[n][32] then -- somewhat special, as zapfino can have many preceding spaces - n = n -1 - else - match = false - break - end - end - elseif f == 2 then - match = seq[1][32] - else - for n=f-1,1 do - if not seq[n][32] then - match = false - break - end - end - end - end - -- after - if match and s > l then - local current = last and last.next - if current then - -- removed optimization for s-l == 1, we have to deal with marks anyway - local n = l + 1 - while n <= s do - if current then - local id = current.id - if id == glyph_code then - if current.font == currentfont and current.subtype<256 then -- normal char - local char = current.char - local ccd = descriptions[char] - if ccd then - local class = ccd.class - if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then - skipped = true - if trace_skips then - show_skip(kind,chainname,char,ck,class) - end - elseif seq[n][char] then - n = n + 1 - else - match = false - break - end - else - match = false - break - end - else - match = false - break - end - elseif id == disc_code then - -- skip 'm - elseif seq[n][32] then -- brrr - n = n + 1 - else - match = false - break - end - current = current.next - elseif seq[n][32] then - n = n + 1 - else - match = false - break - end - end - elseif s-l == 1 then - match = seq[s][32] - else - for n=l+1,s do - if not seq[n][32] then - match = false - break - end - end - end - end - end - if match then - -- ck == currentcontext - if trace_contexts then - local rule, lookuptype, f, l = ck[1], ck[2], ck[4], ck[5] - local char = start.char - if ck[9] then - logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a, %a => %a", - cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10]) - else - logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a", - cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype) - end - end - local chainlookups = ck[6] - if chainlookups then - local nofchainlookups = #chainlookups - -- we can speed this up if needed - if nofchainlookups == 1 then - local chainlookupname = chainlookups[1] - local chainlookup = lookuptable[chainlookupname] - if chainlookup then - local cp = chainprocs[chainlookup.type] - if cp then - local ok - head, start, ok = cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence) - if ok then - done = true - end - else - logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type) - end - else -- shouldn't happen - logprocess("%s is not yet supported",cref(kind,chainname,chainlookupname)) - end - else - local i = 1 - repeat - if skipped then - while true do - local char = start.char - local ccd = descriptions[char] - if ccd then - local class = ccd.class - if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and 
not markclass[char]) then - start = start.next - else - break - end - else - break - end - end - end - local chainlookupname = chainlookups[i] - local chainlookup = lookuptable[chainlookupname] - if not chainlookup then - -- okay, n matches, < n replacements - i = i + 1 - else - local cp = chainmores[chainlookup.type] - if not cp then - -- actually an error - logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type) - i = i + 1 - else - local ok, n - head, start, ok, n = cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence) - -- messy since last can be changed ! - if ok then - done = true - -- skip next one(s) if ligature - i = i + (n or 1) - else - i = i + 1 - end - end - end - if start then - start = start.next - else - -- weird - end - until i > nofchainlookups - end - else - local replacements = ck[7] - if replacements then - head, start, done = chainprocs.reversesub(head,start,last,kind,chainname,ck,lookuphash,replacements) -- sequence - else - done = true -- can be meant to be skipped - if trace_contexts then - logprocess("%s: skipping match",cref(kind,chainname)) - end - end - end - end - end - return head, start, done -end - --- Because we want to keep this elsewhere (an because speed is less an issue) we --- pass the font id so that the verbose variant can access the relevant helper tables. - -local verbose_handle_contextchain = function(font,...) - logwarning("no verbose handler installed, reverting to 'normal'") - otf.setcontextchain() - return normal_handle_contextchain(...) -end - -otf.chainhandlers = { - normal = normal_handle_contextchain, - verbose = verbose_handle_contextchain, -} - -function otf.setcontextchain(method) - if not method or method == "normal" or not otf.chainhandlers[method] then - if handlers.contextchain then -- no need for a message while making the format - logwarning("installing normal contextchain handler") - end - handlers.contextchain = normal_handle_contextchain - else - logwarning("installing contextchain handler %a",method) - local handler = otf.chainhandlers[method] - handlers.contextchain = function(...) - return handler(currentfont,...) -- hm, get rid of ... - end - end - handlers.gsub_context = handlers.contextchain - handlers.gsub_contextchain = handlers.contextchain - handlers.gsub_reversecontextchain = handlers.contextchain - handlers.gpos_contextchain = handlers.contextchain - handlers.gpos_context = handlers.contextchain -end - -otf.setcontextchain() - -local missing = { } -- we only report once - -local function logprocess(...) - if trace_steps then - registermessage(...) - end - report_process(...) 
-end - -local logwarning = report_process - -local function report_missing_cache(typ,lookup) - local f = missing[currentfont] if not f then f = { } missing[currentfont] = f end - local t = f[typ] if not t then t = { } f[typ] = t end - if not t[lookup] then - t[lookup] = true - logwarning("missing cache for lookup %a, type %a, font %a, name %a",lookup,typ,currentfont,tfmdata.properties.fullname) - end -end - -local resolved = { } -- we only resolve a font,script,language pair once - --- todo: pass all these 'locals' in a table - -local lookuphashes = { } - -setmetatableindex(lookuphashes, function(t,font) - local lookuphash = fontdata[font].resources.lookuphash - if not lookuphash or not next(lookuphash) then - lookuphash = false - end - t[font] = lookuphash - return lookuphash -end) - --- fonts.hashes.lookups = lookuphashes - -local autofeatures = fonts.analyzers.features -- was: constants - -local function initialize(sequence,script,language,enabled) - local features = sequence.features - if features then - for kind, scripts in next, features do - local valid = enabled[kind] - if valid then - local languages = scripts[script] or scripts[wildcard] - if languages and (languages[language] or languages[wildcard]) then - return { valid, autofeatures[kind] or false, sequence.chain or 0, kind, sequence } - end - end - end - end - return false -end - -function otf.dataset(tfmdata,font) -- generic variant, overloaded in context - local shared = tfmdata.shared - local properties = tfmdata.properties - local language = properties.language or "dflt" - local script = properties.script or "dflt" - local enabled = shared.features - local res = resolved[font] - if not res then - res = { } - resolved[font] = res - end - local rs = res[script] - if not rs then - rs = { } - res[script] = rs - end - local rl = rs[language] - if not rl then - rl = { - -- indexed but we can also add specific data by key - } - rs[language] = rl - local sequences = tfmdata.resources.sequences --- setmetatableindex(rl, function(t,k) --- if type(k) == "number" then --- local v = enabled and initialize(sequences[k],script,language,enabled) --- t[k] = v --- return v --- end --- end) -for s=1,#sequences do - local v = enabled and initialize(sequences[s],script,language,enabled) - if v then - rl[#rl+1] = v - end -end - end - return rl -end - --- elseif id == glue_code then --- if p[5] then -- chain --- local pc = pp[32] --- if pc then --- start, ok = start, false -- p[1](start,kind,p[2],pc,p[3],p[4]) --- if ok then --- done = true --- end --- if start then start = start.next end --- else --- start = start.next --- end --- else --- start = start.next --- end - --- there will be a new direction parser (pre-parsed etc) - --- less bytecode: 290 -> 254 --- --- attr = attr or false --- --- local a = getattr(start,0) --- if (a == attr and (not attribute or getattr(start,a_state) == attribute)) or (not attribute or getattr(start,a_state) == attribute) then --- -- the action --- end - -local function featuresprocessor(head,font,attr) - - local lookuphash = lookuphashes[font] -- we can also check sequences here - - if not lookuphash then - return head, false - end - - if trace_steps then - checkstep(head) - end - - tfmdata = fontdata[font] - descriptions = tfmdata.descriptions - characters = tfmdata.characters - resources = tfmdata.resources - - marks = resources.marks - anchorlookups = resources.lookup_to_anchor - lookuptable = resources.lookups - lookuptypes = resources.lookuptypes - - currentfont = font - rlmode = 0 - - local sequences = 
resources.sequences - local done = false - local datasets = otf.dataset(tfmdata,font,attr) - - local dirstack = { } -- could move outside function - - -- We could work on sub start-stop ranges instead but I wonder if there is that - -- much speed gain (experiments showed that it made not much sense) and we need - -- to keep track of directions anyway. Also at some point I want to play with - -- font interactions and then we do need the full sweeps. - - -- Keeping track of the headnode is needed for devanagari (I generalized it a bit - -- so that multiple cases are also covered.) - - for s=1,#datasets do - local dataset = datasets[s] - featurevalue = dataset[1] -- todo: pass to function instead of using a global - - local sequence = dataset[5] -- sequences[s] -- also dataset[5] - local rlparmode = 0 - local topstack = 0 - local success = false - local attribute = dataset[2] - local chain = dataset[3] -- sequence.chain or 0 - local typ = sequence.type - local subtables = sequence.subtables - if chain < 0 then - -- this is a limited case, no special treatments like 'init' etc - local handler = handlers[typ] - -- we need to get rid of this slide! probably no longer needed in latest luatex - local start = find_node_tail(head) -- slow (we can store tail because there's always a skip at the end): todo - while start do - local id = start.id - if id == glyph_code then - if start.font == font and start.subtype<256 then - local a = start[0] - if a then - a = a == attr - else - a = true - end - if a then - for i=1,#subtables do - local lookupname = subtables[i] - local lookupcache = lookuphash[lookupname] - if lookupcache then - local lookupmatch = lookupcache[start.char] - if lookupmatch then - head, start, success = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) - if success then - break - end - end - else - report_missing_cache(typ,lookupname) - end - end - if start then start = start.prev end - else - start = start.prev - end - else - start = start.prev - end - else - start = start.prev - end - end - else - local handler = handlers[typ] - local ns = #subtables - local start = head -- local ? - rlmode = 0 -- to be checked ? 
- if ns == 1 then -- happens often - local lookupname = subtables[1] - local lookupcache = lookuphash[lookupname] - if not lookupcache then -- also check for empty cache - report_missing_cache(typ,lookupname) - else - - local function subrun(start) - -- mostly for gsub, gpos would demand a more clever approach - local head = start - local done = false - while start do - local id = start.id - if id == glyph_code and start.font == font and start.subtype <256 then - local a = start[0] - if a then - a = (a == attr) and (not attribute or start[a_state] == attribute) - else - a = not attribute or start[a_state] == attribute - end - if a then - local lookupmatch = lookupcache[start.char] - if lookupmatch then - -- sequence kan weg - local ok - head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) - if ok then - done = true - end - end - if start then start = start.next end - else - start = start.next - end - else - start = start.next - end - end - if done then - success = true - return head - end - end - - local function kerndisc(disc) -- we can assume that prev and next are glyphs - local prev = disc.prev - local next = disc.next - if prev and next then - prev.next = next - -- next.prev = prev - local a = prev[0] - if a then - a = (a == attr) and (not attribute or prev[a_state] == attribute) - else - a = not attribute or prev[a_state] == attribute - end - if a then - local lookupmatch = lookupcache[prev.char] - if lookupmatch then - -- sequence kan weg - local h, d, ok = handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) - if ok then - done = true - success = true - end - end - end - prev.next = disc - -- next.prev = disc - end - return next - end - - while start do - local id = start.id - if id == glyph_code then - if start.font == font and start.subtype<256 then - local a = start[0] - if a then - a = (a == attr) and (not attribute or start[a_state] == attribute) - else - a = not attribute or start[a_state] == attribute - end - if a then - local lookupmatch = lookupcache[start.char] - if lookupmatch then - -- sequence kan weg - local ok - head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) - if ok then - success = true - end - end - if start then start = start.next end - else - start = start.next - end - else - start = start.next - end - elseif id == disc_code then - -- mostly for gsub - if start.subtype == discretionary_code then - local pre = start.pre - if pre then - local new = subrun(pre) - if new then start.pre = new end - end - local post = start.post - if post then - local new = subrun(post) - if new then start.post = new end - end - local replace = start.replace - if replace then - local new = subrun(replace) - if new then start.replace = new end - end -elseif typ == "gpos_single" or typ == "gpos_pair" then - kerndisc(start) - end - start = start.next - elseif id == whatsit_code then -- will be function - local subtype = start.subtype - if subtype == dir_code then - local dir = start.dir - if dir == "+TRT" or dir == "+TLT" then - topstack = topstack + 1 - dirstack[topstack] = dir - elseif dir == "-TRT" or dir == "-TLT" then - topstack = topstack - 1 - end - local newdir = dirstack[topstack] - if newdir == "+TRT" then - rlmode = -1 - elseif newdir == "+TLT" then - rlmode = 1 - else - rlmode = rlparmode - end - if trace_directions then - report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir) - end - elseif 
subtype == localpar_code then - local dir = start.dir - if dir == "TRT" then - rlparmode = -1 - elseif dir == "TLT" then - rlparmode = 1 - else - rlparmode = 0 - end - -- one might wonder if the par dir should be looked at, so we might as well drop the next line - rlmode = rlparmode - if trace_directions then - report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode) - end - end - start = start.next - elseif id == math_code then - start = end_of_math(start).next - else - start = start.next - end - end - end - else - - local function subrun(start) - -- mostly for gsub, gpos would demand a more clever approach - local head = start - local done = false - while start do - local id = start.id - if id == glyph_code and start.id == font and start.subtype <256 then - local a = start[0] - if a then - a = (a == attr) and (not attribute or start[a_state] == attribute) - else - a = not attribute or start[a_state] == attribute - end - if a then - for i=1,ns do - local lookupname = subtables[i] - local lookupcache = lookuphash[lookupname] - if lookupcache then - local lookupmatch = lookupcache[start.char] - if lookupmatch then - -- we could move all code inline but that makes things even more unreadable - local ok - head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) - if ok then - done = true - break - elseif not start then - -- don't ask why ... shouldn't happen - break - end - end - else - report_missing_cache(typ,lookupname) - end - end - if start then start = start.next end - else - start = start.next - end - else - start = start.next - end - end - if done then - success = true - return head - end - end - - local function kerndisc(disc) -- we can assume that prev and next are glyphs - local prev = disc.prev - local next = disc.next - if prev and next then - prev.next = next - -- next.prev = prev - local a = prev[0] - if a then - a = (a == attr) and (not attribute or prev[a_state] == attribute) - else - a = not attribute or prev[a_state] == attribute - end - if a then - for i=1,ns do - local lookupname = subtables[i] - local lookupcache = lookuphash[lookupname] - if lookupcache then - local lookupmatch = lookupcache[prev.char] - if lookupmatch then - -- we could move all code inline but that makes things even more unreadable - local h, d, ok = handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) - if ok then - done = true - break - end - end - else - report_missing_cache(typ,lookupname) - end - end - end - prev.next = disc - -- next.prev = disc - end - return next - end - - while start do - local id = start.id - if id == glyph_code then - if start.font == font and start.subtype<256 then - local a = start[0] - if a then - a = (a == attr) and (not attribute or start[a_state] == attribute) - else - a = not attribute or start[a_state] == attribute - end - if a then - for i=1,ns do - local lookupname = subtables[i] - local lookupcache = lookuphash[lookupname] - if lookupcache then - local lookupmatch = lookupcache[start.char] - if lookupmatch then - -- we could move all code inline but that makes things even more unreadable - local ok - head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) - if ok then - success = true - break - elseif not start then - -- don't ask why ... 
shouldn't happen - break - end - end - else - report_missing_cache(typ,lookupname) - end - end - if start then start = start.next end - else - start = start.next - end - else - start = start.next - end - elseif id == disc_code then - -- mostly for gsub - if start.subtype == discretionary_code then - local pre = start.pre - if pre then - local new = subrun(pre) - if new then start.pre = new end - end - local post = start.post - if post then - local new = subrun(post) - if new then start.post = new end - end - local replace = start.replace - if replace then - local new = subrun(replace) - if new then start.replace = new end - end -elseif typ == "gpos_single" or typ == "gpos_pair" then - kerndisc(start) - end - start = start.next - elseif id == whatsit_code then - local subtype = start.subtype - if subtype == dir_code then - local dir = start.dir - if dir == "+TRT" or dir == "+TLT" then - topstack = topstack + 1 - dirstack[topstack] = dir - elseif dir == "-TRT" or dir == "-TLT" then - topstack = topstack - 1 - end - local newdir = dirstack[topstack] - if newdir == "+TRT" then - rlmode = -1 - elseif newdir == "+TLT" then - rlmode = 1 - else - rlmode = rlparmode - end - if trace_directions then - report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir) - end - elseif subtype == localpar_code then - local dir = start.dir - if dir == "TRT" then - rlparmode = -1 - elseif dir == "TLT" then - rlparmode = 1 - else - rlparmode = 0 - end - rlmode = rlparmode - if trace_directions then - report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode) - end - end - start = start.next - elseif id == math_code then - start = end_of_math(start).next - else - start = start.next - end - end - end - end - if success then - done = true - end - if trace_steps then -- ? - registerstep(head) - end - end - return head, done -end - -local function generic(lookupdata,lookupname,unicode,lookuphash) - local target = lookuphash[lookupname] - if target then - target[unicode] = lookupdata - else - lookuphash[lookupname] = { [unicode] = lookupdata } - end -end - -local action = { - - substitution = generic, - multiple = generic, - alternate = generic, - position = generic, - - ligature = function(lookupdata,lookupname,unicode,lookuphash) - local target = lookuphash[lookupname] - if not target then - target = { } - lookuphash[lookupname] = target - end - for i=1,#lookupdata do - local li = lookupdata[i] - local tu = target[li] - if not tu then - tu = { } - target[li] = tu - end - target = tu - end - target.ligature = unicode - end, - - pair = function(lookupdata,lookupname,unicode,lookuphash) - local target = lookuphash[lookupname] - if not target then - target = { } - lookuphash[lookupname] = target - end - local others = target[unicode] - local paired = lookupdata[1] - if others then - others[paired] = lookupdata - else - others = { [paired] = lookupdata } - target[unicode] = others - end - end, - -} - -local function prepare_lookups(tfmdata) - - local rawdata = tfmdata.shared.rawdata - local resources = rawdata.resources - local lookuphash = resources.lookuphash - local anchor_to_lookup = resources.anchor_to_lookup - local lookup_to_anchor = resources.lookup_to_anchor - local lookuptypes = resources.lookuptypes - local characters = tfmdata.characters - local descriptions = tfmdata.descriptions - - -- we cannot free the entries in the descriptions as sometimes we access - -- then directly (for instance anchors) ... 
selectively freeing does save - -- much memory as it's only a reference to a table and the slot in the - -- description hash is not freed anyway - - for unicode, character in next, characters do -- we cannot loop over descriptions ! - - local description = descriptions[unicode] - - if description then - - local lookups = description.slookups - if lookups then - for lookupname, lookupdata in next, lookups do - action[lookuptypes[lookupname]](lookupdata,lookupname,unicode,lookuphash) - end - end - - local lookups = description.mlookups - if lookups then - for lookupname, lookuplist in next, lookups do - local lookuptype = lookuptypes[lookupname] - for l=1,#lookuplist do - local lookupdata = lookuplist[l] - action[lookuptype](lookupdata,lookupname,unicode,lookuphash) - end - end - end - - local list = description.kerns - if list then - for lookup, krn in next, list do -- ref to glyph, saves lookup - local target = lookuphash[lookup] - if target then - target[unicode] = krn - else - lookuphash[lookup] = { [unicode] = krn } - end - end - end - - local list = description.anchors - if list then - for typ, anchors in next, list do -- types - if typ == "mark" or typ == "cexit" then -- or entry? - for name, anchor in next, anchors do - local lookups = anchor_to_lookup[name] - if lookups then - for lookup, _ in next, lookups do - local target = lookuphash[lookup] - if target then - target[unicode] = anchors - else - lookuphash[lookup] = { [unicode] = anchors } - end - end - end - end - end - end - end - - end - - end - -end - -local function split(replacement,original) - local result = { } - for i=1,#replacement do - result[original[i]] = replacement[i] - end - return result -end - -local valid = { - coverage = { chainsub = true, chainpos = true, contextsub = true }, - reversecoverage = { reversesub = true }, - glyphs = { chainsub = true, chainpos = true }, -} - -local function prepare_contextchains(tfmdata) - local rawdata = tfmdata.shared.rawdata - local resources = rawdata.resources - local lookuphash = resources.lookuphash - local lookups = rawdata.lookups - if lookups then - for lookupname, lookupdata in next, rawdata.lookups do - local lookuptype = lookupdata.type - if lookuptype then - local rules = lookupdata.rules - if rules then - local format = lookupdata.format - local validformat = valid[format] - if not validformat then - report_prepare("unsupported format %a",format) - elseif not validformat[lookuptype] then - -- todo: dejavu-serif has one (but i need to see what use it has) - report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookupname) - else - local contexts = lookuphash[lookupname] - if not contexts then - contexts = { } - lookuphash[lookupname] = contexts - end - local t, nt = { }, 0 - for nofrules=1,#rules do - local rule = rules[nofrules] - local current = rule.current - local before = rule.before - local after = rule.after - local replacements = rule.replacements - local sequence = { } - local nofsequences = 0 - -- Eventually we can store start, stop and sequence in the cached file - -- but then less sharing takes place so best not do that without a lot - -- of profiling so let's forget about it. 
- if before then - for n=1,#before do - nofsequences = nofsequences + 1 - sequence[nofsequences] = before[n] - end - end - local start = nofsequences + 1 - for n=1,#current do - nofsequences = nofsequences + 1 - sequence[nofsequences] = current[n] - end - local stop = nofsequences - if after then - for n=1,#after do - nofsequences = nofsequences + 1 - sequence[nofsequences] = after[n] - end - end - if sequence[1] then - -- Replacements only happen with reverse lookups as they are single only. We - -- could pack them into current (replacement value instead of true) and then - -- use sequence[start] instead but it's somewhat ugly. - nt = nt + 1 - t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups, replacements } - for unic, _ in next, sequence[start] do - local cu = contexts[unic] - if not cu then - contexts[unic] = t - end - end - end - end - end - else - -- no rules - end - else - report_prepare("missing lookuptype for lookupname %a",lookupname) - end - end - end -end - --- we can consider lookuphash == false (initialized but empty) vs lookuphash == table - -local function featuresinitializer(tfmdata,value) - if true then -- value then - -- beware we need to use the topmost properties table - local rawdata = tfmdata.shared.rawdata - local properties = rawdata.properties - if not properties.initialized then - local starttime = trace_preparing and os.clock() - local resources = rawdata.resources - resources.lookuphash = resources.lookuphash or { } - prepare_contextchains(tfmdata) - prepare_lookups(tfmdata) - properties.initialized = true - if trace_preparing then - report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,tfmdata.properties.fullname) - end - end - end -end - -registerotffeature { - name = "features", - description = "features", - default = true, - initializers = { - position = 1, - node = featuresinitializer, - }, - processors = { - node = featuresprocessor, - } -} - --- This can be used for extra handlers, but should be used with care! - -otf.handlers = handlers diff --git a/src/fontloader/fontloader-fonts-tfm.lua b/src/fontloader/fontloader-fonts-tfm.lua deleted file mode 100644 index b9bb1bd..0000000 --- a/src/fontloader/fontloader-fonts-tfm.lua +++ /dev/null @@ -1,38 +0,0 @@ -if not modules then modules = { } end modules ['luatex-fonts-tfm'] = { - version = 1.001, - comment = "companion to luatex-*.tex", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -if context then - texio.write_nl("fatal error: this module is not for context") - os.exit() -end - -local fonts = fonts -local tfm = { } -fonts.handlers.tfm = tfm -fonts.formats.tfm = "type1" -- we need to have at least a value here - -function fonts.readers.tfm(specification) - local fullname = specification.filename or "" - if fullname == "" then - local forced = specification.forced or "" - if forced ~= "" then - fullname = specification.name .. "." .. 
forced - else - fullname = specification.name - end - end - local foundname = resolvers.findbinfile(fullname, 'tfm') or "" - if foundname == "" then - foundname = resolvers.findbinfile(fullname, 'ofm') or "" - end - if foundname ~= "" then - specification.filename = foundname - specification.format = "ofm" - return font.read_tfm(specification.filename,specification.size) - end -end diff --git a/src/fontloader/misc/fontloader-basics-nod.lua b/src/fontloader/misc/fontloader-basics-nod.lua new file mode 100644 index 0000000..ea539f3 --- /dev/null +++ b/src/fontloader/misc/fontloader-basics-nod.lua @@ -0,0 +1,244 @@ +if not modules then modules = { } end modules ['luatex-fonts-nod'] = { + version = 1.001, + comment = "companion to luatex-fonts.lua", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end + +-- Don't depend on code here as it is only needed to complement the +-- font handler code. + +-- Attributes: + +if tex.attribute[0] ~= 0 then + + texio.write_nl("log","!") + texio.write_nl("log","! Attribute 0 is reserved for ConTeXt's font feature management and has to be") + texio.write_nl("log","! set to zero. Also, some attributes in the range 1-255 are used for special") + texio.write_nl("log","! purposes so setting them at the TeX end might break the font handler.") + texio.write_nl("log","!") + + tex.attribute[0] = 0 -- else no features + +end + +attributes = attributes or { } +attributes.unsetvalue = -0x7FFFFFFF + +local numbers, last = { }, 127 + +attributes.private = attributes.private or function(name) + local number = numbers[name] + if not number then + if last < 255 then + last = last + 1 + end + number = last + numbers[name] = number + end + return number +end + +-- Nodes (a subset of context so that we don't get too much unused code): + +nodes = { } +nodes.pool = { } +nodes.handlers = { } + +local nodecodes = { } for k,v in next, node.types () do nodecodes[string.gsub(v,"_","")] = k end +local whatcodes = { } for k,v in next, node.whatsits() do whatcodes[string.gsub(v,"_","")] = k end +local glyphcodes = { [0] = "character", "glyph", "ligature", "ghost", "left", "right" } +local disccodes = { [0] = "discretionary", "explicit", "automatic", "regular", "first", "second" } + +nodes.nodecodes = nodecodes +nodes.whatcodes = whatcodes +nodes.whatsitcodes = whatcodes +nodes.glyphcodes = glyphcodes +nodes.disccodes = disccodes + +local free_node = node.free +local remove_node = node.remove +local new_node = node.new +local traverse_id = node.traverse_id + +nodes.handlers.protectglyphs = node.protect_glyphs +nodes.handlers.unprotectglyphs = node.unprotect_glyphs + +local math_code = nodecodes.math +local end_of_math = node.end_of_math + +function node.end_of_math(n) + if n.id == math_code and n.subtype == 1 then + return n + else + return end_of_math(n) + end +end + +function nodes.remove(head, current, free_too) + local t = current + head, current = remove_node(head,current) + if t then + if free_too then + free_node(t) + t = nil + else + t.next, t.prev = nil, nil + end + end + return head, current, t +end + +function nodes.delete(head,current) + return nodes.remove(head,current,true) +end + +function nodes.pool.kern(k) + local n = new_node("kern",1) + n.kern = k + return n +end + +local getfield = node.getfield +local setfield = node.setfield + +nodes.getfield = getfield +nodes.setfield 
= setfield + +nodes.getattr = getfield +nodes.setattr = setfield + +-- being lazy ... just copy a bunch ... not all needed in generic but we assume +-- nodes to be kind of private anyway + +nodes.tostring = node.tostring or tostring +nodes.copy = node.copy +nodes.copy_list = node.copy_list +nodes.delete = node.delete +nodes.dimensions = node.dimensions +nodes.end_of_math = node.end_of_math +nodes.flush_list = node.flush_list +nodes.flush_node = node.flush_node +nodes.free = node.free +nodes.insert_after = node.insert_after +nodes.insert_before = node.insert_before +nodes.hpack = node.hpack +nodes.new = node.new +nodes.tail = node.tail +nodes.traverse = node.traverse +nodes.traverse_id = node.traverse_id +nodes.slide = node.slide +nodes.vpack = node.vpack + +nodes.first_glyph = node.first_glyph +nodes.first_character = node.first_character +nodes.has_glyph = node.has_glyph or node.first_glyph + +nodes.current_attr = node.current_attr +nodes.do_ligature_n = node.do_ligature_n +nodes.has_field = node.has_field +nodes.last_node = node.last_node +nodes.usedlist = node.usedlist +nodes.protrusion_skippable = node.protrusion_skippable +nodes.write = node.write + +nodes.has_attribute = node.has_attribute +nodes.set_attribute = node.set_attribute +nodes.unset_attribute = node.unset_attribute + +nodes.protect_glyphs = node.protect_glyphs +nodes.unprotect_glyphs = node.unprotect_glyphs +nodes.kerning = node.kerning +nodes.ligaturing = node.ligaturing +nodes.mlist_to_hlist = node.mlist_to_hlist + +-- in generic code, at least for some time, we stay nodes, while in context +-- we can go nuts (e.g. experimental); this split permits us us keep code +-- used elsewhere stable but at the same time play around in context + +local direct = node.direct +local nuts = { } +nodes.nuts = nuts + +local tonode = direct.tonode +local tonut = direct.todirect + +nodes.tonode = tonode +nodes.tonut = tonut + +nuts.tonode = tonode +nuts.tonut = tonut + + +local getfield = direct.getfield +local setfield = direct.setfield + +nuts.getfield = getfield +nuts.setfield = setfield +nuts.getnext = direct.getnext +nuts.getprev = direct.getprev +nuts.getid = direct.getid +nuts.getattr = getfield +nuts.setattr = setfield +nuts.getfont = direct.getfont +nuts.getsubtype = direct.getsubtype +nuts.getchar = direct.getchar + +nuts.insert_before = direct.insert_before +nuts.insert_after = direct.insert_after +nuts.delete = direct.delete +nuts.copy = direct.copy +nuts.tail = direct.tail +nuts.flush_list = direct.flush_list +nuts.end_of_math = direct.end_of_math +nuts.traverse = direct.traverse +nuts.traverse_id = direct.traverse_id + +nuts.getprop = nuts.getattr +nuts.setprop = nuts.setattr + +local new_nut = direct.new +nuts.new = new_nut +nuts.pool = { } + +function nuts.pool.kern(k) + local n = new_nut("kern",1) + setfield(n,"kern",k) + return n +end + +-- properties as used in the (new) injector: + +local propertydata = direct.get_properties_table() +nodes.properties = { data = propertydata } + +direct.set_properties_mode(true,true) -- needed for injection + +function direct.set_properties_mode() end -- we really need the set modes + +nuts.getprop = function(n,k) + local p = propertydata[n] + if p then + return p[k] + end +end + +nuts.setprop = function(n,k,v) + if v then + local p = propertydata[n] + if p then + p[k] = v + else + propertydata[n] = { [k] = v } + end + end +end + +nodes.setprop = nodes.setproperty +nodes.getprop = nodes.getproperty diff --git a/src/fontloader/misc/fontloader-basics.tex 
b/src/fontloader/misc/fontloader-basics.tex new file mode 100644 index 0000000..abe4989 --- /dev/null +++ b/src/fontloader/misc/fontloader-basics.tex @@ -0,0 +1,23 @@ +%D \module +%D [ file=luatex-basics, +%D version=2009.12.01, +%D title=\LUATEX\ Support Macros, +%D subtitle=Attribute Allocation, +%D author=Hans Hagen, +%D date=\currentdate, +%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}] + +%D As soon as we feel the need this file will file will contain an extension +%D to the standard plain register allocation. For the moment we stick to a +%D rather dumb attribute allocator. We start at 256 because we don't want +%D any interference with the attributes used in the font handler. + +\ifx\newattribute\undefined \else \endinput \fi + +\newcount \lastallocatedattribute \lastallocatedattribute=255 + +\def\newattribute#1% + {\global\advance\lastallocatedattribute 1 + \attributedef#1\lastallocatedattribute} + +\endinput diff --git a/src/fontloader/misc/fontloader-data-con.lua b/src/fontloader/misc/fontloader-data-con.lua new file mode 100644 index 0000000..240538d --- /dev/null +++ b/src/fontloader/misc/fontloader-data-con.lua @@ -0,0 +1,138 @@ +if not modules then modules = { } end modules ['data-con'] = { + version = 1.100, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local format, lower, gsub = string.format, string.lower, string.gsub + +local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end) +local trace_containers = false trackers.register("resolvers.containers", function(v) trace_containers = v end) +local trace_storage = false trackers.register("resolvers.storage", function(v) trace_storage = v end) + +--[[ldx-- +

+Once we found ourselves defining similar cache constructs
+several times, containers were introduced. Containers are used
+to collect tables in memory and reuse them when possible based
+on (unique) hashes (to be provided by the calling function).
+
+Caching to disk is disabled by default. Version numbers are
+stored in the saved table which makes it possible to change the
+table structures without bothering about the disk cache.
+
+Examples of usage can be found in the font related code.

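For orientation, here is a minimal usage sketch of the container API this file introduces (containers.define, containers.read, containers.write). The category, subcategory, version and the compute callback are made-up illustrations, and a working caches backend (caches.loaddata / caches.savedata) is assumed to be loaded:

    -- illustration only, not part of the patch: assumes the containers module
    -- defined just below in this file plus a caches backend are available
    local democache = containers.define("fonts", "demo", 1.001, true)

    local function cached(name, compute)
        local data = containers.read(democache, name)      -- nil on a miss or version mismatch
        if not data then
            data = compute(name)                           -- do the expensive work once
            data = containers.write(democache, name, data) -- stamps data.cache_version and stores it
        end
        return data
    end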
+--ldx]]-- + +containers = containers or { } +local containers = containers +containers.usecache = true + +local report_containers = logs.reporter("resolvers","containers") + +local allocated = { } + +local mt = { + __index = function(t,k) + if k == "writable" then + local writable = caches.getwritablepath(t.category,t.subcategory) or { "." } + t.writable = writable + return writable + elseif k == "readables" then + local readables = caches.getreadablepaths(t.category,t.subcategory) or { "." } + t.readables = readables + return readables + end + end, + __storage__ = true +} + +function containers.define(category, subcategory, version, enabled) + if category and subcategory then + local c = allocated[category] + if not c then + c = { } + allocated[category] = c + end + local s = c[subcategory] + if not s then + s = { + category = category, + subcategory = subcategory, + storage = { }, + enabled = enabled, + version = version or math.pi, -- after all, this is TeX + trace = false, + -- writable = caches.getwritablepath and caches.getwritablepath (category,subcategory) or { "." }, + -- readables = caches.getreadablepaths and caches.getreadablepaths(category,subcategory) or { "." }, + } + setmetatable(s,mt) + c[subcategory] = s + end + return s + end +end + +function containers.is_usable(container,name) + return container.enabled and caches and caches.is_writable(container.writable, name) +end + +function containers.is_valid(container,name) + if name and name ~= "" then + local storage = container.storage[name] + return storage and storage.cache_version == container.version + else + return false + end +end + +function containers.read(container,name) + local storage = container.storage + local stored = storage[name] + if not stored and container.enabled and caches and containers.usecache then + stored = caches.loaddata(container.readables,name) + if stored and stored.cache_version == container.version then + if trace_cache or trace_containers then + report_containers("action %a, category %a, name %a","load",container.subcategory,name) + end + else + stored = nil + end + storage[name] = stored + elseif stored then + if trace_cache or trace_containers then + report_containers("action %a, category %a, name %a","reuse",container.subcategory,name) + end + end + return stored +end + +function containers.write(container, name, data) + if data then + data.cache_version = container.version + if container.enabled and caches then + local unique, shared = data.unique, data.shared + data.unique, data.shared = nil, nil + caches.savedata(container.writable, name, data) + if trace_cache or trace_containers then + report_containers("action %a, category %a, name %a","save",container.subcategory,name) + end + data.unique, data.shared = unique, shared + end + if trace_cache or trace_containers then + report_containers("action %a, category %a, name %a","store",container.subcategory,name) + end + container.storage[name] = data + end + return data +end + +function containers.content(container,name) + return container.storage[name] +end + +function containers.cleanname(name) + -- return (gsub(lower(name),"[^%w]+","-")) + return (gsub(lower(name),"[^%w\128-\255]+","-")) -- more utf friendly +end diff --git a/src/fontloader/misc/fontloader-font-afk.lua b/src/fontloader/misc/fontloader-font-afk.lua new file mode 100644 index 0000000..8b65b06 --- /dev/null +++ b/src/fontloader/misc/fontloader-font-afk.lua @@ -0,0 +1,200 @@ +if not modules then modules = { } end modules ['font-afk'] = { + version = 1.001, + comment = 
"companion to font-afm.lua", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files", + dataonly = true, +} + +--[[ldx-- +

+For ligatures, only characters with a code smaller than 128 make sense,
+anything larger is encoding dependent. An interesting complication is that a
+character can be in an encoding twice but is hashed once.

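As a rough sketch of how helper data like the ligatures table below is consumed (the actual consumer, addligatures in font-afm.lua, works on the unicode-indexed descriptions and is not shown here), every { second, ligature } pair becomes a ligature link on the first glyph:

    -- illustration only: 'characters' stands for a glyph-name indexed table in
    -- the shape the afm reader builds; this is not the real addligatures code
    local function apply_ligatures(characters, ligatures)
        for first, list in next, ligatures do
            local chr = characters[first]
            if chr then
                local ligs = chr.ligatures or { }
                for i = 1, #list do
                    ligs[list[i][1]] = list[i][2] -- e.g. characters['f'].ligatures['i'] = 'fi'
                end
                chr.ligatures = ligs
            end
        end
    end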
+--ldx]]-- + +local allocate = utilities.storage.allocate + +fonts.handlers.afm.helpdata = { + ligatures = allocate { -- okay, nowadays we could parse the name but type 1 fonts + ['f'] = { -- don't have that many ligatures anyway + { 'f', 'ff' }, + { 'i', 'fi' }, + { 'l', 'fl' }, + }, + ['ff'] = { + { 'i', 'ffi' } + }, + ['fi'] = { + { 'i', 'fii' } + }, + ['fl'] = { + { 'i', 'fli' } + }, + ['s'] = { + { 't', 'st' } + }, + ['i'] = { + { 'j', 'ij' } + }, + }, + texligatures = allocate { + -- ['space'] = { + -- { 'L', 'Lslash' }, + -- { 'l', 'lslash' } + -- }, + -- ['question'] = { + -- { 'quoteleft', 'questiondown' } + -- }, + -- ['exclam'] = { + -- { 'quoteleft', 'exclamdown' } + -- }, + ['quoteleft'] = { + { 'quoteleft', 'quotedblleft' } + }, + ['quoteright'] = { + { 'quoteright', 'quotedblright' } + }, + ['hyphen'] = { + { 'hyphen', 'endash' } + }, + ['endash'] = { + { 'hyphen', 'emdash' } + } + }, + leftkerned = allocate { + AEligature = "A", aeligature = "a", + OEligature = "O", oeligature = "o", + IJligature = "I", ijligature = "i", + AE = "A", ae = "a", + OE = "O", oe = "o", + IJ = "I", ij = "i", + Ssharp = "S", ssharp = "s", + }, + rightkerned = allocate { + AEligature = "E", aeligature = "e", + OEligature = "E", oeligature = "e", + IJligature = "J", ijligature = "j", + AE = "E", ae = "e", + OE = "E", oe = "e", + IJ = "J", ij = "j", + Ssharp = "S", ssharp = "s", + }, + bothkerned = allocate { + Acircumflex = "A", acircumflex = "a", + Ccircumflex = "C", ccircumflex = "c", + Ecircumflex = "E", ecircumflex = "e", + Gcircumflex = "G", gcircumflex = "g", + Hcircumflex = "H", hcircumflex = "h", + Icircumflex = "I", icircumflex = "i", + Jcircumflex = "J", jcircumflex = "j", + Ocircumflex = "O", ocircumflex = "o", + Scircumflex = "S", scircumflex = "s", + Ucircumflex = "U", ucircumflex = "u", + Wcircumflex = "W", wcircumflex = "w", + Ycircumflex = "Y", ycircumflex = "y", + + Agrave = "A", agrave = "a", + Egrave = "E", egrave = "e", + Igrave = "I", igrave = "i", + Ograve = "O", ograve = "o", + Ugrave = "U", ugrave = "u", + Ygrave = "Y", ygrave = "y", + + Atilde = "A", atilde = "a", + Itilde = "I", itilde = "i", + Otilde = "O", otilde = "o", + Utilde = "U", utilde = "u", + Ntilde = "N", ntilde = "n", + + Adiaeresis = "A", adiaeresis = "a", Adieresis = "A", adieresis = "a", + Ediaeresis = "E", ediaeresis = "e", Edieresis = "E", edieresis = "e", + Idiaeresis = "I", idiaeresis = "i", Idieresis = "I", idieresis = "i", + Odiaeresis = "O", odiaeresis = "o", Odieresis = "O", odieresis = "o", + Udiaeresis = "U", udiaeresis = "u", Udieresis = "U", udieresis = "u", + Ydiaeresis = "Y", ydiaeresis = "y", Ydieresis = "Y", ydieresis = "y", + + Aacute = "A", aacute = "a", + Cacute = "C", cacute = "c", + Eacute = "E", eacute = "e", + Iacute = "I", iacute = "i", + Lacute = "L", lacute = "l", + Nacute = "N", nacute = "n", + Oacute = "O", oacute = "o", + Racute = "R", racute = "r", + Sacute = "S", sacute = "s", + Uacute = "U", uacute = "u", + Yacute = "Y", yacute = "y", + Zacute = "Z", zacute = "z", + + Dstroke = "D", dstroke = "d", + Hstroke = "H", hstroke = "h", + Tstroke = "T", tstroke = "t", + + Cdotaccent = "C", cdotaccent = "c", + Edotaccent = "E", edotaccent = "e", + Gdotaccent = "G", gdotaccent = "g", + Idotaccent = "I", idotaccent = "i", + Zdotaccent = "Z", zdotaccent = "z", + + Amacron = "A", amacron = "a", + Emacron = "E", emacron = "e", + Imacron = "I", imacron = "i", + Omacron = "O", omacron = "o", + Umacron = "U", umacron = "u", + + Ccedilla = "C", ccedilla = "c", + Kcedilla = "K", kcedilla = "k", 
+ Lcedilla = "L", lcedilla = "l", + Ncedilla = "N", ncedilla = "n", + Rcedilla = "R", rcedilla = "r", + Scedilla = "S", scedilla = "s", + Tcedilla = "T", tcedilla = "t", + + Ohungarumlaut = "O", ohungarumlaut = "o", + Uhungarumlaut = "U", uhungarumlaut = "u", + + Aogonek = "A", aogonek = "a", + Eogonek = "E", eogonek = "e", + Iogonek = "I", iogonek = "i", + Uogonek = "U", uogonek = "u", + + Aring = "A", aring = "a", + Uring = "U", uring = "u", + + Abreve = "A", abreve = "a", + Ebreve = "E", ebreve = "e", + Gbreve = "G", gbreve = "g", + Ibreve = "I", ibreve = "i", + Obreve = "O", obreve = "o", + Ubreve = "U", ubreve = "u", + + Ccaron = "C", ccaron = "c", + Dcaron = "D", dcaron = "d", + Ecaron = "E", ecaron = "e", + Lcaron = "L", lcaron = "l", + Ncaron = "N", ncaron = "n", + Rcaron = "R", rcaron = "r", + Scaron = "S", scaron = "s", + Tcaron = "T", tcaron = "t", + Zcaron = "Z", zcaron = "z", + + dotlessI = "I", dotlessi = "i", + dotlessJ = "J", dotlessj = "j", + + AEligature = "AE", aeligature = "ae", AE = "AE", ae = "ae", + OEligature = "OE", oeligature = "oe", OE = "OE", oe = "oe", + IJligature = "IJ", ijligature = "ij", IJ = "IJ", ij = "ij", + + Lstroke = "L", lstroke = "l", Lslash = "L", lslash = "l", + Ostroke = "O", ostroke = "o", Oslash = "O", oslash = "o", + + Ssharp = "SS", ssharp = "ss", + + Aumlaut = "A", aumlaut = "a", + Eumlaut = "E", eumlaut = "e", + Iumlaut = "I", iumlaut = "i", + Oumlaut = "O", oumlaut = "o", + Uumlaut = "U", uumlaut = "u", + } +} diff --git a/src/fontloader/misc/fontloader-font-afm.lua b/src/fontloader/misc/fontloader-font-afm.lua new file mode 100644 index 0000000..ca5616a --- /dev/null +++ b/src/fontloader/misc/fontloader-font-afm.lua @@ -0,0 +1,1042 @@ +if not modules then modules = { } end modules ['font-afm'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +--[[ldx-- +

+Some code may look a bit obscure but this has to do with the
+fact that we also use this code for testing and much code evolved
+in the transition from tfm to afm to otf.
+
+The following code still has traces of intermediate font support
+where we handle font encodings. Eventually font encoding goes
+away.
+
+The embedding of a font involves creating temporary files and
+depending on your system setup that can fail. It took more than a
+day to figure out why sometimes embedding failed in mingw luatex
+where running on a real path like c:\... failed while running on
+say e:\... being a link worked well. The native windows binaries
+don't have this issue.

+ +--ldx]]-- + +local fonts, logs, trackers, containers, resolvers = fonts, logs, trackers, containers, resolvers + +local next, type, tonumber = next, type, tonumber +local format, match, gmatch, lower, gsub, strip = string.format, string.match, string.gmatch, string.lower, string.gsub, string.strip +local abs = math.abs +local P, S, C, R, lpegmatch, patterns = lpeg.P, lpeg.S, lpeg.C, lpeg.R, lpeg.match, lpeg.patterns +local derivetable = table.derive + +local trace_features = false trackers.register("afm.features", function(v) trace_features = v end) +local trace_indexing = false trackers.register("afm.indexing", function(v) trace_indexing = v end) +local trace_loading = false trackers.register("afm.loading", function(v) trace_loading = v end) +local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end) + +local report_afm = logs.reporter("fonts","afm loading") + +local setmetatableindex = table.setmetatableindex + +local findbinfile = resolvers.findbinfile + +local definers = fonts.definers +local readers = fonts.readers +local constructors = fonts.constructors + +local afm = constructors.newhandler("afm") +local pfb = constructors.newhandler("pfb") + +local afmfeatures = constructors.newfeatures("afm") +local registerafmfeature = afmfeatures.register + +afm.version = 1.500 -- incrementing this number one up will force a re-cache +afm.cache = containers.define("fonts", "afm", afm.version, true) +afm.autoprefixed = true -- this will become false some day (catches texnansi-blabla.*) + +afm.helpdata = { } -- set later on so no local for this +afm.syncspace = true -- when true, nicer stretch values +afm.addligatures = true -- best leave this set to true +afm.addtexligatures = true -- best leave this set to true +afm.addkerns = true -- best leave this set to true + +local overloads = fonts.mappings.overloads + +local applyruntimefixes = fonts.treatments and fonts.treatments.applyfixes + +local function setmode(tfmdata,value) + if value then + tfmdata.properties.mode = lower(value) + end +end + +registerafmfeature { + name = "mode", + description = "mode", + initializers = { + base = setmode, + node = setmode, + } +} + +--[[ldx-- +

+We start with the basic reader which we give a name similar to the
+built in tfm and otf reader.

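As a concrete illustration of what this basic reader pulls out of an AFM file, here is the shape of one character entry; the metric values are invented, and get_charmetrics is the helper defined further down in this file:

    -- illustration only: given an AFM char metrics line like
    --   C 102 ; WX 333 ; N f ; B 20 0 383 683 ; L i fi ; L l fl ;
    -- the get_charmetrics helper builds a temporary entry of roughly this shape
    -- (index from C, width from WX, bounding box from B, ligature links from L):
    characters["f"] = {
        index       = 102,
        width       = 333,
        boundingbox = { 20, 0, 383, 683 },
        ligatures   = { i = "fi", l = "fl" },
    }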
+--ldx]]-- + +--~ Comment FONTIDENTIFIER LMMATHSYMBOLS10 +--~ Comment CODINGSCHEME TEX MATH SYMBOLS +--~ Comment DESIGNSIZE 10.0 pt +--~ Comment CHECKSUM O 4261307036 +--~ Comment SPACE 0 plus 0 minus 0 +--~ Comment QUAD 1000 +--~ Comment EXTRASPACE 0 +--~ Comment NUM 676.508 393.732 443.731 +--~ Comment DENOM 685.951 344.841 +--~ Comment SUP 412.892 362.892 288.889 +--~ Comment SUB 150 247.217 +--~ Comment SUPDROP 386.108 +--~ Comment SUBDROP 50 +--~ Comment DELIM 2390 1010 +--~ Comment AXISHEIGHT 250 + +local comment = P("Comment") +local spacing = patterns.spacer -- S(" \t")^1 +local lineend = patterns.newline -- S("\n\r") +local words = C((1 - lineend)^1) +local number = C((R("09") + S("."))^1) / tonumber * spacing^0 +local data = lpeg.Carg(1) + +local pattern = ( -- needs testing ... not used anyway as we no longer need math afm's + comment * spacing * + ( + data * ( + ("CODINGSCHEME" * spacing * words ) / function(fd,a) end + + ("DESIGNSIZE" * spacing * number * words ) / function(fd,a) fd[ 1] = a end + + ("CHECKSUM" * spacing * number * words ) / function(fd,a) fd[ 2] = a end + + ("SPACE" * spacing * number * "plus" * number * "minus" * number) / function(fd,a,b,c) fd[ 3], fd[ 4], fd[ 5] = a, b, c end + + ("QUAD" * spacing * number ) / function(fd,a) fd[ 6] = a end + + ("EXTRASPACE" * spacing * number ) / function(fd,a) fd[ 7] = a end + + ("NUM" * spacing * number * number * number ) / function(fd,a,b,c) fd[ 8], fd[ 9], fd[10] = a, b, c end + + ("DENOM" * spacing * number * number ) / function(fd,a,b ) fd[11], fd[12] = a, b end + + ("SUP" * spacing * number * number * number ) / function(fd,a,b,c) fd[13], fd[14], fd[15] = a, b, c end + + ("SUB" * spacing * number * number ) / function(fd,a,b) fd[16], fd[17] = a, b end + + ("SUPDROP" * spacing * number ) / function(fd,a) fd[18] = a end + + ("SUBDROP" * spacing * number ) / function(fd,a) fd[19] = a end + + ("DELIM" * spacing * number * number ) / function(fd,a,b) fd[20], fd[21] = a, b end + + ("AXISHEIGHT" * spacing * number ) / function(fd,a) fd[22] = a end + ) + + (1-lineend)^0 + ) + + (1-comment)^1 +)^0 + +local function scan_comment(str) + local fd = { } + lpegmatch(pattern,str,1,fd) + return fd +end + +-- On a rainy day I will rewrite this in lpeg ... or we can use the (slower) fontloader +-- as in now supports afm/pfb loading but it's not too bad to have different methods +-- for testing approaches. 
+ +local keys = { } + +function keys.FontName (data,line) data.metadata.fontname = strip (line) -- get rid of spaces + data.metadata.fullname = strip (line) end +function keys.ItalicAngle (data,line) data.metadata.italicangle = tonumber (line) end +function keys.IsFixedPitch(data,line) data.metadata.isfixedpitch = toboolean(line,true) end +function keys.CharWidth (data,line) data.metadata.charwidth = tonumber (line) end +function keys.XHeight (data,line) data.metadata.xheight = tonumber (line) end +function keys.Descender (data,line) data.metadata.descender = tonumber (line) end +function keys.Ascender (data,line) data.metadata.ascender = tonumber (line) end +function keys.Comment (data,line) + -- Comment DesignSize 12 (pts) + -- Comment TFM designsize: 12 (in points) + line = lower(line) + local designsize = match(line,"designsize[^%d]*(%d+)") + if designsize then data.metadata.designsize = tonumber(designsize) end +end + +local function get_charmetrics(data,charmetrics,vector) + local characters = data.characters + local chr, ind = { }, 0 + for k,v in gmatch(charmetrics,"([%a]+) +(.-) *;") do + if k == 'C' then + v = tonumber(v) + if v < 0 then + ind = ind + 1 -- ? + else + ind = v + end + chr = { + index = ind + } + elseif k == 'WX' then + chr.width = tonumber(v) + elseif k == 'N' then + characters[v] = chr + elseif k == 'B' then + local llx, lly, urx, ury = match(v,"^ *(.-) +(.-) +(.-) +(.-)$") + chr.boundingbox = { tonumber(llx), tonumber(lly), tonumber(urx), tonumber(ury) } + elseif k == 'L' then + local plus, becomes = match(v,"^(.-) +(.-)$") + local ligatures = chr.ligatures + if ligatures then + ligatures[plus] = becomes + else + chr.ligatures = { [plus] = becomes } + end + end + end +end + +local function get_kernpairs(data,kernpairs) + local characters = data.characters + for one, two, value in gmatch(kernpairs,"KPX +(.-) +(.-) +(.-)\n") do + local chr = characters[one] + if chr then + local kerns = chr.kerns + if kerns then + kerns[two] = tonumber(value) + else + chr.kerns = { [two] = tonumber(value) } + end + end + end +end + +local function get_variables(data,fontmetrics) + for key, rest in gmatch(fontmetrics,"(%a+) *(.-)[\n\r]") do + local keyhandler = keys[key] + if keyhandler then + keyhandler(data,rest) + end + end +end + +local function get_indexes(data,pfbname) + data.resources.filename = resolvers.unresolve(pfbname) -- no shortcut + local pfbblob = fontloader.open(pfbname) + if pfbblob then + local characters = data.characters + local pfbdata = fontloader.to_table(pfbblob) + if pfbdata then + local glyphs = pfbdata.glyphs + if glyphs then + if trace_loading then + report_afm("getting index data from %a",pfbname) + end + for index, glyph in next, glyphs do + -- for index, glyph in table.sortedhash(glyphs) do + local name = glyph.name + if name then + local char = characters[name] + if char then + if trace_indexing then + report_afm("glyph %a has index %a",name,index) + end + char.index = index + end + end + end + elseif trace_loading then + report_afm("no glyph data in pfb file %a",pfbname) + end + elseif trace_loading then + report_afm("no data in pfb file %a",pfbname) + end + fontloader.close(pfbblob) + elseif trace_loading then + report_afm("invalid pfb file %a",pfbname) + end +end + +local function readafm(filename) + local ok, afmblob, size = resolvers.loadbinfile(filename) -- has logging + if ok and afmblob then + local data = { + resources = { + filename = resolvers.unresolve(filename), + version = afm.version, + creator = "context mkiv", + }, + properties = { + 
hasitalics = false, + }, + goodies = { + }, + metadata = { + filename = file.removesuffix(file.basename(filename)) + }, + characters = { + -- a temporary store + }, + descriptions = { + -- the final store + }, + } + afmblob = gsub(afmblob,"StartCharMetrics(.-)EndCharMetrics", function(charmetrics) + if trace_loading then + report_afm("loading char metrics") + end + get_charmetrics(data,charmetrics,vector) + return "" + end) + afmblob = gsub(afmblob,"StartKernPairs(.-)EndKernPairs", function(kernpairs) + if trace_loading then + report_afm("loading kern pairs") + end + get_kernpairs(data,kernpairs) + return "" + end) + afmblob = gsub(afmblob,"StartFontMetrics%s+([%d%.]+)(.-)EndFontMetrics", function(version,fontmetrics) + if trace_loading then + report_afm("loading variables") + end + data.afmversion = version + get_variables(data,fontmetrics) + data.fontdimens = scan_comment(fontmetrics) -- todo: all lpeg, no time now + return "" + end) + return data + else + if trace_loading then + report_afm("no valid afm file %a",filename) + end + return nil + end +end + +--[[ldx-- +

We cache files. Caching is taken care of in the loader. We cheat a bit +by adding ligatures and kern information to the afm derived data. That +way we can set them faster when defining a font.

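+As a rough illustration (the concrete names and values below are invented),
+the table that ends up in the cache looks more or less like this:
+
+    local data = {
+      metadata     = { fontname = "SomeFont", italicangle = 0, designsize = 12 },
+      descriptions = {
+        [0x0041] = { name = "A", index = 34, width = 722,
+                     boundingbox = { 15, 0, 706, 674 },
+                     kerns = { [0x0056] = -80 } },
+      },
+      resources    = { filename = "somefont.afm", marks = { } },
+      fontdimens   = { },
+    }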
+--ldx]]-- + +local addkerns, addligatures, addtexligatures, unify, normalize, fixnames -- we will implement these later + +function afm.load(filename) + -- hm, for some reasons not resolved yet + filename = resolvers.findfile(filename,'afm') or "" + if filename ~= "" and not fonts.names.ignoredfile(filename) then + local name = file.removesuffix(file.basename(filename)) + local data = containers.read(afm.cache,name) + local attr = lfs.attributes(filename) + local size, time = attr.size or 0, attr.modification or 0 + -- + local pfbfile = file.replacesuffix(name,"pfb") + local pfbname = resolvers.findfile(pfbfile,"pfb") or "" + if pfbname == "" then + pfbname = resolvers.findfile(file.basename(pfbfile),"pfb") or "" + end + local pfbsize, pfbtime = 0, 0 + if pfbname ~= "" then + local attr = lfs.attributes(pfbname) + pfbsize = attr.size or 0 + pfbtime = attr.modification or 0 + end + if not data or data.size ~= size or data.time ~= time or data.pfbsize ~= pfbsize or data.pfbtime ~= pfbtime then + report_afm("reading %a",filename) + data = readafm(filename) + if data then + if pfbname ~= "" then + get_indexes(data,pfbname) + elseif trace_loading then + report_afm("no pfb file for %a",filename) + -- data.resources.filename = "unset" -- better than loading the afm file + end + report_afm("unifying %a",filename) + unify(data,filename) + if afm.addligatures then + report_afm("add ligatures") + addligatures(data) + end + if afm.addtexligatures then + report_afm("add tex ligatures") + addtexligatures(data) + end + if afm.addkerns then + report_afm("add extra kerns") + addkerns(data) + end + normalize(data) + fixnames(data) + report_afm("add tounicode data") + fonts.mappings.addtounicode(data,filename) + data.size = size + data.time = time + data.pfbsize = pfbsize + data.pfbtime = pfbtime + report_afm("saving %a in cache",name) + data.resources.unicodes = nil -- consistent with otf but here we save not much + data = containers.write(afm.cache, name, data) + data = containers.read(afm.cache,name) + end + if applyruntimefixes and data then + applyruntimefixes(filename,data) + end + end + return data + else + return nil + end +end + +local uparser = fonts.mappings.makenameparser() + +unify = function(data, filename) + local unicodevector = fonts.encodings.agl.unicodes -- loaded runtime in context + local unicodes, names = { }, { } + local private = constructors.privateoffset + local descriptions = data.descriptions + for name, blob in next, data.characters do + local code = unicodevector[name] -- or characters.name_to_unicode[name] + if not code then + code = lpegmatch(uparser,name) + if not code then + code = private + private = private + 1 + report_afm("assigning private slot %U for unknown glyph name %a",code,name) + end + end + local index = blob.index + unicodes[name] = code + names[name] = index + blob.name = name + descriptions[code] = { + boundingbox = blob.boundingbox, + width = blob.width, + kerns = blob.kerns, + index = index, + name = name, + } + end + for unicode, description in next, descriptions do + local kerns = description.kerns + if kerns then + local krn = { } + for name, kern in next, kerns do + local unicode = unicodes[name] + if unicode then + krn[unicode] = kern + else + -- print(unicode,name) + end + end + description.kerns = krn + end + end + data.characters = nil + local resources = data.resources + local filename = resources.filename or file.removesuffix(file.basename(filename)) + resources.filename = resolvers.unresolve(filename) -- no shortcut + resources.unicodes = 
unicodes -- name to unicode + resources.marks = { } -- todo + -- resources.names = names -- name to index + resources.private = private +end + +normalize = function(data) +end + +fixnames = function(data) + for k, v in next, data.descriptions do + local n = v.name + local r = overloads[n] + if r then + local name = r.name + if trace_indexing then + report_afm("renaming characters %a to %a",n,name) + end + v.name = name + v.unicode = r.unicode + end + end +end + + +--[[ldx-- +

These helpers extend the basic table with extra ligatures, texligatures +and extra kerns. This saves quite some lookups later.

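+For instance, an entry in the ligature helper data pairs a follower glyph with
+the resulting glyph name (a small self-contained sketch; the real tables live
+in afm.helpdata):
+
+    local ligatures = {
+      f = { { "i", "fi" }, { "l", "fl" } },   -- "f" followed by "i" becomes "fi", etc.
+    }
+    -- addthem() below then stores, per description:
+    --   descriptions[unicodes["f"]].ligatures = { [unicodes["i"]] = unicodes["fi"], ... }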
+--ldx]]-- + +local addthem = function(rawdata,ligatures) + if ligatures then + local descriptions = rawdata.descriptions + local resources = rawdata.resources + local unicodes = resources.unicodes + -- local names = resources.names + for ligname, ligdata in next, ligatures do + local one = descriptions[unicodes[ligname]] + if one then + for _, pair in next, ligdata do + local two, three = unicodes[pair[1]], unicodes[pair[2]] + if two and three then + local ol = one.ligatures + if ol then + if not ol[two] then + ol[two] = three + end + else + one.ligatures = { [two] = three } + end + end + end + end + end + end +end + +addligatures = function(rawdata) addthem(rawdata,afm.helpdata.ligatures ) end +addtexligatures = function(rawdata) addthem(rawdata,afm.helpdata.texligatures) end + +--[[ldx-- +

We keep the extra kerns in separate kerning tables so that we can use +them selectively.

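+The result is that a description can carry both tables side by side, and a
+feature can later decide which one gets injected (unicodes and values here
+are invented):
+
+    local description = {
+      kerns      = { [0x0076] = -60 },   -- straight from the afm file
+      extrakerns = { [0x0077] = -55 },   -- copied over from the base glyph by addkerns()
+    }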
+--ldx]]-- + +-- This is rather old code (from the beginning when we had only tfm). If +-- we unify the afm data (now we have names all over the place) then +-- we can use shcodes but there will be many more looping then. But we +-- could get rid of the tables in char-cmp then. Als, in the generic version +-- we don't use the character database. (Ok, we can have a context specific +-- variant). + +addkerns = function(rawdata) -- using shcodes is not robust here + local descriptions = rawdata.descriptions + local resources = rawdata.resources + local unicodes = resources.unicodes + local function do_it_left(what) + if what then + for unicode, description in next, descriptions do + local kerns = description.kerns + if kerns then + local extrakerns + for complex, simple in next, what do + complex = unicodes[complex] + simple = unicodes[simple] + if complex and simple then + local ks = kerns[simple] + if ks and not kerns[complex] then + if extrakerns then + extrakerns[complex] = ks + else + extrakerns = { [complex] = ks } + end + end + end + end + if extrakerns then + description.extrakerns = extrakerns + end + end + end + end + end + local function do_it_copy(what) + if what then + for complex, simple in next, what do + complex = unicodes[complex] + simple = unicodes[simple] + if complex and simple then + local complexdescription = descriptions[complex] + if complexdescription then -- optional + local simpledescription = descriptions[complex] + if simpledescription then + local extrakerns + local kerns = simpledescription.kerns + if kerns then + for unicode, kern in next, kerns do + if extrakerns then + extrakerns[unicode] = kern + else + extrakerns = { [unicode] = kern } + end + end + end + local extrakerns = simpledescription.extrakerns + if extrakerns then + for unicode, kern in next, extrakerns do + if extrakerns then + extrakerns[unicode] = kern + else + extrakerns = { [unicode] = kern } + end + end + end + if extrakerns then + complexdescription.extrakerns = extrakerns + end + end + end + end + end + end + end + -- add complex with values of simplified when present + do_it_left(afm.helpdata.leftkerned) + do_it_left(afm.helpdata.bothkerned) + -- copy kerns from simple char to complex char unless set + do_it_copy(afm.helpdata.bothkerned) + do_it_copy(afm.helpdata.rightkerned) +end + +--[[ldx-- +

The copying routine looks messy (and is indeed a bit messy).

+--ldx]]-- + +local function adddimensions(data) -- we need to normalize afm to otf i.e. indexed table instead of name + if data then + for unicode, description in next, data.descriptions do + local bb = description.boundingbox + if bb then + local ht, dp = bb[4], -bb[2] + if ht == 0 or ht < 0 then + -- no need to set it and no negative heights, nil == 0 + else + description.height = ht + end + if dp == 0 or dp < 0 then + -- no negative depths and no negative depths, nil == 0 + else + description.depth = dp + end + end + end + end +end + +local function copytotfm(data) + if data and data.descriptions then + local metadata = data.metadata + local resources = data.resources + local properties = derivetable(data.properties) + local descriptions = derivetable(data.descriptions) + local goodies = derivetable(data.goodies) + local characters = { } + local parameters = { } + local unicodes = resources.unicodes + -- + for unicode, description in next, data.descriptions do -- use parent table + characters[unicode] = { } + end + -- + local filename = constructors.checkedfilename(resources) + local fontname = metadata.fontname or metadata.fullname + local fullname = metadata.fullname or metadata.fontname + local endash = 0x0020 -- space + local emdash = 0x2014 + local spacer = "space" + local spaceunits = 500 + -- + local monospaced = metadata.isfixedpitch + local charwidth = metadata.charwidth + local italicangle = metadata.italicangle + local charxheight = metadata.xheight and metadata.xheight > 0 and metadata.xheight + properties.monospaced = monospaced + parameters.italicangle = italicangle + parameters.charwidth = charwidth + parameters.charxheight = charxheight + -- same as otf + if properties.monospaced then + if descriptions[endash] then + spaceunits, spacer = descriptions[endash].width, "space" + end + if not spaceunits and descriptions[emdash] then + spaceunits, spacer = descriptions[emdash].width, "emdash" + end + if not spaceunits and charwidth then + spaceunits, spacer = charwidth, "charwidth" + end + else + if descriptions[endash] then + spaceunits, spacer = descriptions[endash].width, "space" + end + if not spaceunits and charwidth then + spaceunits, spacer = charwidth, "charwidth" + end + end + spaceunits = tonumber(spaceunits) + if spaceunits < 200 then + -- todo: warning + end + -- + parameters.slant = 0 + parameters.space = spaceunits + parameters.space_stretch = 500 + parameters.space_shrink = 333 + parameters.x_height = 400 + parameters.quad = 1000 + -- + if italicangle and italicangle ~= 0 then + parameters.italicangle = italicangle + parameters.italicfactor = math.cos(math.rad(90+italicangle)) + parameters.slant = - math.tan(italicangle*math.pi/180) + end + if monospaced then + parameters.space_stretch = 0 + parameters.space_shrink = 0 + elseif afm.syncspace then + parameters.space_stretch = spaceunits/2 + parameters.space_shrink = spaceunits/3 + end + parameters.extra_space = parameters.space_shrink + if charxheight then + parameters.x_height = charxheight + else + -- same as otf + local x = 0x0078 -- x + if x then + local x = descriptions[x] + if x then + parameters.x_height = x.height + end + end + -- + end + local fd = data.fontdimens + if fd and fd[8] and fd[9] and fd[10] then -- math + for k,v in next, fd do + parameters[k] = v + end + end + -- + parameters.designsize = (metadata.designsize or 10)*65536 + parameters.ascender = abs(metadata.ascender or 0) + parameters.descender = abs(metadata.descender or 0) + parameters.units = 1000 + -- + properties.spacer = spacer + 
properties.encodingbytes = 2 + properties.format = fonts.formats[filename] or "type1" + properties.filename = filename + properties.fontname = fontname + properties.fullname = fullname + properties.psname = fullname + properties.name = filename or fullname or fontname + -- + if next(characters) then + return { + characters = characters, + descriptions = descriptions, + parameters = parameters, + resources = resources, + properties = properties, + goodies = goodies, + } + end + end + return nil +end + +--[[ldx-- +

Originally we had features kind of hard coded for AFM
+files, but since I expect to support more font formats, I decided
+to treat this font format like any other and handle features in a
+more configurable way.

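+A registration for this reader therefore looks just like the ones further
+down in this file; the feature name and initializer below are invented:
+
+    local function initializeexample(tfmdata,value)
+      -- tweak tfmdata.characters / tfmdata.descriptions as needed
+    end
+
+    registerafmfeature {
+      name         = "example",
+      description  = "an invented feature",
+      initializers = {
+        base = initializeexample,
+        node = initializeexample,
+      },
+    }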
+--ldx]]-- + +function afm.setfeatures(tfmdata,features) + local okay = constructors.initializefeatures("afm",tfmdata,features,trace_features,report_afm) + if okay then + return constructors.collectprocessors("afm",tfmdata,features,trace_features,report_afm) + else + return { } -- will become false + end +end + +local function addtables(data) + local resources = data.resources + local lookuptags = resources.lookuptags + local unicodes = resources.unicodes + if not lookuptags then + lookuptags = { } + resources.lookuptags = lookuptags + end + setmetatableindex(lookuptags,function(t,k) + local v = type(k) == "number" and ("lookup " .. k) or k + t[k] = v + return v + end) + if not unicodes then + unicodes = { } + resources.unicodes = unicodes + setmetatableindex(unicodes,function(t,k) + setmetatableindex(unicodes,nil) + for u, d in next, data.descriptions do + local n = d.name + if n then + t[n] = u + end + end + return rawget(t,k) + end) + end + constructors.addcoreunicodes(unicodes) -- do we really need this? +end + +local function afmtotfm(specification) + local afmname = specification.filename or specification.name + if specification.forced == "afm" or specification.format == "afm" then -- move this one up + if trace_loading then + report_afm("forcing afm format for %a",afmname) + end + else + local tfmname = findbinfile(afmname,"ofm") or "" + if tfmname ~= "" then + if trace_loading then + report_afm("fallback from afm to tfm for %a",afmname) + end + return -- just that + end + end + if afmname ~= "" then + -- weird, isn't this already done then? + local features = constructors.checkedfeatures("afm",specification.features.normal) + specification.features.normal = features + constructors.hashinstance(specification,true) -- also weird here + -- + specification = definers.resolve(specification) -- new, was forgotten + local cache_id = specification.hash + local tfmdata = containers.read(constructors.cache, cache_id) -- cache with features applied + if not tfmdata then + local rawdata = afm.load(afmname) + if rawdata and next(rawdata) then + addtables(rawdata) + adddimensions(rawdata) + tfmdata = copytotfm(rawdata) + if tfmdata and next(tfmdata) then + local shared = tfmdata.shared + if not shared then + shared = { } + tfmdata.shared = shared + end + shared.rawdata = rawdata + shared.features = features + shared.processes = afm.setfeatures(tfmdata,features) + end + elseif trace_loading then + report_afm("no (valid) afm file found with name %a",afmname) + end + tfmdata = containers.write(constructors.cache,cache_id,tfmdata) + end + return tfmdata + end +end + +--[[ldx-- +

As soon as we could intercept the TFM reader, I implemented an AFM
+reader. Since traditional TeX could use PFB
+fonts with TFM companions, the following method could also handle
+those cases, but now that we can handle OpenType directly we no longer
+need this feature.

+--ldx]]-- + +local function read_from_afm(specification) + local tfmdata = afmtotfm(specification) + if tfmdata then + tfmdata.properties.name = specification.name + tfmdata = constructors.scale(tfmdata, specification) + local allfeatures = tfmdata.shared.features or specification.features.normal + constructors.applymanipulators("afm",tfmdata,allfeatures,trace_features,report_afm) + fonts.loggers.register(tfmdata,'afm',specification) + end + return tfmdata +end + +--[[ldx-- +

Here comes the implementation of a few features. We only implement +those that make sense for this format.

+--ldx]]-- + +local function prepareligatures(tfmdata,ligatures,value) + if value then + local descriptions = tfmdata.descriptions + local hasligatures = false + for unicode, character in next, tfmdata.characters do + local description = descriptions[unicode] + local dligatures = description.ligatures + if dligatures then + local cligatures = character.ligatures + if not cligatures then + cligatures = { } + character.ligatures = cligatures + end + for unicode, ligature in next, dligatures do + cligatures[unicode] = { + char = ligature, + type = 0 + } + end + hasligatures = true + end + end + tfmdata.properties.hasligatures = hasligatures + end +end + +local function preparekerns(tfmdata,kerns,value) + if value then + local rawdata = tfmdata.shared.rawdata + local resources = rawdata.resources + local unicodes = resources.unicodes + local descriptions = tfmdata.descriptions + local haskerns = false + for u, chr in next, tfmdata.characters do + local d = descriptions[u] + local newkerns = d[kerns] + if newkerns then + local kerns = chr.kerns + if not kerns then + kerns = { } + chr.kerns = kerns + end + for k,v in next, newkerns do + local uk = unicodes[k] + if uk then + kerns[uk] = v + end + end + haskerns = true + end + end + tfmdata.properties.haskerns = haskerns + end +end + +local list = { + -- [0x0022] = 0x201D, + [0x0027] = 0x2019, + -- [0x0060] = 0x2018, +} + +local function texreplacements(tfmdata,value) + local descriptions = tfmdata.descriptions + local characters = tfmdata.characters + for k, v in next, list do + characters [k] = characters [v] -- we forget about kerns + descriptions[k] = descriptions[v] -- we forget about kerns + end +end + +local function ligatures (tfmdata,value) prepareligatures(tfmdata,'ligatures', value) end +local function texligatures(tfmdata,value) prepareligatures(tfmdata,'texligatures',value) end +local function kerns (tfmdata,value) preparekerns (tfmdata,'kerns', value) end +local function extrakerns (tfmdata,value) preparekerns (tfmdata,'extrakerns', value) end + +registerafmfeature { + name = "liga", + description = "traditional ligatures", + initializers = { + base = ligatures, + node = ligatures, + } +} + +registerafmfeature { + name = "kern", + description = "intercharacter kerning", + initializers = { + base = kerns, + node = kerns, + } +} + +registerafmfeature { + name = "extrakerns", + description = "additional intercharacter kerning", + initializers = { + base = extrakerns, + node = extrakerns, + } +} + +registerafmfeature { + name = 'tlig', + description = 'tex ligatures', + initializers = { + base = texligatures, + node = texligatures, + } +} + +registerafmfeature { + name = 'trep', + description = 'tex replacements', + initializers = { + base = texreplacements, + node = texreplacements, + } +} + +-- readers + +local check_tfm = readers.check_tfm + +fonts.formats.afm = "type1" +fonts.formats.pfb = "type1" + +local function check_afm(specification,fullname) + local foundname = findbinfile(fullname, 'afm') or "" -- just to be sure + if foundname == "" then + foundname = fonts.names.getfilename(fullname,"afm") or "" + end + if foundname == "" and afm.autoprefixed then + local encoding, shortname = match(fullname,"^(.-)%-(.*)$") -- context: encoding-name.* + if encoding and shortname and fonts.encodings.known[encoding] then + shortname = findbinfile(shortname,'afm') or "" -- just to be sure + if shortname ~= "" then + foundname = shortname + if trace_defining then + report_afm("stripping encoding prefix from filename %a",afmname) + end + end + 
end + end + if foundname ~= "" then + specification.filename = foundname + specification.format = "afm" + return read_from_afm(specification) + end +end + +function readers.afm(specification,method) + local fullname, tfmdata = specification.filename or "", nil + if fullname == "" then + local forced = specification.forced or "" + if forced ~= "" then + tfmdata = check_afm(specification,specification.name .. "." .. forced) + end + if not tfmdata then + method = method or definers.method or "afm or tfm" + if method == "tfm" then + tfmdata = check_tfm(specification,specification.name) + elseif method == "afm" then + tfmdata = check_afm(specification,specification.name) + elseif method == "tfm or afm" then + tfmdata = check_tfm(specification,specification.name) or check_afm(specification,specification.name) + else -- method == "afm or tfm" or method == "" then + tfmdata = check_afm(specification,specification.name) or check_tfm(specification,specification.name) + end + end + else + tfmdata = check_afm(specification,fullname) + end + return tfmdata +end + +function readers.pfb(specification,method) -- only called when forced + local original = specification.specification + if trace_defining then + report_afm("using afm reader for %a",original) + end + specification.specification = gsub(original,"%.pfb",".afm") + specification.forced = "afm" + return readers.afm(specification,method) +end diff --git a/src/fontloader/misc/fontloader-font-cid.lua b/src/fontloader/misc/fontloader-font-cid.lua new file mode 100644 index 0000000..0eaacdf --- /dev/null +++ b/src/fontloader/misc/fontloader-font-cid.lua @@ -0,0 +1,177 @@ +if not modules then modules = { } end modules ['font-cid'] = { + version = 1.001, + comment = "companion to font-otf.lua (cidmaps)", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local format, match, lower = string.format, string.match, string.lower +local tonumber = tonumber +local P, S, R, C, V, lpegmatch = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.match + +local fonts, logs, trackers = fonts, logs, trackers + +local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end) + +local report_otf = logs.reporter("fonts","otf loading") + +local cid = { } +fonts.cid = cid + +local cidmap = { } +local cidmax = 10 + +-- original string parser: 0.109, lpeg parser: 0.036 seconds for Adobe-CNS1-4.cidmap +-- +-- 18964 18964 (leader) +-- 0 /.notdef +-- 1..95 0020 +-- 99 3000 + +local number = C(R("09","af","AF")^1) +local space = S(" \n\r\t") +local spaces = space^0 +local period = P(".") +local periods = period * period +local name = P("/") * C((1-space)^1) + +local unicodes, names = { }, { } -- we could use Carg now + +local function do_one(a,b) + unicodes[tonumber(a)] = tonumber(b,16) +end + +local function do_range(a,b,c) + c = tonumber(c,16) + for i=tonumber(a),tonumber(b) do + unicodes[i] = c + c = c + 1 + end +end + +local function do_name(a,b) + names[tonumber(a)] = b +end + +local grammar = P { "start", + start = number * spaces * number * V("series"), + series = (spaces * (V("one") + V("range") + V("named")))^1, + one = (number * spaces * number) / do_one, + range = (number * periods * number * spaces * number) / do_range, + named = (number * spaces * name) / do_name +} + +local function loadcidfile(filename) + local data = io.loaddata(filename) + if data then + unicodes, names = { }, { } + lpegmatch(grammar,data) + local supplement, 
registry, ordering = match(filename,"^(.-)%-(.-)%-()%.(.-)$") + return { + supplement = supplement, + registry = registry, + ordering = ordering, + filename = filename, + unicodes = unicodes, + names = names, + } + end +end + +cid.loadfile = loadcidfile -- we use the frozen variant +local template = "%s-%s-%s.cidmap" + +local function locate(registry,ordering,supplement) + local filename = format(template,registry,ordering,supplement) + local hashname = lower(filename) + local found = cidmap[hashname] + if not found then + if trace_loading then + report_otf("checking cidmap, registry %a, ordering %a, supplement %a, filename %a",registry,ordering,supplement,filename) + end + local fullname = resolvers.findfile(filename,'cid') or "" + if fullname ~= "" then + found = loadcidfile(fullname) + if found then + if trace_loading then + report_otf("using cidmap file %a",filename) + end + cidmap[hashname] = found + found.usedname = file.basename(filename) + end + end + end + return found +end + +-- cf Arthur R. we can safely scan upwards since cids are downward compatible + +function cid.getmap(specification) + if not specification then + report_otf("invalid cidinfo specification, table expected") + return + end + local registry = specification.registry + local ordering = specification.ordering + local supplement = specification.supplement + local filename = format(registry,ordering,supplement) + local lowername = lower(filename) + local found = cidmap[lowername] + if found then + return found + end + if ordering == "Identity" then + local found = { + supplement = supplement, + registry = registry, + ordering = ordering, + filename = filename, + unicodes = { }, + names = { }, + } + cidmap[lowername] = found + return found + end + -- check for already loaded file + if trace_loading then + report_otf("cidmap needed, registry %a, ordering %a, supplement %a",registry,ordering,supplement) + end + found = locate(registry,ordering,supplement) + if not found then + local supnum = tonumber(supplement) + local cidnum = nil + -- next highest (alternatively we could start high) + if supnum < cidmax then + for s=supnum+1,cidmax do + local c = locate(registry,ordering,s) + if c then + found, cidnum = c, s + break + end + end + end + -- next lowest (least worse fit) + if not found and supnum > 0 then + for s=supnum-1,0,-1 do + local c = locate(registry,ordering,s) + if c then + found, cidnum = c, s + break + end + end + end + -- prevent further lookups -- somewhat tricky + registry = lower(registry) + ordering = lower(ordering) + if found and cidnum > 0 then + for s=0,cidnum-1 do + local filename = format(template,registry,ordering,s) + if not cidmap[filename] then + cidmap[filename] = found + end + end + end + end + return found +end diff --git a/src/fontloader/misc/fontloader-font-con.lua b/src/fontloader/misc/fontloader-font-con.lua new file mode 100644 index 0000000..bb96912 --- /dev/null +++ b/src/fontloader/misc/fontloader-font-con.lua @@ -0,0 +1,1404 @@ +if not modules then modules = { } end modules ['font-con'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- some names of table entries will be changed (no _) + +local next, tostring, rawget = next, tostring, rawget +local format, match, lower, gsub = string.format, string.match, string.lower, string.gsub +local utfbyte = utf.byte +local sort, insert, concat, sortedkeys, serialize, 
fastcopy = table.sort, table.insert, table.concat, table.sortedkeys, table.serialize, table.fastcopy +local derivetable = table.derive + +local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end) +local trace_scaling = false trackers.register("fonts.scaling" , function(v) trace_scaling = v end) + +local report_defining = logs.reporter("fonts","defining") + +-- watch out: no negative depths and negative eights permitted in regular fonts + +--[[ldx-- +

Here we only implement a few helper functions.

+--ldx]]-- + +local fonts = fonts +local constructors = fonts.constructors or { } +fonts.constructors = constructors +local handlers = fonts.handlers or { } -- can have preloaded tables +fonts.handlers = handlers + +local allocate = utilities.storage.allocate +local setmetatableindex = table.setmetatableindex + +-- will be directives + +constructors.dontembed = allocate() +constructors.autocleanup = true +constructors.namemode = "fullpath" -- will be a function + +constructors.version = 1.01 +constructors.cache = containers.define("fonts", "constructors", constructors.version, false) + +constructors.privateoffset = 0xF0000 -- 0x10FFFF + +constructors.cacheintex = true -- so we see the original table in fonts.font + +-- Some experimental helpers (handy for tracing): +-- +-- todo: extra: +-- +-- extra_space => space.extra +-- space => space.width +-- space_stretch => space.stretch +-- space_shrink => space.shrink + +-- We do keep the x-height, extra_space, space_shrink and space_stretch +-- around as these are low level official names. + +constructors.keys = { + properties = { + encodingbytes = "number", + embedding = "number", + cidinfo = { + }, + format = "string", + fontname = "string", + fullname = "string", + filename = "filename", + psname = "string", + name = "string", + virtualized = "boolean", + hasitalics = "boolean", + autoitalicamount = "basepoints", + nostackmath = "boolean", + noglyphnames = "boolean", + mode = "string", + hasmath = "boolean", + mathitalics = "boolean", + textitalics = "boolean", + finalized = "boolean", + }, + parameters = { + mathsize = "number", + scriptpercentage = "float", + scriptscriptpercentage = "float", + units = "cardinal", + designsize = "scaledpoints", + expansion = { + stretch = "integerscale", -- might become float + shrink = "integerscale", -- might become float + step = "integerscale", -- might become float + auto = "boolean", + }, + protrusion = { + auto = "boolean", + }, + slantfactor = "float", + extendfactor = "float", + factor = "float", + hfactor = "float", + vfactor = "float", + size = "scaledpoints", + units = "scaledpoints", + scaledpoints = "scaledpoints", + slantperpoint = "scaledpoints", + spacing = { + width = "scaledpoints", + stretch = "scaledpoints", + shrink = "scaledpoints", + extra = "scaledpoints", + }, + xheight = "scaledpoints", + quad = "scaledpoints", + ascender = "scaledpoints", + descender = "scaledpoints", + synonyms = { + space = "spacing.width", + spacestretch = "spacing.stretch", + spaceshrink = "spacing.shrink", + extraspace = "spacing.extra", + x_height = "xheight", + space_stretch = "spacing.stretch", + space_shrink = "spacing.shrink", + extra_space = "spacing.extra", + em = "quad", + ex = "xheight", + slant = "slantperpoint", + }, + }, + description = { + width = "basepoints", + height = "basepoints", + depth = "basepoints", + boundingbox = { }, + }, + character = { + width = "scaledpoints", + height = "scaledpoints", + depth = "scaledpoints", + italic = "scaledpoints", + }, +} + +-- This might become an interface: + +local designsizes = allocate() +constructors.designsizes = designsizes +local loadedfonts = allocate() +constructors.loadedfonts = loadedfonts + +--[[ldx-- +

We need to normalize the scale factor (in scaled points). This has to
+do with the fact that TeX uses a negative multiple of 1000 as
+a signal for a font scaled based on the design size.

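+A small worked example (not part of the loader): with the pt factor of 65536,
+a specification of -1200 (i.e. "scaled 1200") against a 10pt design size takes
+the first branch of constructors.scaled below, since designsize > factor:
+
+    local factor     = 65536.0                 -- scaled points per point
+    local designsize = 10 * factor             -- 10pt expressed in scaled points
+    local asked      = -1200                   -- negative: 1.2 times the design size
+    local size       = (- asked / 1000) * designsize
+    print(size)                                -- 786432 sp, i.e. 12pt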
+--ldx]]-- + +local factors = { + pt = 65536.0, + bp = 65781.8, +} + +function constructors.setfactor(f) + constructors.factor = factors[f or 'pt'] or factors.pt +end + +constructors.setfactor() + +function constructors.scaled(scaledpoints, designsize) -- handles designsize in sp as well + if scaledpoints < 0 then + if designsize then + local factor = constructors.factor + if designsize > factor then -- or just 1000 / when? mp? + return (- scaledpoints/1000) * designsize -- sp's + else + return (- scaledpoints/1000) * designsize * factor + end + else + return (- scaledpoints/1000) * 10 * factor + end + else + return scaledpoints + end +end + +--[[ldx-- +

Beware, the boundingbox is passed as reference so we may not overwrite it +in the process; numbers are of course copies. Here 65536 equals 1pt. (Due to +excessive memory usage in CJK fonts, we no longer pass the boundingbox.)

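+As a quick reminder of the unit, a default design size of 10 points is stored
+as 10 * 65536 scaled points:
+
+    assert(10 * 65536 == 655360)   -- 10pt expressed in scaled points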
+--ldx]]-- + +-- The scaler is only used for otf and afm and virtual fonts. If +-- a virtual font has italic correction make sure to set the +-- hasitalics flag. Some more flags will be added in +-- the future. + +--[[ldx-- +

The reason why the scaler was originally split is that for a while we experimented
+with a helper function. However, in practice the API calls are too slow to
+make this profitable and the Lua based variant was just faster. A day
+wasted, but an experience richer.

+--ldx]]-- + +-- we can get rid of the tfm instance when we have fast access to the +-- scaled character dimensions at the tex end, e.g. a fontobject.width +-- actually we already have some of that now as virtual keys in glyphs +-- +-- flushing the kern and ligature tables from memory saves a lot (only +-- base mode) but it complicates vf building where the new characters +-- demand this data .. solution: functions that access them + +function constructors.cleanuptable(tfmdata) + if constructors.autocleanup and tfmdata.properties.virtualized then + for k, v in next, tfmdata.characters do + if v.commands then v.commands = nil end + -- if v.kerns then v.kerns = nil end + end + end +end + +-- experimental, sharing kerns (unscaled and scaled) saves memory +-- local sharedkerns, basekerns = constructors.check_base_kerns(tfmdata) +-- loop over descriptions (afm and otf have descriptions, tfm not) +-- there is no need (yet) to assign a value to chr.tonunicode + +-- constructors.prepare_base_kerns(tfmdata) -- optimalization + +-- we have target.name=metricfile and target.fullname=RealName and target.filename=diskfilename +-- when collapsing fonts, luatex looks as both target.name and target.fullname as ttc files +-- can have multiple subfonts + +function constructors.calculatescale(tfmdata,scaledpoints) + local parameters = tfmdata.parameters + if scaledpoints < 0 then + scaledpoints = (- scaledpoints/1000) * (tfmdata.designsize or parameters.designsize) -- already in sp + end + return scaledpoints, scaledpoints / (parameters.units or 1000) -- delta +end + +local unscaled = { + ScriptPercentScaleDown = true, + ScriptScriptPercentScaleDown = true, + RadicalDegreeBottomRaisePercent = true +} + +function constructors.assignmathparameters(target,original) -- simple variant, not used in context + -- when a tfm file is loaded, it has already been scaled + -- and it never enters the scaled so this is otf only and + -- even then we do some extra in the context math plugins + local mathparameters = original.mathparameters + if mathparameters and next(mathparameters) then + local targetparameters = target.parameters + local targetproperties = target.properties + local targetmathparameters = { } + local factor = targetproperties.math_is_scaled and 1 or targetparameters.factor + for name, value in next, mathparameters do + if unscaled[name] then + targetmathparameters[name] = value + else + targetmathparameters[name] = value * factor + end + end + if not targetmathparameters.FractionDelimiterSize then + targetmathparameters.FractionDelimiterSize = 1.01 * targetparameters.size + end + if not mathparameters.FractionDelimiterDisplayStyleSize then + targetmathparameters.FractionDelimiterDisplayStyleSize = 2.40 * targetparameters.size + end + target.mathparameters = targetmathparameters + end +end + +function constructors.beforecopyingcharacters(target,original) + -- can be used for additional tweaking +end + +function constructors.aftercopyingcharacters(target,original) + -- can be used for additional tweaking +end + +-- It's probably ok to hash just the indices because there is not that much +-- chance that one will shift slots and leave the others unset then. Anyway, +-- there is of course some overhead here, but it might as well get compensated +-- by less time spent on including the font resource twice. For the moment +-- we default to false, so a macro package has to enable it explicitly. In +-- LuaTeX the fullname is used to identify a font as being unique. 
+ +constructors.sharefonts = false +constructors.nofsharedfonts = 0 +local sharednames = { } + +function constructors.trytosharefont(target,tfmdata) + if constructors.sharefonts then -- not robust ! + local characters = target.characters + local n = 1 + local t = { target.psname } + local u = sortedkeys(characters) + for i=1,#u do + local k = u[i] + n = n + 1 ; t[n] = k + n = n + 1 ; t[n] = characters[k].index or k + end + local h = md5.HEX(concat(t," ")) + local s = sharednames[h] + if s then + if trace_defining then + report_defining("font %a uses backend resources of font %a",target.fullname,s) + end + target.fullname = s + constructors.nofsharedfonts = constructors.nofsharedfonts + 1 + target.properties.sharedwith = s + else + sharednames[h] = target.fullname + end + end +end + +function constructors.enhanceparameters(parameters) + local xheight = parameters.x_height + local quad = parameters.quad + local space = parameters.space + local stretch = parameters.space_stretch + local shrink = parameters.space_shrink + local extra = parameters.extra_space + local slant = parameters.slant + parameters.xheight = xheight + parameters.spacestretch = stretch + parameters.spaceshrink = shrink + parameters.extraspace = extra + parameters.em = quad + parameters.ex = xheight + parameters.slantperpoint = slant + parameters.spacing = { + width = space, + stretch = stretch, + shrink = shrink, + extra = extra, + } +end + +function constructors.scale(tfmdata,specification) + local target = { } -- the new table + -- + if tonumber(specification) then + specification = { size = specification } + end + target.specification = specification + -- + local scaledpoints = specification.size + local relativeid = specification.relativeid + -- + local properties = tfmdata.properties or { } + local goodies = tfmdata.goodies or { } + local resources = tfmdata.resources or { } + local descriptions = tfmdata.descriptions or { } -- bad news if empty + local characters = tfmdata.characters or { } -- bad news if empty + local changed = tfmdata.changed or { } -- for base mode + local shared = tfmdata.shared or { } + local parameters = tfmdata.parameters or { } + local mathparameters = tfmdata.mathparameters or { } + -- + local targetcharacters = { } + local targetdescriptions = derivetable(descriptions) + local targetparameters = derivetable(parameters) + local targetproperties = derivetable(properties) + local targetgoodies = goodies -- we need to loop so no metatable + target.characters = targetcharacters + target.descriptions = targetdescriptions + target.parameters = targetparameters + -- target.mathparameters = targetmathparameters -- happens elsewhere + target.properties = targetproperties + target.goodies = targetgoodies + target.shared = shared + target.resources = resources + target.unscaled = tfmdata -- the original unscaled one + -- + -- specification.mathsize : 1=text 2=script 3=scriptscript + -- specification.textsize : natural (text)size + -- parameters.mathsize : 1=text 2=script 3=scriptscript >1000 enforced size (feature value other than yes) + -- + local mathsize = tonumber(specification.mathsize) or 0 + local textsize = tonumber(specification.textsize) or scaledpoints + local forcedsize = tonumber(parameters.mathsize ) or 0 + local extrafactor = tonumber(specification.factor ) or 1 + if (mathsize == 2 or forcedsize == 2) and parameters.scriptpercentage then + scaledpoints = parameters.scriptpercentage * textsize / 100 + elseif (mathsize == 3 or forcedsize == 3) and parameters.scriptscriptpercentage then + 
scaledpoints = parameters.scriptscriptpercentage * textsize / 100 + elseif forcedsize > 1000 then -- safeguard + scaledpoints = forcedsize + end + targetparameters.mathsize = mathsize -- context specific + targetparameters.textsize = textsize -- context specific + targetparameters.forcedsize = forcedsize -- context specific + targetparameters.extrafactor = extrafactor -- context specific + -- + local tounicode = fonts.mappings.tounicode + -- + local defaultwidth = resources.defaultwidth or 0 + local defaultheight = resources.defaultheight or 0 + local defaultdepth = resources.defaultdepth or 0 + local units = parameters.units or 1000 + -- + if target.fonts then + target.fonts = fastcopy(target.fonts) -- maybe we virtualize more afterwards + end + -- + -- boundary keys are no longer needed as we now have a string 'right_boundary' + -- that can be used in relevant tables (kerns and ligatures) ... not that I ever + -- used them + -- + -- boundarychar_label = 0, -- not needed + -- boundarychar = 65536, -- there is now a string 'right_boundary' + -- false_boundarychar = 65536, -- produces invalid tfm in luatex + -- + targetproperties.language = properties.language or "dflt" -- inherited + targetproperties.script = properties.script or "dflt" -- inherited + targetproperties.mode = properties.mode or "base" -- inherited + -- + local askedscaledpoints = scaledpoints + local scaledpoints, delta = constructors.calculatescale(tfmdata,scaledpoints,nil,specification) -- no shortcut, dan be redefined + -- + local hdelta = delta + local vdelta = delta + -- + target.designsize = parameters.designsize -- not really needed so it might become obsolete + target.units_per_em = units -- just a trigger for the backend + -- + local direction = properties.direction or tfmdata.direction or 0 -- pointless, as we don't use omf fonts at all + target.direction = direction + properties.direction = direction + -- + target.size = scaledpoints + -- + target.encodingbytes = properties.encodingbytes or 1 + target.embedding = properties.embedding or "subset" + target.tounicode = 1 + target.cidinfo = properties.cidinfo + target.format = properties.format + target.cache = constructors.cacheintex and "yes" or "renew" + -- + local fontname = properties.fontname or tfmdata.fontname -- for the moment we fall back on + local fullname = properties.fullname or tfmdata.fullname -- names in the tfmdata although + local filename = properties.filename or tfmdata.filename -- that is not the right place to + local psname = properties.psname or tfmdata.psname -- pass them + local name = properties.name or tfmdata.name + -- + if not psname or psname == "" then + -- name used in pdf file as well as for selecting subfont in ttc/dfont + psname = fontname or (fullname and fonts.names.cleanname(fullname)) + end + target.fontname = fontname + target.fullname = fullname + target.filename = filename + target.psname = psname + target.name = name + -- + -- + properties.fontname = fontname + properties.fullname = fullname + properties.filename = filename + properties.psname = psname + properties.name = name + -- expansion (hz) + local expansion = parameters.expansion + if expansion then + target.stretch = expansion.stretch + target.shrink = expansion.shrink + target.step = expansion.step + target.auto_expand = expansion.auto + end + -- protrusion + local protrusion = parameters.protrusion + if protrusion then + target.auto_protrude = protrusion.auto + end + -- widening + local extendfactor = parameters.extendfactor or 0 + if extendfactor ~= 0 and 
extendfactor ~= 1 then + hdelta = hdelta * extendfactor + target.extend = extendfactor * 1000 -- extent ? + else + target.extend = 1000 -- extent ? + end + -- slanting + local slantfactor = parameters.slantfactor or 0 + if slantfactor ~= 0 then + target.slant = slantfactor * 1000 + else + target.slant = 0 + end + -- + targetparameters.factor = delta + targetparameters.hfactor = hdelta + targetparameters.vfactor = vdelta + targetparameters.size = scaledpoints + targetparameters.units = units + targetparameters.scaledpoints = askedscaledpoints + -- + local isvirtual = properties.virtualized or tfmdata.type == "virtual" + local hasquality = target.auto_expand or target.auto_protrude + local hasitalics = properties.hasitalics + local autoitalicamount = properties.autoitalicamount + local stackmath = not properties.nostackmath + local nonames = properties.noglyphnames + local haskerns = properties.haskerns or properties.mode == "base" -- we can have afm in node mode + local hasligatures = properties.hasligatures or properties.mode == "base" -- we can have afm in node mode + -- + if changed and not next(changed) then + changed = false + end + -- + target.type = isvirtual and "virtual" or "real" + -- + target.postprocessors = tfmdata.postprocessors + -- + local targetslant = (parameters.slant or parameters[1] or 0) * factors.pt -- per point + local targetspace = (parameters.space or parameters[2] or 0) * hdelta + local targetspace_stretch = (parameters.space_stretch or parameters[3] or 0) * hdelta + local targetspace_shrink = (parameters.space_shrink or parameters[4] or 0) * hdelta + local targetx_height = (parameters.x_height or parameters[5] or 0) * vdelta + local targetquad = (parameters.quad or parameters[6] or 0) * hdelta + local targetextra_space = (parameters.extra_space or parameters[7] or 0) * hdelta + -- + targetparameters.slant = targetslant -- slantperpoint + targetparameters.space = targetspace + targetparameters.space_stretch = targetspace_stretch + targetparameters.space_shrink = targetspace_shrink + targetparameters.x_height = targetx_height + targetparameters.quad = targetquad + targetparameters.extra_space = targetextra_space + -- + local ascender = parameters.ascender + if ascender then + targetparameters.ascender = delta * ascender + end + local descender = parameters.descender + if descender then + targetparameters.descender = delta * descender + end + -- + constructors.enhanceparameters(targetparameters) -- official copies for us + -- + local protrusionfactor = (targetquad ~= 0 and 1000/targetquad) or 0 + local scaledwidth = defaultwidth * hdelta + local scaledheight = defaultheight * vdelta + local scaleddepth = defaultdepth * vdelta + -- + local hasmath = (properties.hasmath or next(mathparameters)) and true + -- + if hasmath then + constructors.assignmathparameters(target,tfmdata) -- does scaling and whatever is needed + properties.hasmath = true + target.nomath = false + target.MathConstants = target.mathparameters + else + properties.hasmath = false + target.nomath = true + target.mathparameters = nil -- nop + end + -- + local italickey = "italic" + local useitalics = true -- something context + -- + -- some context specific trickery (this will move to a plugin) + -- + if hasmath then + -- the latest luatex can deal with it itself so we now disable this + -- mechanism here + -- + -- if properties.mathitalics then + -- italickey = "italic_correction" + -- if trace_defining then + -- report_defining("math italics disabled for font %a, fullname %a, filename 
%a",name,fullname,filename) + -- end + -- end + autoitalicamount = false -- new + elseif properties.textitalics then + italickey = "italic_correction" + useitalics = false + if properties.delaytextitalics then + autoitalicamount = false + end + end + -- + -- end of context specific trickery + -- + if trace_defining then + report_defining("defining tfm, name %a, fullname %a, filename %a, hscale %a, vscale %a, math %a, italics %a", + name,fullname,filename,hdelta,vdelta, + hasmath and "enabled" or "disabled",useitalics and "enabled" or "disabled") + end + -- + constructors.beforecopyingcharacters(target,tfmdata) + -- + local sharedkerns = { } + -- + -- we can have a dumb mode (basemode without math etc) that skips most + -- + for unicode, character in next, characters do + local chr, description, index + if changed then + local c = changed[unicode] + if c then + description = descriptions[c] or descriptions[unicode] or character + character = characters[c] or character + index = description.index or c + else + description = descriptions[unicode] or character + index = description.index or unicode + end + else + description = descriptions[unicode] or character + index = description.index or unicode + end + local width = description.width + local height = description.height + local depth = description.depth + if width then width = hdelta*width else width = scaledwidth end + if height then height = vdelta*height else height = scaledheight end + -- if depth then depth = vdelta*depth else depth = scaleddepth end + if depth and depth ~= 0 then + depth = delta*depth + if nonames then + chr = { + index = index, + height = height, + depth = depth, + width = width, + } + else + chr = { + name = description.name, + index = index, + height = height, + depth = depth, + width = width, + } + end + else + -- this saves a little bit of memory time and memory, esp for big cjk fonts + if nonames then + chr = { + index = index, + height = height, + width = width, + } + else + chr = { + name = description.name, + index = index, + height = height, + width = width, + } + end + end + local isunicode = description.unicode + if isunicode then + chr.unicode = isunicode + chr.tounicode = tounicode(isunicode) + end + if hasquality then + -- we could move these calculations elsewhere (saves calculations) + local ve = character.expansion_factor + if ve then + chr.expansion_factor = ve*1000 -- expansionfactor, hm, can happen elsewhere + end + local vl = character.left_protruding + if vl then + chr.left_protruding = protrusionfactor*width*vl + end + local vr = character.right_protruding + if vr then + chr.right_protruding = protrusionfactor*width*vr + end + end + -- + if autoitalicamount then + local vi = description.italic + if not vi then + local vi = description.boundingbox[3] - description.width + autoitalicamount + if vi > 0 then -- < 0 indicates no overshoot or a very small auto italic + chr[italickey] = vi*hdelta + end + elseif vi ~= 0 then + chr[italickey] = vi*hdelta + end + elseif hasitalics then + local vi = description.italic + if vi and vi ~= 0 then + chr[italickey] = vi*hdelta + end + end + -- to be tested + if hasmath then + -- todo, just operate on descriptions.math + local vn = character.next + if vn then + chr.next = vn + else + local vv = character.vert_variants + if vv then + local t = { } + for i=1,#vv do + local vvi = vv[i] + t[i] = { + ["start"] = (vvi["start"] or 0)*vdelta, + ["end"] = (vvi["end"] or 0)*vdelta, + ["advance"] = (vvi["advance"] or 0)*vdelta, + ["extender"] = vvi["extender"], + 
["glyph"] = vvi["glyph"], + } + end + chr.vert_variants = t + else + local hv = character.horiz_variants + if hv then + local t = { } + for i=1,#hv do + local hvi = hv[i] + t[i] = { + ["start"] = (hvi["start"] or 0)*hdelta, + ["end"] = (hvi["end"] or 0)*hdelta, + ["advance"] = (hvi["advance"] or 0)*hdelta, + ["extender"] = hvi["extender"], + ["glyph"] = hvi["glyph"], + } + end + chr.horiz_variants = t + end + end + end + local va = character.top_accent + if va then + chr.top_accent = vdelta*va + end + if stackmath then + local mk = character.mathkerns -- not in math ? + if mk then + local kerns = { } + local v = mk.top_right if v then local k = { } for i=1,#v do local vi = v[i] + k[i] = { height = vdelta*vi.height, kern = vdelta*vi.kern } + end kerns.top_right = k end + local v = mk.top_left if v then local k = { } for i=1,#v do local vi = v[i] + k[i] = { height = vdelta*vi.height, kern = vdelta*vi.kern } + end kerns.top_left = k end + local v = mk.bottom_left if v then local k = { } for i=1,#v do local vi = v[i] + k[i] = { height = vdelta*vi.height, kern = vdelta*vi.kern } + end kerns.bottom_left = k end + local v = mk.bottom_right if v then local k = { } for i=1,#v do local vi = v[i] + k[i] = { height = vdelta*vi.height, kern = vdelta*vi.kern } + end kerns.bottom_right = k end + chr.mathkern = kerns -- singular -> should be patched in luatex ! + end + end + end + if haskerns then + local vk = character.kerns + if vk then + local s = sharedkerns[vk] + if not s then + s = { } + for k,v in next, vk do s[k] = v*hdelta end + sharedkerns[vk] = s + end + chr.kerns = s + end + end + if hasligatures then + local vl = character.ligatures + if vl then + if true then + chr.ligatures = vl -- shared + else + local tt = { } + for i, l in next, vl do + tt[i] = l + end + chr.ligatures = tt + end + end + end + if isvirtual then + local vc = character.commands + if vc then + -- we assume non scaled commands here + -- tricky .. 
we need to scale pseudo math glyphs too + -- which is why we deal with rules too + local ok = false + for i=1,#vc do + local key = vc[i][1] + if key == "right" or key == "down" then + ok = true + break + end + end + if ok then + local tt = { } + for i=1,#vc do + local ivc = vc[i] + local key = ivc[1] + if key == "right" then + tt[i] = { key, ivc[2]*hdelta } + elseif key == "down" then + tt[i] = { key, ivc[2]*vdelta } + elseif key == "rule" then + tt[i] = { key, ivc[2]*vdelta, ivc[3]*hdelta } + else -- not comment + tt[i] = ivc -- shared since in cache and untouched + end + end + chr.commands = tt + else + chr.commands = vc + end + chr.index = nil + end + end + targetcharacters[unicode] = chr + end + -- + constructors.aftercopyingcharacters(target,tfmdata) + -- + constructors.trytosharefont(target,tfmdata) + -- + return target +end + +function constructors.finalize(tfmdata) + if tfmdata.properties and tfmdata.properties.finalized then + return + end + -- + if not tfmdata.characters then + return nil + end + -- + if not tfmdata.goodies then + tfmdata.goodies = { } -- context specific + end + -- + local parameters = tfmdata.parameters + if not parameters then + return nil + end + -- + if not parameters.expansion then + parameters.expansion = { + stretch = tfmdata.stretch or 0, + shrink = tfmdata.shrink or 0, + step = tfmdata.step or 0, + auto = tfmdata.auto_expand or false, + } + end + -- + if not parameters.protrusion then + parameters.protrusion = { + auto = auto_protrude + } + end + -- + if not parameters.size then + parameters.size = tfmdata.size + end + -- + if not parameters.extendfactor then + parameters.extendfactor = tfmdata.extend or 0 + end + -- + if not parameters.slantfactor then + parameters.slantfactor = tfmdata.slant or 0 + end + -- + if not parameters.designsize then + parameters.designsize = tfmdata.designsize or (factors.pt * 10) + end + -- + if not parameters.units then + parameters.units = tfmdata.units_per_em or 1000 + end + -- + if not tfmdata.descriptions then + local descriptions = { } -- yes or no + setmetatableindex(descriptions, function(t,k) local v = { } t[k] = v return v end) + tfmdata.descriptions = descriptions + end + -- + local properties = tfmdata.properties + if not properties then + properties = { } + tfmdata.properties = properties + end + -- + if not properties.virtualized then + properties.virtualized = tfmdata.type == "virtual" + end + -- + if not tfmdata.properties then + tfmdata.properties = { + fontname = tfmdata.fontname, + filename = tfmdata.filename, + fullname = tfmdata.fullname, + name = tfmdata.name, + psname = tfmdata.psname, + -- + encodingbytes = tfmdata.encodingbytes or 1, + embedding = tfmdata.embedding or "subset", + tounicode = tfmdata.tounicode or 1, + cidinfo = tfmdata.cidinfo or nil, + format = tfmdata.format or "type1", + direction = tfmdata.direction or 0, + } + end + if not tfmdata.resources then + tfmdata.resources = { } + end + if not tfmdata.shared then + tfmdata.shared = { } + end + -- + -- tfmdata.fonts + -- tfmdata.unscaled + -- + if not properties.hasmath then + properties.hasmath = not tfmdata.nomath + end + -- + tfmdata.MathConstants = nil + tfmdata.postprocessors = nil + -- + tfmdata.fontname = nil + tfmdata.filename = nil + tfmdata.fullname = nil + tfmdata.name = nil -- most tricky part + tfmdata.psname = nil + -- + tfmdata.encodingbytes = nil + tfmdata.embedding = nil + tfmdata.tounicode = nil + tfmdata.cidinfo = nil + tfmdata.format = nil + tfmdata.direction = nil + tfmdata.type = nil + tfmdata.nomath = nil + 
tfmdata.designsize = nil + -- + tfmdata.size = nil + tfmdata.stretch = nil + tfmdata.shrink = nil + tfmdata.step = nil + tfmdata.auto_expand = nil + tfmdata.auto_protrude = nil + tfmdata.extend = nil + tfmdata.slant = nil + tfmdata.units_per_em = nil + -- + tfmdata.cache = nil + -- + properties.finalized = true + -- + return tfmdata +end + +--[[ldx-- +

A unique hash value is generated by:

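+As an illustration (the feature set below is made up), a specification carrying
+
+    local specification = {
+      features = {
+        normal = { mode = "node", liga = true, kern = true },
+      },
+    }
+
+is hashed by the functions below as "normal:kern=true+liga=true+mode=node";
+hashinstance then appends the size (and optional fallbacks) to that string.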
+--ldx]]-- + +local hashmethods = { } +constructors.hashmethods = hashmethods + +function constructors.hashfeatures(specification) -- will be overloaded + local features = specification.features + if features then + local t, tn = { }, 0 + for category, list in next, features do + if next(list) then + local hasher = hashmethods[category] + if hasher then + local hash = hasher(list) + if hash then + tn = tn + 1 + t[tn] = category .. ":" .. hash + end + end + end + end + if tn > 0 then + return concat(t," & ") + end + end + return "unknown" +end + +hashmethods.normal = function(list) + local s = { } + local n = 0 + for k, v in next, list do + if not k then + -- no need to add to hash + elseif k == "number" or k == "features" then + -- no need to add to hash (maybe we need a skip list) + else + n = n + 1 + s[n] = k + end + end + if n > 0 then + sort(s) + for i=1,n do + local k = s[i] + s[i] = k .. '=' .. tostring(list[k]) + end + return concat(s,"+") + end +end + +--[[ldx-- +

In principle we can share tfm tables when we are in node mode for a font, but then +we need to define a font switch as an id/attr switch which is no fun, so in that +case users can best use dynamic features ... so, we will not use that speedup. Okay, +when we get rid of base mode we can optimize even further by sharing, but then we +lose our testcases.

+--ldx]]-- + +function constructors.hashinstance(specification,force) + local hash, size, fallbacks = specification.hash, specification.size, specification.fallbacks + if force or not hash then + hash = constructors.hashfeatures(specification) + specification.hash = hash + end + if size < 1000 and designsizes[hash] then + size = math.round(constructors.scaled(size,designsizes[hash])) + specification.size = size + end + if fallbacks then + return hash .. ' @ ' .. tostring(size) .. ' @ ' .. fallbacks + else + return hash .. ' @ ' .. tostring(size) + end +end + +function constructors.setname(tfmdata,specification) -- todo: get specification from tfmdata + if constructors.namemode == "specification" then + -- not to be used in context ! + local specname = specification.specification + if specname then + tfmdata.properties.name = specname + if trace_defining then + report_otf("overloaded fontname %a",specname) + end + end + end +end + +function constructors.checkedfilename(data) + local foundfilename = data.foundfilename + if not foundfilename then + local askedfilename = data.filename or "" + if askedfilename ~= "" then + askedfilename = resolvers.resolve(askedfilename) -- no shortcut + foundfilename = resolvers.findbinfile(askedfilename,"") or "" + if foundfilename == "" then + report_defining("source file %a is not found",askedfilename) + foundfilename = resolvers.findbinfile(file.basename(askedfilename),"") or "" + if foundfilename ~= "" then + report_defining("using source file %a due to cache mismatch",foundfilename) + end + end + end + data.foundfilename = foundfilename + end + return foundfilename +end + +local formats = allocate() +fonts.formats = formats + +setmetatableindex(formats, function(t,k) + local l = lower(k) + if rawget(t,k) then + t[k] = l + return l + end + return rawget(t,file.suffix(l)) +end) + +local locations = { } + +local function setindeed(mode,target,group,name,action,position) + local t = target[mode] + if not t then + report_defining("fatal error in setting feature %a, group %a, mode %a",name,group,mode) + os.exit() + elseif position then + -- todo: remove existing + insert(t, position, { name = name, action = action }) + else + for i=1,#t do + local ti = t[i] + if ti.name == name then + ti.action = action + return + end + end + insert(t, { name = name, action = action }) + end +end + +local function set(group,name,target,source) + target = target[group] + if not target then + report_defining("fatal target error in setting feature %a, group %a",name,group) + os.exit() + end + local source = source[group] + if not source then + report_defining("fatal source error in setting feature %a, group %a",name,group) + os.exit() + end + local node = source.node + local base = source.base + local position = source.position + if node then + setindeed("node",target,group,name,node,position) + end + if base then + setindeed("base",target,group,name,base,position) + end +end + +local function register(where,specification) + local name = specification.name + if name and name ~= "" then + local default = specification.default + local description = specification.description + local initializers = specification.initializers + local processors = specification.processors + local manipulators = specification.manipulators + local modechecker = specification.modechecker + if default then + where.defaults[name] = default + end + if description and description ~= "" then + where.descriptions[name] = description + end + if initializers then + set('initializers',name,where,specification) + 
end + if processors then + set('processors', name,where,specification) + end + if manipulators then + set('manipulators',name,where,specification) + end + if modechecker then + where.modechecker = modechecker + end + end +end + +constructors.registerfeature = register + +function constructors.getfeatureaction(what,where,mode,name) + what = handlers[what].features + if what then + where = what[where] + if where then + mode = where[mode] + if mode then + for i=1,#mode do + local m = mode[i] + if m.name == name then + return m.action + end + end + end + end + end +end + +function constructors.newhandler(what) -- could be a metatable newindex + local handler = handlers[what] + if not handler then + handler = { } + handlers[what] = handler + end + return handler +end + +function constructors.newfeatures(what) -- could be a metatable newindex + local handler = handlers[what] + local features = handler.features + if not features then + local tables = handler.tables -- can be preloaded + local statistics = handler.statistics -- can be preloaded + features = allocate { + defaults = { }, + descriptions = tables and tables.features or { }, + used = statistics and statistics.usedfeatures or { }, + initializers = { base = { }, node = { } }, + processors = { base = { }, node = { } }, + manipulators = { base = { }, node = { } }, + } + features.register = function(specification) return register(features,specification) end + handler.features = features -- will also become hidden + end + return features +end + +--[[ldx-- +

We need to check for default features. For this we provide +a helper function.
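A hypothetical call, assuming an "otf" handler whose defaults have been
registered elsewhere:

  local f = constructors.checkedfeatures("otf", { smcp = true })
  -- f is a copy of { smcp = true } filled up with the registered defaults;
  -- an explicit false in the passed table is kept (only nil entries get a default)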

+--ldx]]-- + +function constructors.checkedfeatures(what,features) + local defaults = handlers[what].features.defaults + if features and next(features) then + features = fastcopy(features) -- can be inherited (mt) but then no loops possible + for key, value in next, defaults do + if features[key] == nil then + features[key] = value + end + end + return features + else + return fastcopy(defaults) -- we can change features in place + end +end + +-- before scaling + +function constructors.initializefeatures(what,tfmdata,features,trace,report) + if features and next(features) then + local properties = tfmdata.properties or { } -- brrr + local whathandler = handlers[what] + local whatfeatures = whathandler.features + local whatinitializers = whatfeatures.initializers + local whatmodechecker = whatfeatures.modechecker + -- properties.mode can be enforces (for instance in font-otd) + local mode = properties.mode or (whatmodechecker and whatmodechecker(tfmdata,features,features.mode)) or features.mode or "base" + properties.mode = mode -- also status + features.mode = mode -- both properties.mode or features.mode can be changed + -- + local done = { } + while true do + local redo = false + local initializers = whatfeatures.initializers[mode] + if initializers then + for i=1,#initializers do + local step = initializers[i] + local feature = step.name +-- we could intercept mode here .. needs a rewrite of this whole loop then but it's cleaner that way + local value = features[feature] + if not value then + -- disabled + elseif done[feature] then + -- already done + else + local action = step.action + if trace then + report("initializing feature %a to %a for mode %a for font %a",feature, + value,mode,tfmdata.properties.fullname) + end + action(tfmdata,value,features) -- can set mode (e.g. 
goodies) so it can trigger a restart + if mode ~= properties.mode or mode ~= features.mode then + if whatmodechecker then + properties.mode = whatmodechecker(tfmdata,features,properties.mode) -- force checking + features.mode = properties.mode + end + if mode ~= properties.mode then + mode = properties.mode + redo = true + end + end + done[feature] = true + end + if redo then + break + end + end + if not redo then + break + end + else + break + end + end + properties.mode = mode -- to be sure + return true + else + return false + end +end + +-- while typesetting + +function constructors.collectprocessors(what,tfmdata,features,trace,report) + local processes, nofprocesses = { }, 0 + if features and next(features) then + local properties = tfmdata.properties + local whathandler = handlers[what] + local whatfeatures = whathandler.features + local whatprocessors = whatfeatures.processors + local mode = properties.mode + local processors = whatprocessors[mode] + if processors then + for i=1,#processors do + local step = processors[i] + local feature = step.name + if features[feature] then + local action = step.action + if trace then + report("installing feature processor %a for mode %a for font %a",feature,mode,tfmdata.properties.fullname) + end + if action then + nofprocesses = nofprocesses + 1 + processes[nofprocesses] = action + end + end + end + elseif trace then + report("no feature processors for mode %a for font %a",mode,properties.fullname) + end + end + return processes +end + +-- after scaling + +function constructors.applymanipulators(what,tfmdata,features,trace,report) + if features and next(features) then + local properties = tfmdata.properties + local whathandler = handlers[what] + local whatfeatures = whathandler.features + local whatmanipulators = whatfeatures.manipulators + local mode = properties.mode + local manipulators = whatmanipulators[mode] + if manipulators then + for i=1,#manipulators do + local step = manipulators[i] + local feature = step.name + local value = features[feature] + if value then + local action = step.action + if trace then + report("applying feature manipulator %a for mode %a for font %a",feature,mode,properties.fullname) + end + if action then + action(tfmdata,feature,value) + end + end + end + end + end +end + +function constructors.addcoreunicodes(unicodes) -- maybe make this a metatable if used at all + if not unicodes then + unicodes = { } + end + unicodes.space = 0x0020 + unicodes.hyphen = 0x002D + unicodes.zwj = 0x200D + unicodes.zwnj = 0x200C + return unicodes +end + +-- -- keep for a while: old tounicode code +-- +-- if changed then +-- -- basemode hack (we try to catch missing tounicodes, e.g. needed for ssty in math cambria) +-- local c = changed[unicode] +-- if c then +-- -- local ligatures = character.ligatures -- the original ligatures (as we cannot rely on remapping) +-- description = descriptions[c] or descriptions[unicode] or character +-- character = characters[c] or character +-- index = description.index or c +-- if tounicode then +-- touni = tounicode[index] -- nb: index! +-- if not touni then -- goodie +-- local d = descriptions[unicode] or characters[unicode] +-- local i = d.index or unicode +-- touni = tounicode[i] -- nb: index! +-- end +-- end +-- -- if ligatures and not character.ligatures then +-- -- character.ligatures = ligatures -- the original targets (for now at least.. 
see libertine smallcaps) +-- -- end +-- else +-- description = descriptions[unicode] or character +-- index = description.index or unicode +-- if tounicode then +-- touni = tounicode[index] -- nb: index! +-- end +-- end +-- else +-- description = descriptions[unicode] or character +-- index = description.index or unicode +-- if tounicode then +-- touni = tounicode[index] -- nb: index! +-- end +-- end diff --git a/src/fontloader/misc/fontloader-font-def.lua b/src/fontloader/misc/fontloader-font-def.lua new file mode 100644 index 0000000..fdded3c --- /dev/null +++ b/src/fontloader/misc/fontloader-font-def.lua @@ -0,0 +1,452 @@ +if not modules then modules = { } end modules ['font-def'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- We can overload some of the definers.functions so we don't local them. + +local format, gmatch, match, find, lower, gsub = string.format, string.gmatch, string.match, string.find, string.lower, string.gsub +local tostring, next = tostring, next +local lpegmatch = lpeg.match +local suffixonly, removesuffix = file.suffix, file.removesuffix + +local allocate = utilities.storage.allocate + +local trace_defining = false trackers .register("fonts.defining", function(v) trace_defining = v end) +local directive_embedall = false directives.register("fonts.embedall", function(v) directive_embedall = v end) + +trackers.register("fonts.loading", "fonts.defining", "otf.loading", "afm.loading", "tfm.loading") +trackers.register("fonts.all", "fonts.*", "otf.*", "afm.*", "tfm.*") + +local report_defining = logs.reporter("fonts","defining") + +--[[ldx-- +

Here we deal with defining fonts. We do so by intercepting the +default loader that only handles tfm.
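The interception itself is the callback registration at the bottom of this
file:

  callbacks.register('define_font', definers.read, "definition of fonts (tfmdata preparation)")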

+--ldx]]-- + +local fonts = fonts +local fontdata = fonts.hashes.identifiers +local readers = fonts.readers +local definers = fonts.definers +local specifiers = fonts.specifiers +local constructors = fonts.constructors +local fontgoodies = fonts.goodies + +readers.sequence = allocate { 'otf', 'ttf', 'afm', 'tfm', 'lua' } -- dfont ttc + +local variants = allocate() +specifiers.variants = variants + +definers.methods = definers.methods or { } + +local internalized = allocate() -- internal tex numbers (private) +local lastdefined = nil -- we don't want this one to end up in s-tra-02 + +local loadedfonts = constructors.loadedfonts +local designsizes = constructors.designsizes + +-- not in generic (some day I'll make two defs, one for context, one for generic) + +local resolvefile = fontgoodies and fontgoodies.filenames and fontgoodies.filenames.resolve or function(s) return s end + +--[[ldx-- +

We hardly gain anything when we cache the final (pre scaled) tfm +table. But it can be handy for debugging, so we no +longer carry this code along. Also, we now have quite some references +to other tables so we would end up with lots of catches.

+--ldx]]-- + +--[[ldx-- +

We can prefix a font specification by name: or +file:. The first case will result in a lookup in the +synonym table.

+ + +[ name: | file: ] identifier [ separator [ specification ] ] + + +

The following function splits the font specification into components +and prepares a table that will move along as we proceed.
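A sketch of such a split, assuming a "*" specifier has been added with
definers.registersplit (the font name is just an example):

  local lookup, name, sub, method, detail =
      definers.getspecification("file:iwona-regular(1)*kern=yes")
  -- lookup = "file", name = "iwona-regular", sub = "1", method = "*", detail = "kern=yes"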

+--ldx]]-- + +-- beware, we discard additional specs +-- +-- method:name method:name(sub) method:name(sub)*spec method:name*spec +-- name name(sub) name(sub)*spec name*spec +-- name@spec*oeps + +local splitter, splitspecifiers = nil, "" -- not so nice + +local P, C, S, Cc = lpeg.P, lpeg.C, lpeg.S, lpeg.Cc + +local left = P("(") +local right = P(")") +local colon = P(":") +local space = P(" ") + +definers.defaultlookup = "file" + +local prefixpattern = P(false) + +local function addspecifier(symbol) + splitspecifiers = splitspecifiers .. symbol + local method = S(splitspecifiers) + local lookup = C(prefixpattern) * colon + local sub = left * C(P(1-left-right-method)^1) * right + local specification = C(method) * C(P(1)^1) + local name = C((1-sub-specification)^1) + splitter = P((lookup + Cc("")) * name * (sub + Cc("")) * (specification + Cc(""))) +end + +local function addlookup(str,default) + prefixpattern = prefixpattern + P(str) +end + +definers.addlookup = addlookup + +addlookup("file") +addlookup("name") +addlookup("spec") + +local function getspecification(str) + return lpegmatch(splitter,str or "") -- weird catch +end + +definers.getspecification = getspecification + +function definers.registersplit(symbol,action,verbosename) + addspecifier(symbol) + variants[symbol] = action + if verbosename then + variants[verbosename] = action + end +end + +local function makespecification(specification,lookup,name,sub,method,detail,size) + size = size or 655360 + if not lookup or lookup == "" then + lookup = definers.defaultlookup + end + if trace_defining then + report_defining("specification %a, lookup %a, name %a, sub %a, method %a, detail %a", + specification, lookup, name, sub, method, detail) + end + local t = { + lookup = lookup, -- forced type + specification = specification, -- full specification + size = size, -- size in scaled points or -1000*n + name = name, -- font or filename + sub = sub, -- subfont (eg in ttc) + method = method, -- specification method + detail = detail, -- specification + resolved = "", -- resolved font name + forced = "", -- forced loader + features = { }, -- preprocessed features + } + return t +end + + +definers.makespecification = makespecification + +function definers.analyze(specification, size) + -- can be optimized with locals + local lookup, name, sub, method, detail = getspecification(specification or "") + return makespecification(specification, lookup, name, sub, method, detail, size) +end + +--[[ldx-- +

We can resolve the filename using the next function:
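For instance (assuming the otf suffix is known to fonts.formats):

  local spec = { lookup = "file", name = "texgyrepagella-regular.otf" }
  definers.resolvers.file(spec)
  -- spec.forced = "otf", spec.forcedname = "texgyrepagella-regular.otf",
  -- spec.name   = "texgyrepagella-regular"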

+--ldx]]-- + +definers.resolvers = definers.resolvers or { } +local resolvers = definers.resolvers + +-- todo: reporter + +function resolvers.file(specification) + local name = resolvefile(specification.name) -- catch for renames + local suffix = lower(suffixonly(name)) + if fonts.formats[suffix] then + specification.forced = suffix + specification.forcedname = name + specification.name = removesuffix(name) + else + specification.name = name -- can be resolved + end +end + +function resolvers.name(specification) + local resolve = fonts.names.resolve + if resolve then + local resolved, sub = resolve(specification.name,specification.sub,specification) -- we pass specification for overloaded versions + if resolved then + specification.resolved = resolved + specification.sub = sub + local suffix = lower(suffixonly(resolved)) + if fonts.formats[suffix] then + specification.forced = suffix + specification.forcedname = resolved + specification.name = removesuffix(resolved) + else + specification.name = resolved + end + end + else + resolvers.file(specification) + end +end + +function resolvers.spec(specification) + local resolvespec = fonts.names.resolvespec + if resolvespec then + local resolved, sub = resolvespec(specification.name,specification.sub,specification) -- we pass specification for overloaded versions + if resolved then + specification.resolved = resolved + specification.sub = sub + specification.forced = lower(suffixonly(resolved)) + specification.forcedname = resolved + specification.name = removesuffix(resolved) + end + else + resolvers.name(specification) + end +end + +function definers.resolve(specification) + if not specification.resolved or specification.resolved == "" then -- resolved itself not per se in mapping hash + local r = resolvers[specification.lookup] + if r then + r(specification) + end + end + if specification.forced == "" then + specification.forced = nil + specification.forcedname = nil + end + specification.hash = lower(specification.name .. ' @ ' .. constructors.hashfeatures(specification)) + if specification.sub and specification.sub ~= "" then + specification.hash = specification.sub .. ' @ ' .. specification.hash + end + return specification +end + +--[[ldx-- +

The main read function either uses a forced reader (as determined by +a lookup) or tries to resolve the name using the list of readers.

+ +

We need to cache when possible. We do cache raw tfm data (from tfm, afm or otf). After that we can cache based +on specification (name) and size, that is, only TeX needs a number +for an already loaded font. However, it may make sense to cache fonts +before they're scaled as well (store tfm tables with applied methods +and features). However, there may be a relation between the size and +features (esp in virtual fonts) so let's not do that now.

+ +

Watch out, here we do load a font, but we don't prepare the +specification yet.
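A minimal sketch of the usual call sequence (the file name is just an
example); loadfont expects the specification to be resolved already:

  local spec    = definers.resolve(definers.analyze("file:lmroman10-regular", 655360))
  local tfmdata = definers.loadfont(spec)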

+--ldx]]-- + +-- very experimental: + +function definers.applypostprocessors(tfmdata) + local postprocessors = tfmdata.postprocessors + if postprocessors then + local properties = tfmdata.properties + for i=1,#postprocessors do + local extrahash = postprocessors[i](tfmdata) -- after scaling etc + if type(extrahash) == "string" and extrahash ~= "" then + -- e.g. a reencoding needs this + extrahash = gsub(lower(extrahash),"[^a-z]","-") + properties.fullname = format("%s-%s",properties.fullname,extrahash) + end + end + end + return tfmdata +end + +-- function definers.applypostprocessors(tfmdata) +-- return tfmdata +-- end + +local function checkembedding(tfmdata) + local properties = tfmdata.properties + local embedding + if directive_embedall then + embedding = "full" + elseif properties and properties.filename and constructors.dontembed[properties.filename] then + embedding = "no" + else + embedding = "subset" + end + if properties then + properties.embedding = embedding + else + tfmdata.properties = { embedding = embedding } + end + tfmdata.embedding = embedding +end + +function definers.loadfont(specification) + local hash = constructors.hashinstance(specification) + local tfmdata = loadedfonts[hash] -- hashes by size ! + if not tfmdata then + local forced = specification.forced or "" + if forced ~= "" then + local reader = readers[lower(forced)] -- normally forced is already lowered + tfmdata = reader and reader(specification) + if not tfmdata then + report_defining("forced type %a of %a not found",forced,specification.name) + end + else + local sequence = readers.sequence -- can be overloaded so only a shortcut here + for s=1,#sequence do + local reader = sequence[s] + if readers[reader] then -- we skip not loaded readers + if trace_defining then + report_defining("trying (reader sequence driven) type %a for %a with file %a",reader,specification.name,specification.filename) + end + tfmdata = readers[reader](specification) + if tfmdata then + break + else + specification.filename = nil + end + end + end + end + if tfmdata then + tfmdata = definers.applypostprocessors(tfmdata) + checkembedding(tfmdata) -- todo: general postprocessor + loadedfonts[hash] = tfmdata + designsizes[specification.hash] = tfmdata.parameters.designsize + end + end + if not tfmdata then + report_defining("font with asked name %a is not found using lookup %a",specification.name,specification.lookup) + end + return tfmdata +end + +function constructors.checkvirtualids() + -- dummy in plain version +end + +function constructors.readanddefine(name,size) -- no id -- maybe a dummy first + local specification = definers.analyze(name,size) + local method = specification.method + if method and variants[method] then + specification = variants[method](specification) + end + specification = definers.resolve(specification) + local hash = constructors.hashinstance(specification) + local id = definers.registered(hash) + if not id then + local tfmdata = definers.loadfont(specification) + if tfmdata then + tfmdata.properties.hash = hash + constructors.checkvirtualids(tfmdata) -- experiment, will become obsolete when slots can selfreference + id = font.define(tfmdata) + definers.register(tfmdata,id) + else + id = 0 -- signal + end + end + return fontdata[id], id +end + +--[[ldx-- +

So far the specifiers. Now comes the real definer. Here we cache +based on id's. Here we also intercept the virtual font handler. Since +it evolved stepwise I may rewrite this bit (combine code).

+ +In the previously defined reader (the one resulting in a tfm +table) we cached the (scaled) instances. Here we cache them again, but +this time based on id. We could combine this in one cache but this does +not gain much. By the way, passing id's back to TeX in the callback was +introduced later in the development.
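As a sketch, with a made-up font name, this is what the engine effectively
triggers through the define_font callback registered below:

  -- \font\ten = file:lmroman10-regular at 10pt  (655360 scaled points)
  --   -> definers.read("file:lmroman10-regular", 655360, <id passed in by luatex>)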

+--ldx]]-- + +function definers.current() -- or maybe current + return lastdefined +end + +function definers.registered(hash) + local id = internalized[hash] + return id, id and fontdata[id] +end + +function definers.register(tfmdata,id) + if tfmdata and id then + local hash = tfmdata.properties.hash + if not hash then + report_defining("registering font, id %a, name %a, invalid hash",id,tfmdata.properties.filename or "?") + elseif not internalized[hash] then + internalized[hash] = id + if trace_defining then + report_defining("registering font, id %s, hash %a",id,hash) + end + fontdata[id] = tfmdata + end + end +end + +function definers.read(specification,size,id) -- id can be optional, name can already be table + statistics.starttiming(fonts) + if type(specification) == "string" then + specification = definers.analyze(specification,size) + end + local method = specification.method + if method and variants[method] then + specification = variants[method](specification) + end + specification = definers.resolve(specification) + local hash = constructors.hashinstance(specification) + local tfmdata = definers.registered(hash) -- id + if tfmdata then + if trace_defining then + report_defining("already hashed: %s",hash) + end + else + tfmdata = definers.loadfont(specification) -- can be overloaded + if tfmdata then + if trace_defining then + report_defining("loaded and hashed: %s",hash) + end + tfmdata.properties.hash = hash + if id then + definers.register(tfmdata,id) + end + else + if trace_defining then + report_defining("not loaded and hashed: %s",hash) + end + end + end + lastdefined = tfmdata or id -- todo ! ! ! ! ! + if not tfmdata then -- or id? + report_defining( "unknown font %a, loading aborted",specification.name) + elseif trace_defining and type(tfmdata) == "table" then + local properties = tfmdata.properties or { } + local parameters = tfmdata.parameters or { } + report_defining("using %a font with id %a, name %a, size %a, bytes %a, encoding %a, fullname %a, filename %a", + properties.format or "unknown", id, properties.name, parameters.size, properties.encodingbytes, + properties.encodingname, properties.fullname, file.basename(properties.filename)) + end + statistics.stoptiming(fonts) + return tfmdata +end + +function font.getfont(id) + return fontdata[id] -- otherwise issues +end + +--[[ldx-- +

We overload the reader.

+--ldx]]-- + +callbacks.register('define_font', definers.read, "definition of fonts (tfmdata preparation)") diff --git a/src/fontloader/misc/fontloader-font-ini.lua b/src/fontloader/misc/fontloader-font-ini.lua new file mode 100644 index 0000000..884b224 --- /dev/null +++ b/src/fontloader/misc/fontloader-font-ini.lua @@ -0,0 +1,32 @@ +if not modules then modules = { } end modules ['font-ini'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +--[[ldx-- +

Not much is happening here.

+--ldx]]-- + +local allocate = utilities.storage.allocate + +local report_defining = logs.reporter("fonts","defining") + +fonts = fonts or { } +local fonts = fonts + +fonts.hashes = { identifiers = allocate() } + +fonts.tables = fonts.tables or { } +fonts.helpers = fonts.helpers or { } +fonts.tracers = fonts.tracers or { } -- for the moment till we have move to moduledata +fonts.specifiers = fonts.specifiers or { } -- in format ! + +fonts.analyzers = { } -- not needed here +fonts.readers = { } +fonts.definers = { methods = { } } +fonts.loggers = { register = function() end } + +fontloader.totable = fontloader.to_table diff --git a/src/fontloader/misc/fontloader-font-map.lua b/src/fontloader/misc/fontloader-font-map.lua new file mode 100644 index 0000000..e26f28e --- /dev/null +++ b/src/fontloader/misc/fontloader-font-map.lua @@ -0,0 +1,533 @@ +if not modules then modules = { } end modules ['font-map'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local tonumber, next, type = tonumber, next, type + +local match, format, find, concat, gsub, lower = string.match, string.format, string.find, table.concat, string.gsub, string.lower +local P, R, S, C, Ct, Cc, lpegmatch = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cc, lpeg.match +local utfbyte = utf.byte +local floor = math.floor +local formatters = string.formatters + +local trace_loading = false trackers.register("fonts.loading", function(v) trace_loading = v end) +local trace_mapping = false trackers.register("fonts.mapping", function(v) trace_unimapping = v end) + +local report_fonts = logs.reporter("fonts","loading") -- not otf only + +local fonts = fonts or { } +local mappings = fonts.mappings or { } +fonts.mappings = mappings + +--[[ldx-- +

Eventually this code will disappear because map files are kind +of obsolete. Some code may move to runtime or auxiliary modules.

+

The name to unicode related code will stay of course.
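A few examples of what the name parsers defined below resolve to:

  lpegmatch(makenameparser(),         "uni0041")     -- 0x0041, false
  lpegmatch(makenameparser(),         "uni00410042") -- { 0x0041, 0x0042 }, true
  lpegmatch(makenameparser("Japan1"), "Japan1.123")  -- 123,    false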

+--ldx]]-- + +local function loadlumtable(filename) -- will move to font goodies + local lumname = file.replacesuffix(file.basename(filename),"lum") + local lumfile = resolvers.findfile(lumname,"map") or "" + if lumfile ~= "" and lfs.isfile(lumfile) then + if trace_loading or trace_mapping then + report_fonts("loading map table %a",lumfile) + end + lumunic = dofile(lumfile) + return lumunic, lumfile + end +end + +local hex = R("AF","09") +local hexfour = (hex*hex*hex*hex) / function(s) return tonumber(s,16) end +local hexsix = (hex*hex*hex*hex*hex*hex) / function(s) return tonumber(s,16) end +local dec = (R("09")^1) / tonumber +local period = P(".") +local unicode = P("uni") * (hexfour * (period + P(-1)) * Cc(false) + Ct(hexfour^1) * Cc(true)) +local ucode = P("u") * (hexsix * (period + P(-1)) * Cc(false) + Ct(hexsix ^1) * Cc(true)) +local index = P("index") * dec * Cc(false) + +local parser = unicode + ucode + index + +local parsers = { } + +local function makenameparser(str) + if not str or str == "" then + return parser + else + local p = parsers[str] + if not p then + p = P(str) * period * dec * Cc(false) + parsers[str] = p + end + return p + end +end + +local f_single = formatters["%04X"] +local f_double = formatters["%04X%04X"] + +local function tounicode16(unicode,name) + if unicode < 0x10000 then + return f_single(unicode) + elseif unicode < 0x1FFFFFFFFF then + return f_double(floor(unicode/1024),unicode%1024+0xDC00) + else + report_fonts("can't convert %a in %a into tounicode",unicode,name) + end +end + +local function tounicode16sequence(unicodes,name) + local t = { } + for l=1,#unicodes do + local u = unicodes[l] + if u < 0x10000 then + t[l] = f_single(u) + elseif unicode < 0x1FFFFFFFFF then + t[l] = f_double(floor(u/1024),u%1024+0xDC00) + else + report_fonts ("can't convert %a in %a into tounicode",u,name) + return + end + end + return concat(t) +end + +local function tounicode(unicode,name) + if type(unicode) == "table" then + local t = { } + for l=1,#unicode do + local u = unicode[l] + if u < 0x10000 then + t[l] = f_single(u) + elseif u < 0x1FFFFFFFFF then + t[l] = f_double(floor(u/1024),u%1024+0xDC00) + else + report_fonts ("can't convert %a in %a into tounicode",u,name) + return + end + end + return concat(t) + else + if unicode < 0x10000 then + return f_single(unicode) + elseif unicode < 0x1FFFFFFFFF then + return f_double(floor(unicode/1024),unicode%1024+0xDC00) + else + report_fonts("can't convert %a in %a into tounicode",unicode,name) + end + end +end + + +local function fromunicode16(str) + if #str == 4 then + return tonumber(str,16) + else + local l, r = match(str,"(....)(....)") + return (tonumber(l,16))*0x400 + tonumber(r,16) - 0xDC00 + end +end + +-- Slightly slower: +-- +-- local p = C(4) * (C(4)^-1) / function(l,r) +-- if r then +-- return (tonumber(l,16))*0x400 + tonumber(r,16) - 0xDC00 +-- else +-- return tonumber(l,16) +-- end +-- end +-- +-- local function fromunicode16(str) +-- return lpegmatch(p,str) +-- end + +-- This is quite a bit faster but at the cost of some memory but if we +-- do this we will also use it elsewhere so let's not follow this route +-- now. I might use this method in the plain variant (no caching there) +-- but then I need a flag that distinguishes between code branches. 
+-- +-- local cache = { } +-- +-- function mappings.tounicode16(unicode) +-- local s = cache[unicode] +-- if not s then +-- if unicode < 0x10000 then +-- s = format("%04X",unicode) +-- else +-- s = format("%04X%04X",unicode/0x400+0xD800,unicode%0x400+0xDC00) +-- end +-- cache[unicode] = s +-- end +-- return s +-- end + +mappings.loadlumtable = loadlumtable +mappings.makenameparser = makenameparser +mappings.tounicode = tounicode +mappings.tounicode16 = tounicode16 +mappings.tounicode16sequence = tounicode16sequence +mappings.fromunicode16 = fromunicode16 + +local ligseparator = P("_") +local varseparator = P(".") +local namesplitter = Ct(C((1 - ligseparator - varseparator)^1) * (ligseparator * C((1 - ligseparator - varseparator)^1))^0) + +-- local function test(name) +-- local split = lpegmatch(namesplitter,name) +-- print(string.formatters["%s: [% t]"](name,split)) +-- end + +-- maybe: ff fi fl ffi ffl => f_f f_i f_l f_f_i f_f_l + +-- test("i.f_") +-- test("this") +-- test("this.that") +-- test("japan1.123") +-- test("such_so_more") +-- test("such_so_more.that") + +-- to be completed .. for fonts that use unicodes for ligatures which +-- is a actually a bad thing and should be avoided in the first place + +local overloads = { + IJ = { name = "I_J", unicode = { 0x49, 0x4A }, mess = 0x0132 }, + ij = { name = "i_j", unicode = { 0x69, 0x6A }, mess = 0x0133 }, + ff = { name = "f_f", unicode = { 0x66, 0x66 }, mess = 0xFB00 }, + fi = { name = "f_i", unicode = { 0x66, 0x69 }, mess = 0xFB01 }, + fl = { name = "f_l", unicode = { 0x66, 0x6C }, mess = 0xFB02 }, + ffi = { name = "f_f_i", unicode = { 0x66, 0x66, 0x69 }, mess = 0xFB03 }, + ffl = { name = "f_f_l", unicode = { 0x66, 0x66, 0x6C }, mess = 0xFB04 }, + fj = { name = "f_j", unicode = { 0x66, 0x6A } }, + fk = { name = "f_k", unicode = { 0x66, 0x6B } }, +} + +for k, v in next, overloads do + local name = v.name + local mess = v.mess + if name then + overloads[name] = v + end + if mess then + overloads[mess] = v + end +end + +mappings.overloads = overloads + +function mappings.addtounicode(data,filename) + local resources = data.resources + local properties = data.properties + local descriptions = data.descriptions + local unicodes = resources.unicodes + local lookuptypes = resources.lookuptypes + if not unicodes then + return + end + -- we need to move this code + unicodes['space'] = unicodes['space'] or 32 + unicodes['hyphen'] = unicodes['hyphen'] or 45 + unicodes['zwj'] = unicodes['zwj'] or 0x200D + unicodes['zwnj'] = unicodes['zwnj'] or 0x200C + local private = fonts.constructors.privateoffset + local unicodevector = fonts.encodings.agl.unicodes -- loaded runtime in context + ----- namevector = fonts.encodings.agl.names -- loaded runtime in context + local missing = { } + local lumunic, uparser, oparser + local cidinfo, cidnames, cidcodes, usedmap + -- + cidinfo = properties.cidinfo + usedmap = cidinfo and fonts.cid.getmap(cidinfo) + -- + if usedmap then + oparser = usedmap and makenameparser(cidinfo.ordering) + cidnames = usedmap.names + cidcodes = usedmap.unicodes + end + uparser = makenameparser() + local ns, nl = 0, 0 + for unic, glyph in next, descriptions do + local index = glyph.index + local name = glyph.name + local r = overloads[name] + if r then + -- get rid of weird ligatures + -- glyph.name = r.name + glyph.unicode = r.unicode + elseif unic == -1 or unic >= private or (unic >= 0xE000 and unic <= 0xF8FF) or unic == 0xFFFE or unic == 0xFFFF then + local unicode = lumunic and lumunic[name] or unicodevector[name] + if unicode then + 
glyph.unicode = unicode + ns = ns + 1 + end + -- cidmap heuristics, beware, there is no guarantee for a match unless + -- the chain resolves + if (not unicode) and usedmap then + local foundindex = lpegmatch(oparser,name) + if foundindex then + unicode = cidcodes[foundindex] -- name to number + if unicode then + glyph.unicode = unicode + ns = ns + 1 + else + local reference = cidnames[foundindex] -- number to name + if reference then + local foundindex = lpegmatch(oparser,reference) + if foundindex then + unicode = cidcodes[foundindex] + if unicode then + glyph.unicode = unicode + ns = ns + 1 + end + end + if not unicode or unicode == "" then + local foundcodes, multiple = lpegmatch(uparser,reference) + if foundcodes then + glyph.unicode = foundcodes + if multiple then + nl = nl + 1 + unicode = true + else + ns = ns + 1 + unicode = foundcodes + end + end + end + end + end + end + end + -- a.whatever or a_b_c.whatever or a_b_c (no numbers) a.b_ + -- + -- It is not trivial to find a solution that suits all fonts. We tried several alternatives + -- and this one seems to work reasonable also with fonts that use less standardized naming + -- schemes. The extra private test is tested by KE and seems to work okay with non-typical + -- fonts as well. + -- + -- The next time I look into this, I'll add an extra analysis step to the otf loader (we can + -- resolve some tounicodes by looking into the gsub data tables that are bound to glyphs. + -- + if not unicode or unicode == "" then + local split = lpegmatch(namesplitter,name) + local nsplit = split and #split or 0 + local t, n = { }, 0 + unicode = true + for l=1,nsplit do + local base = split[l] + local u = unicodes[base] or unicodevector[base] + if not u then + break + elseif type(u) == "table" then + if u[1] >= private then + unicode = false + break + end + n = n + 1 + t[n] = u[1] + else + if u >= private then + unicode = false + break + end + n = n + 1 + t[n] = u + end + end + if n == 0 then -- done then + -- nothing + elseif n == 1 then + glyph.unicode = t[1] + else + glyph.unicode = t + end + nl = nl + 1 + end + -- last resort (we might need to catch private here as well) + if not unicode or unicode == "" then + local foundcodes, multiple = lpegmatch(uparser,name) + if foundcodes then + glyph.unicode = foundcodes + if multiple then + nl = nl + 1 + unicode = true + else + ns = ns + 1 + unicode = foundcodes + end + end + end + -- check using substitutes and alternates + local r = overloads[unicode] + if r then + unicode = r.unicode + glyph.unicode = unicode + end + -- + if not unicode then + missing[name] = true + end + end + end + if next(missing) then + local guess = { } + -- helper + local function check(gname,code,unicode) + local description = descriptions[code] + -- no need to add a self reference + local variant = description.name + if variant == gname then + return + end + -- the variant already has a unicode (normally that resultrs in a default tounicode to self) + local unic = unicodes[variant] + if unic == -1 or unic >= private or (unic >= 0xE000 and unic <= 0xF8FF) or unic == 0xFFFE or unic == 0xFFFF then + -- no default mapping and therefore maybe no tounicode yet + else + return + end + -- the variant already has a tounicode + if descriptions[code].unicode then + return + end + -- add to the list + local g = guess[variant] + -- local r = overloads[unicode] + -- if r then + -- unicode = r.unicode + -- end + if g then + g[gname] = unicode + else + guess[variant] = { [gname] = unicode } + end + end + -- + for unicode, description in 
next, descriptions do + local slookups = description.slookups + if slookups then + local gname = description.name + for tag, data in next, slookups do + local lookuptype = lookuptypes[tag] + if lookuptype == "alternate" then + for i=1,#data do + check(gname,data[i],unicode) + end + elseif lookuptype == "substitution" then + check(gname,data,unicode) + end + end + end + local mlookups = description.mlookups + if mlookups then + local gname = description.name + for tag, list in next, mlookups do + local lookuptype = lookuptypes[tag] + if lookuptype == "alternate" then + for i=1,#list do + local data = list[i] + for i=1,#data do + check(gname,data[i],unicode) + end + end + elseif lookuptype == "substitution" then + for i=1,#list do + check(gname,list[i],unicode) + end + end + end + end + end + -- resolve references + local done = true + while done do + done = false + for k, v in next, guess do + if type(v) ~= "number" then + for kk, vv in next, v do + if vv == -1 or vv >= private or (vv >= 0xE000 and vv <= 0xF8FF) or vv == 0xFFFE or vv == 0xFFFF then + local uu = guess[kk] + if type(uu) == "number" then + guess[k] = uu + done = true + end + else + guess[k] = vv + done = true + end + end + end + end + end + -- wrap up + local orphans = 0 + local guessed = 0 + for k, v in next, guess do + if type(v) == "number" then + descriptions[unicodes[k]].unicode = descriptions[v].unicode or v -- can also be a table + guessed = guessed + 1 + else + local t = nil + local l = lower(k) + local u = unicodes[l] + if not u then + orphans = orphans + 1 + elseif u == -1 or u >= private or (u >= 0xE000 and u <= 0xF8FF) or u == 0xFFFE or u == 0xFFFF then + local unicode = descriptions[u].unicode + if unicode then + descriptions[unicodes[k]].unicode = unicode + guessed = guessed + 1 + else + orphans = orphans + 1 + end + else + orphans = orphans + 1 + end + end + end + if trace_loading and orphans > 0 or guessed > 0 then + report_fonts("%s glyphs with no related unicode, %s guessed, %s orphans",guessed+orphans,guessed,orphans) + end + end + if trace_mapping then + for unic, glyph in table.sortedhash(descriptions) do + local name = glyph.name + local index = glyph.index + local unicode = glyph.unicode + if unicode then + if type(unicode) == "table" then + local unicodes = { } + for i=1,#unicode do + unicodes[i] = formatters("%U",unicode[i]) + end + report_fonts("internal slot %U, name %a, unicode %U, tounicode % t",index,name,unic,unicodes) + else + report_fonts("internal slot %U, name %a, unicode %U, tounicode %U",index,name,unic,unicode) + end + else + report_fonts("internal slot %U, name %a, unicode %U",index,name,unic) + end + end + end + if trace_loading and (ns > 0 or nl > 0) then + report_fonts("%s tounicode entries added, ligatures %s",nl+ns,ns) + end +end + +-- local parser = makenameparser("Japan1") +-- local parser = makenameparser() +-- local function test(str) +-- local b, a = lpegmatch(parser,str) +-- print((a and table.serialize(b)) or b) +-- end +-- test("a.sc") +-- test("a") +-- test("uni1234") +-- test("uni1234.xx") +-- test("uni12349876") +-- test("u123400987600") +-- test("index1234") +-- test("Japan1.123") diff --git a/src/fontloader/misc/fontloader-font-otb.lua b/src/fontloader/misc/fontloader-font-otb.lua new file mode 100644 index 0000000..4e955a1 --- /dev/null +++ b/src/fontloader/misc/fontloader-font-otb.lua @@ -0,0 +1,707 @@ +if not modules then modules = { } end modules ['font-otb'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt 
NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} +local concat = table.concat +local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip +local type, next, tonumber, tostring, rawget = type, next, tonumber, tostring, rawget +local lpegmatch = lpeg.match +local utfchar = utf.char + +local trace_baseinit = false trackers.register("otf.baseinit", function(v) trace_baseinit = v end) +local trace_singles = false trackers.register("otf.singles", function(v) trace_singles = v end) +local trace_multiples = false trackers.register("otf.multiples", function(v) trace_multiples = v end) +local trace_alternatives = false trackers.register("otf.alternatives", function(v) trace_alternatives = v end) +local trace_ligatures = false trackers.register("otf.ligatures", function(v) trace_ligatures = v end) +local trace_ligatures_detail = false trackers.register("otf.ligatures.detail", function(v) trace_ligatures_detail = v end) +local trace_kerns = false trackers.register("otf.kerns", function(v) trace_kerns = v end) +local trace_preparing = false trackers.register("otf.preparing", function(v) trace_preparing = v end) + +local report_prepare = logs.reporter("fonts","otf prepare") + +local fonts = fonts +local otf = fonts.handlers.otf + +local otffeatures = otf.features +local registerotffeature = otffeatures.register + +otf.defaultbasealternate = "none" -- first last + +local wildcard = "*" +local default = "dflt" + +local formatters = string.formatters +local f_unicode = formatters["%U"] +local f_uniname = formatters["%U (%s)"] +local f_unilist = formatters["% t (% t)"] + +local function gref(descriptions,n) + if type(n) == "number" then + local name = descriptions[n].name + if name then + return f_uniname(n,name) + else + return f_unicode(n) + end + elseif n then + local num, nam, j = { }, { }, 0 + for i=1,#n do + local ni = n[i] + if tonumber(ni) then -- first is likely a key + j = j + 1 + local di = descriptions[ni] + num[j] = f_unicode(ni) + nam[j] = di and di.name or "-" + end + end + return f_unilist(num,nam) + else + return "" + end +end + +local function cref(feature,lookuptags,lookupname) + if lookupname then + return formatters["feature %a, lookup %a"](feature,lookuptags[lookupname]) + else + return formatters["feature %a"](feature) + end +end + +local function report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,comment) + report_prepare("%s: base alternate %s => %s (%S => %S)", + cref(feature,lookuptags,lookupname), + gref(descriptions,unicode), + replacement and gref(descriptions,replacement), + value, + comment) +end + +local function report_substitution(feature,lookuptags,lookupname,descriptions,unicode,substitution) + report_prepare("%s: base substitution %s => %S", + cref(feature,lookuptags,lookupname), + gref(descriptions,unicode), + gref(descriptions,substitution)) +end + +local function report_ligature(feature,lookuptags,lookupname,descriptions,unicode,ligature) + report_prepare("%s: base ligature %s => %S", + cref(feature,lookuptags,lookupname), + gref(descriptions,ligature), + gref(descriptions,unicode)) +end + +local function report_kern(feature,lookuptags,lookupname,descriptions,unicode,otherunicode,value) + report_prepare("%s: base kern %s + %s => %S", + cref(feature,lookuptags,lookupname), + gref(descriptions,unicode), + gref(descriptions,otherunicode), + value) +end + +local basemethods = { } 
+local basemethod = "" + +local function applybasemethod(what,...) + local m = basemethods[basemethod][what] + if m then + return m(...) + end +end + +-- We need to make sure that luatex sees the difference between +-- base fonts that have different glyphs in the same slots in fonts +-- that have the same fullname (or filename). LuaTeX will merge fonts +-- eventually (and subset later on). If needed we can use a more +-- verbose name as long as we don't use <()<>[]{}/%> and the length +-- is < 128. + +local basehash, basehashes, applied = { }, 1, { } + +local function registerbasehash(tfmdata) + local properties = tfmdata.properties + local hash = concat(applied," ") + local base = basehash[hash] + if not base then + basehashes = basehashes + 1 + base = basehashes + basehash[hash] = base + end + properties.basehash = base + properties.fullname = properties.fullname .. "-" .. base + -- report_prepare("fullname base hash '%a, featureset %a",tfmdata.properties.fullname,hash) + applied = { } +end + +local function registerbasefeature(feature,value) + applied[#applied+1] = feature .. "=" .. tostring(value) +end + +-- The original basemode ligature builder used the names of components +-- and did some expression juggling to get the chain right. The current +-- variant starts with unicodes but still uses names to make the chain. +-- This is needed because we have to create intermediates when needed +-- but use predefined snippets when available. To some extend the +-- current builder is more stupid but I don't worry that much about it +-- as ligatures are rather predicatable. +-- +-- Personally I think that an ff + i == ffi rule as used in for instance +-- latin modern is pretty weird as no sane person will key that in and +-- expect a glyph for that ligature plus the following character. Anyhow, +-- as we need to deal with this, we do, but no guarantes are given. +-- +-- latin modern dejavu +-- +-- f+f 102 102 102 102 +-- f+i 102 105 102 105 +-- f+l 102 108 102 108 +-- f+f+i 102 102 105 +-- f+f+l 102 102 108 102 102 108 +-- ff+i 64256 105 64256 105 +-- ff+l 64256 108 +-- +-- As you can see here, latin modern is less complete than dejavu but +-- in practice one will not notice it. +-- +-- The while loop is needed because we need to resolve for instance +-- pseudo names like hyphen_hyphen to endash so in practice we end +-- up with a bit too many definitions but the overhead is neglectable. +-- +-- We can have changed[first] or changed[second] but it quickly becomes +-- messy if we need to take that into account. + +local trace = false + +local function finalize_ligatures(tfmdata,ligatures) + local nofligatures = #ligatures + if nofligatures > 0 then + local characters = tfmdata.characters + local descriptions = tfmdata.descriptions + local resources = tfmdata.resources + local unicodes = resources.unicodes -- we use rawget in order to avoid bulding the table + local private = resources.private + local alldone = false + while not alldone do + local done = 0 + for i=1,nofligatures do + local ligature = ligatures[i] + if ligature then + local unicode, lookupdata = ligature[1], ligature[2] + if trace_ligatures_detail then + report_prepare("building % a into %a",lookupdata,unicode) + end + local size = #lookupdata + local firstcode = lookupdata[1] -- [2] + local firstdata = characters[firstcode] + local okay = false + if firstdata then + local firstname = "ctx_" .. 
firstcode + for i=1,size-1 do -- for i=2,size-1 do + local firstdata = characters[firstcode] + if not firstdata then + firstcode = private + if trace_ligatures_detail then + report_prepare("defining %a as %a",firstname,firstcode) + end + unicodes[firstname] = firstcode + firstdata = { intermediate = true, ligatures = { } } + characters[firstcode] = firstdata + descriptions[firstcode] = { name = firstname } + private = private + 1 + end + local target + local secondcode = lookupdata[i+1] + local secondname = firstname .. "_" .. secondcode + if i == size - 1 then + target = unicode + if not rawget(unicodes,secondname) then + unicodes[secondname] = unicode -- map final ligature onto intermediates + end + okay = true + else + target = rawget(unicodes,secondname) + if not target then + break + end + end + if trace_ligatures_detail then + report_prepare("codes (%a,%a) + (%a,%a) -> %a",firstname,firstcode,secondname,secondcode,target) + end + local firstligs = firstdata.ligatures + if firstligs then + firstligs[secondcode] = { char = target } + else + firstdata.ligatures = { [secondcode] = { char = target } } + end + firstcode = target + firstname = secondname + end + elseif trace_ligatures_detail then + report_prepare("no glyph (%a,%a) for building %a",firstname,firstcode,target) + end + if okay then + ligatures[i] = false + done = done + 1 + end + end + end + alldone = done == 0 + end + if trace_ligatures_detail then + for k, v in table.sortedhash(characters) do + if v.ligatures then + table.print(v,k) + end + end + end + resources.private = private + return true + end +end + +local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist) + local characters = tfmdata.characters + local descriptions = tfmdata.descriptions + local resources = tfmdata.resources + local properties = tfmdata.properties + local changed = tfmdata.changed + local lookuphash = resources.lookuphash + local lookuptypes = resources.lookuptypes + local lookuptags = resources.lookuptags + + local ligatures = { } + local alternate = tonumber(value) or true and 1 + local defaultalt = otf.defaultbasealternate + + local trace_singles = trace_baseinit and trace_singles + local trace_alternatives = trace_baseinit and trace_alternatives + local trace_ligatures = trace_baseinit and trace_ligatures + + local actions = { + substitution = function(lookupdata,lookuptags,lookupname,description,unicode) + if trace_singles then + report_substitution(feature,lookuptags,lookupname,descriptions,unicode,lookupdata) + end + changed[unicode] = lookupdata + end, + alternate = function(lookupdata,lookuptags,lookupname,description,unicode) + local replacement = lookupdata[alternate] + if replacement then + changed[unicode] = replacement + if trace_alternatives then + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"normal") + end + elseif defaultalt == "first" then + replacement = lookupdata[1] + changed[unicode] = replacement + if trace_alternatives then + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt) + end + elseif defaultalt == "last" then + replacement = lookupdata[#data] + if trace_alternatives then + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt) + end + else + if trace_alternatives then + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"unknown") + end + end + end, + ligature = function(lookupdata,lookuptags,lookupname,description,unicode) + if 
trace_ligatures then + report_ligature(feature,lookuptags,lookupname,descriptions,unicode,lookupdata) + end + ligatures[#ligatures+1] = { unicode, lookupdata } + end, + } + + for unicode, character in next, characters do + local description = descriptions[unicode] + local lookups = description.slookups + if lookups then + for l=1,#lookuplist do + local lookupname = lookuplist[l] + local lookupdata = lookups[lookupname] + if lookupdata then + local lookuptype = lookuptypes[lookupname] + local action = actions[lookuptype] + if action then + action(lookupdata,lookuptags,lookupname,description,unicode) + end + end + end + end + local lookups = description.mlookups + if lookups then + for l=1,#lookuplist do + local lookupname = lookuplist[l] + local lookuplist = lookups[lookupname] + if lookuplist then + local lookuptype = lookuptypes[lookupname] + local action = actions[lookuptype] + if action then + for i=1,#lookuplist do + action(lookuplist[i],lookuptags,lookupname,description,unicode) + end + end + end + end + end + end + properties.hasligatures = finalize_ligatures(tfmdata,ligatures) +end + +local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist) -- todo what kind of kerns, currently all + local characters = tfmdata.characters + local descriptions = tfmdata.descriptions + local resources = tfmdata.resources + local properties = tfmdata.properties + local lookuptags = resources.lookuptags + local sharedkerns = { } + local traceindeed = trace_baseinit and trace_kerns + local haskerns = false + for unicode, character in next, characters do + local description = descriptions[unicode] + local rawkerns = description.kerns -- shared + if rawkerns then + local s = sharedkerns[rawkerns] + if s == false then + -- skip + elseif s then + character.kerns = s + else + local newkerns = character.kerns + local done = false + for l=1,#lookuplist do + local lookup = lookuplist[l] + local kerns = rawkerns[lookup] + if kerns then + for otherunicode, value in next, kerns do + if value == 0 then + -- maybe no 0 test here + elseif not newkerns then + newkerns = { [otherunicode] = value } + done = true + if traceindeed then + report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,value) + end + elseif not newkerns[otherunicode] then -- first wins + newkerns[otherunicode] = value + done = true + if traceindeed then + report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,value) + end + end + end + end + end + if done then + sharedkerns[rawkerns] = newkerns + character.kerns = newkerns -- no empty assignments + haskerns = true + else + sharedkerns[rawkerns] = false + end + end + end + end + properties.haskerns = haskerns +end + +basemethods.independent = { + preparesubstitutions = preparesubstitutions, + preparepositionings = preparepositionings, +} + +local function makefake(tfmdata,name,present) + local resources = tfmdata.resources + local private = resources.private + local character = { intermediate = true, ligatures = { } } + resources.unicodes[name] = private + tfmdata.characters[private] = character + tfmdata.descriptions[private] = { name = name } + resources.private = private + 1 + present[name] = private + return character +end + +local function make_1(present,tree,name) + for k, v in next, tree do + if k == "ligature" then + present[name] = v + else + make_1(present,v,name .. "_" .. 
k) + end + end +end + +local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,done,lookuptags,lookupname) + for k, v in next, tree do + if k == "ligature" then + local character = characters[preceding] + if not character then + if trace_baseinit then + report_prepare("weird ligature in lookup %a, current %C, preceding %C",lookuptags[lookupname],v,preceding) + end + character = makefake(tfmdata,name,present) + end + local ligatures = character.ligatures + if ligatures then + ligatures[unicode] = { char = v } + else + character.ligatures = { [unicode] = { char = v } } + end + if done then + local d = done[lookupname] + if not d then + done[lookupname] = { "dummy", v } + else + d[#d+1] = v + end + end + else + local code = present[name] or unicode + local name = name .. "_" .. k + make_2(present,tfmdata,characters,v,name,code,k,done,lookuptags,lookupname) + end + end +end + +local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist) + local characters = tfmdata.characters + local descriptions = tfmdata.descriptions + local resources = tfmdata.resources + local changed = tfmdata.changed + local lookuphash = resources.lookuphash + local lookuptypes = resources.lookuptypes + local lookuptags = resources.lookuptags + + local ligatures = { } + local alternate = tonumber(value) or true and 1 + local defaultalt = otf.defaultbasealternate + + local trace_singles = trace_baseinit and trace_singles + local trace_alternatives = trace_baseinit and trace_alternatives + local trace_ligatures = trace_baseinit and trace_ligatures + + for l=1,#lookuplist do + local lookupname = lookuplist[l] + local lookupdata = lookuphash[lookupname] + local lookuptype = lookuptypes[lookupname] + for unicode, data in next, lookupdata do + if lookuptype == "substitution" then + if trace_singles then + report_substitution(feature,lookuptags,lookupname,descriptions,unicode,data) + end + changed[unicode] = data + elseif lookuptype == "alternate" then + local replacement = data[alternate] + if replacement then + changed[unicode] = replacement + if trace_alternatives then + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"normal") + end + elseif defaultalt == "first" then + replacement = data[1] + changed[unicode] = replacement + if trace_alternatives then + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt) + end + elseif defaultalt == "last" then + replacement = data[#data] + if trace_alternatives then + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt) + end + else + if trace_alternatives then + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"unknown") + end + end + elseif lookuptype == "ligature" then + ligatures[#ligatures+1] = { unicode, data, lookupname } + if trace_ligatures then + report_ligature(feature,lookuptags,lookupname,descriptions,unicode,data) + end + end + end + end + + local nofligatures = #ligatures + + if nofligatures > 0 then + + local characters = tfmdata.characters + local present = { } + local done = trace_baseinit and trace_ligatures and { } + + for i=1,nofligatures do + local ligature = ligatures[i] + local unicode, tree = ligature[1], ligature[2] + make_1(present,tree,"ctx_"..unicode) + end + + for i=1,nofligatures do + local ligature = ligatures[i] + local unicode, tree, lookupname = ligature[1], ligature[2], ligature[3] + 
make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,lookuptags,lookupname) + end + + end + +end + +local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist) + local characters = tfmdata.characters + local descriptions = tfmdata.descriptions + local resources = tfmdata.resources + local properties = tfmdata.properties + local lookuphash = resources.lookuphash + local lookuptags = resources.lookuptags + local traceindeed = trace_baseinit and trace_kerns + -- check out this sharedkerns trickery + for l=1,#lookuplist do + local lookupname = lookuplist[l] + local lookupdata = lookuphash[lookupname] + for unicode, data in next, lookupdata do + local character = characters[unicode] + local kerns = character.kerns + if not kerns then + kerns = { } + character.kerns = kerns + end + if traceindeed then + for otherunicode, kern in next, data do + if not kerns[otherunicode] and kern ~= 0 then + kerns[otherunicode] = kern + report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,kern) + end + end + else + for otherunicode, kern in next, data do + if not kerns[otherunicode] and kern ~= 0 then + kerns[otherunicode] = kern + end + end + end + end + end + +end + +local function initializehashes(tfmdata) + nodeinitializers.features(tfmdata) +end + +basemethods.shared = { + initializehashes = initializehashes, + preparesubstitutions = preparesubstitutions, + preparepositionings = preparepositionings, +} + +basemethod = "independent" + +local function featuresinitializer(tfmdata,value) + if true then -- value then + local starttime = trace_preparing and os.clock() + local features = tfmdata.shared.features + local fullname = tfmdata.properties.fullname or "?" + if features then + applybasemethod("initializehashes",tfmdata) + local collectlookups = otf.collectlookups + local rawdata = tfmdata.shared.rawdata + local properties = tfmdata.properties + local script = properties.script + local language = properties.language + local basesubstitutions = rawdata.resources.features.gsub + local basepositionings = rawdata.resources.features.gpos + -- + -- if basesubstitutions then + -- for feature, data in next, basesubstitutions do + -- local value = features[feature] + -- if value then + -- local validlookups, lookuplist = collectlookups(rawdata,feature,script,language) + -- if validlookups then + -- applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist) + -- registerbasefeature(feature,value) + -- end + -- end + -- end + -- end + -- if basepositionings then + -- for feature, data in next, basepositionings do + -- local value = features[feature] + -- if value then + -- local validlookups, lookuplist = collectlookups(rawdata,feature,script,language) + -- if validlookups then + -- applybasemethod("preparepositionings",tfmdata,feature,features[feature],validlookups,lookuplist) + -- registerbasefeature(feature,value) + -- end + -- end + -- end + -- end + -- + if basesubstitutions or basepositionings then + local sequences = tfmdata.resources.sequences + for s=1,#sequences do + local sequence = sequences[s] + local sfeatures = sequence.features + if sfeatures then + local order = sequence.order + if order then + for i=1,#order do -- + local feature = order[i] + local value = features[feature] + if value then + local validlookups, lookuplist = collectlookups(rawdata,feature,script,language) + if not validlookups then + -- skip + elseif basesubstitutions and basesubstitutions[feature] then + if trace_preparing then + 
report_prepare("filtering base %s feature %a for %a with value %a","sub",feature,fullname,value) + end + applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist) + registerbasefeature(feature,value) + elseif basepositionings and basepositionings[feature] then + if trace_preparing then + report_prepare("filtering base %a feature %a for %a with value %a","pos",feature,fullname,value) + end + applybasemethod("preparepositionings",tfmdata,feature,value,validlookups,lookuplist) + registerbasefeature(feature,value) + end + end + end + end + end + end + end + -- + registerbasehash(tfmdata) + end + if trace_preparing then + report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,fullname) + end + end +end + +registerotffeature { + name = "features", + description = "features", + default = true, + initializers = { + -- position = 1, -- after setscript (temp hack ... we need to force script / language to 1 + base = featuresinitializer, + } +} + +-- independent : collect lookups independently (takes more runtime ... neglectable) +-- shared : shares lookups with node mode (takes more memory unless also a node mode variant is used ... noticeable) + +directives.register("fonts.otf.loader.basemethod", function(v) + if basemethods[v] then + basemethod = v + end +end) diff --git a/src/fontloader/misc/fontloader-font-otf.lua b/src/fontloader/misc/fontloader-font-otf.lua new file mode 100644 index 0000000..18b9752 --- /dev/null +++ b/src/fontloader/misc/fontloader-font-otf.lua @@ -0,0 +1,2592 @@ +if not modules then modules = { } end modules ['font-otf'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- langs -> languages enz +-- anchor_classes vs kernclasses +-- modification/creationtime in subfont is runtime dus zinloos +-- to_table -> totable +-- ascent descent + +-- more checking against low level calls of functions + +local utfbyte = utf.byte +local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip +local type, next, tonumber, tostring = type, next, tonumber, tostring +local abs = math.abs +local insert = table.insert +local lpegmatch = lpeg.match +local reversed, concat, remove, sortedkeys = table.reversed, table.concat, table.remove, table.sortedkeys +local ioflush = io.flush +local fastcopy, tohash, derivetable = table.fastcopy, table.tohash, table.derive +local formatters = string.formatters +local P, R, S, C, Ct, lpegmatch = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Ct, lpeg.match + +local setmetatableindex = table.setmetatableindex +local allocate = utilities.storage.allocate +local registertracker = trackers.register +local registerdirective = directives.register +local starttiming = statistics.starttiming +local stoptiming = statistics.stoptiming +local elapsedtime = statistics.elapsedtime +local findbinfile = resolvers.findbinfile + +local trace_private = false registertracker("otf.private", function(v) trace_private = v end) +local trace_loading = false registertracker("otf.loading", function(v) trace_loading = v end) +local trace_features = false registertracker("otf.features", function(v) trace_features = v end) +local trace_dynamics = false registertracker("otf.dynamics", function(v) trace_dynamics = v end) +local trace_sequences = false registertracker("otf.sequences", 
function(v) trace_sequences = v end) +local trace_markwidth = false registertracker("otf.markwidth", function(v) trace_markwidth = v end) +local trace_defining = false registertracker("fonts.defining", function(v) trace_defining = v end) + +local compact_lookups = true registertracker("otf.compactlookups", function(v) compact_lookups = v end) +local purge_names = true registertracker("otf.purgenames", function(v) purge_names = v end) + +local report_otf = logs.reporter("fonts","otf loading") + +local fonts = fonts +local otf = fonts.handlers.otf + +otf.glists = { "gsub", "gpos" } + +otf.version = 2.802 -- beware: also sync font-mis.lua +otf.cache = containers.define("fonts", "otf", otf.version, true) + +local fontdata = fonts.hashes.identifiers +local chardata = characters and characters.data -- not used + +local definers = fonts.definers +local readers = fonts.readers +local constructors = fonts.constructors + +local otffeatures = constructors.newfeatures("otf") +local registerotffeature = otffeatures.register + +local enhancers = allocate() +otf.enhancers = enhancers +local patches = { } +enhancers.patches = patches + +local forceload = false +local cleanup = 0 -- mk: 0=885M 1=765M 2=735M (regular run 730M) +local packdata = true +local syncspace = true +local forcenotdef = false +local includesubfonts = false +local overloadkerns = false -- experiment + +local applyruntimefixes = fonts.treatments and fonts.treatments.applyfixes + +local wildcard = "*" +local default = "dflt" + +local fontloaderfields = fontloader.fields +local mainfields = nil +local glyphfields = nil -- not used yet + +local formats = fonts.formats + +formats.otf = "opentype" +formats.ttf = "truetype" +formats.ttc = "truetype" +formats.dfont = "truetype" + +registerdirective("fonts.otf.loader.cleanup", function(v) cleanup = tonumber(v) or (v and 1) or 0 end) +registerdirective("fonts.otf.loader.force", function(v) forceload = v end) +registerdirective("fonts.otf.loader.pack", function(v) packdata = v end) +registerdirective("fonts.otf.loader.syncspace", function(v) syncspace = v end) +registerdirective("fonts.otf.loader.forcenotdef", function(v) forcenotdef = v end) +registerdirective("fonts.otf.loader.overloadkerns", function(v) overloadkerns = v end) + +function otf.fileformat(filename) + local leader = lower(io.loadchunk(filename,4)) + local suffix = lower(file.suffix(filename)) + if leader == "otto" then + return formats.otf, suffix == "otf" + elseif leader == "ttcf" then + return formats.ttc, suffix == "ttc" + -- elseif leader == "true" then + -- return formats.ttf, suffix == "ttf" + elseif suffix == "ttc" then + return formats.ttc, true + elseif suffix == "dfont" then + return formats.dfont, true + else + return formats.ttf, suffix == "ttf" + end +end + +-- local function otf_format(filename) +-- -- return formats[lower(file.suffix(filename))] +-- end + +local function otf_format(filename) + local format, okay = otf.fileformat(filename) + if not okay then + report_otf("font %a is actually an %a file",filename,format) + end + return format +end + +local function load_featurefile(raw,featurefile) + if featurefile and featurefile ~= "" then + if trace_loading then + report_otf("using featurefile %a", featurefile) + end + fontloader.apply_featurefile(raw, featurefile) + end +end + +local function showfeatureorder(rawdata,filename) + local sequences = rawdata.resources.sequences + if sequences and #sequences > 0 then + if trace_loading then + report_otf("font %a has %s sequences",filename,#sequences) + report_otf(" ") 
+ end + for nos=1,#sequences do + local sequence = sequences[nos] + local typ = sequence.type or "no-type" + local name = sequence.name or "no-name" + local subtables = sequence.subtables or { "no-subtables" } + local features = sequence.features + if trace_loading then + report_otf("%3i %-15s %-20s [% t]",nos,name,typ,subtables) + end + if features then + for feature, scripts in next, features do + local tt = { } + if type(scripts) == "table" then + for script, languages in next, scripts do + local ttt = { } + for language, _ in next, languages do + ttt[#ttt+1] = language + end + tt[#tt+1] = formatters["[%s: % t]"](script,ttt) + end + if trace_loading then + report_otf(" %s: % t",feature,tt) + end + else + if trace_loading then + report_otf(" %s: %S",feature,scripts) + end + end + end + end + end + if trace_loading then + report_otf("\n") + end + elseif trace_loading then + report_otf("font %a has no sequences",filename) + end +end + +--[[ldx-- +
+<p>We start with a lot of tables and related functions.</p>
+--ldx]]-- + +local valid_fields = table.tohash { + -- "anchor_classes", + "ascent", + -- "cache_version", + "cidinfo", + "copyright", + -- "creationtime", + "descent", + "design_range_bottom", + "design_range_top", + "design_size", + "encodingchanged", + "extrema_bound", + "familyname", + "fontname", + "fontstyle_id", + "fontstyle_name", + "fullname", + -- "glyphs", + "hasvmetrics", + -- "head_optimized_for_cleartype", + "horiz_base", + "issans", + "isserif", + "italicangle", + -- "kerns", + -- "lookups", + "macstyle", + -- "modificationtime", + "onlybitmaps", + "origname", + "os2_version", + "pfminfo", + -- "private", + "serifcheck", + "sfd_version", + -- "size", + "strokedfont", + "strokewidth", + -- "subfonts", + "table_version", + -- "tables", + -- "ttf_tab_saved", + "ttf_tables", + "uni_interp", + "uniqueid", + "units_per_em", + "upos", + "use_typo_metrics", + "uwidth", + "validation_state", + "version", + "vert_base", + "weight", + "weight_width_slope_only", + -- "xuid", +} + +local ordered_enhancers = { + "prepare tables", + + "prepare glyphs", + "prepare lookups", + + "analyze glyphs", + "analyze math", + + -- "prepare tounicode", + + "reorganize lookups", + "reorganize mark classes", + "reorganize anchor classes", + + "reorganize glyph kerns", + "reorganize glyph lookups", + "reorganize glyph anchors", + + "merge kern classes", + + "reorganize features", + "reorganize subtables", + + "check glyphs", + "check metadata", + "check extra features", -- after metadata + + "prepare tounicode", + + "check encoding", -- moved + "add duplicates", + + "cleanup tables", + + "compact lookups", + "purge names", +} + +--[[ldx-- +
+<p>Here we go.</p>
+--ldx]]-- + +local actions = allocate() +local before = allocate() +local after = allocate() + +patches.before = before +patches.after = after + +local function enhance(name,data,filename,raw) + local enhancer = actions[name] + if enhancer then + if trace_loading then + report_otf("apply enhancement %a to file %a",name,filename) + ioflush() + end + enhancer(data,filename,raw) + else + -- no message as we can have private ones + end +end + +function enhancers.apply(data,filename,raw) + local basename = file.basename(lower(filename)) + if trace_loading then + report_otf("%s enhancing file %a","start",filename) + end + ioflush() -- we want instant messages + for e=1,#ordered_enhancers do + local enhancer = ordered_enhancers[e] + local b = before[enhancer] + if b then + for pattern, action in next, b do + if find(basename,pattern) then + action(data,filename,raw) + end + end + end + enhance(enhancer,data,filename,raw) + local a = after[enhancer] + if a then + for pattern, action in next, a do + if find(basename,pattern) then + action(data,filename,raw) + end + end + end + ioflush() -- we want instant messages + end + if trace_loading then + report_otf("%s enhancing file %a","stop",filename) + end + ioflush() -- we want instant messages +end + +-- patches.register("before","migrate metadata","cambria",function() end) + +function patches.register(what,where,pattern,action) + local pw = patches[what] + if pw then + local ww = pw[where] + if ww then + ww[pattern] = action + else + pw[where] = { [pattern] = action} + end + end +end + +function patches.report(fmt,...) + if trace_loading then + report_otf("patching: %s",formatters[fmt](...)) + end +end + +function enhancers.register(what,action) -- only already registered can be overloaded + actions[what] = action +end + +function otf.load(filename,sub,featurefile) -- second argument (format) is gone ! + local base = file.basename(file.removesuffix(filename)) + local name = file.removesuffix(base) + local attr = lfs.attributes(filename) + local size = attr and attr.size or 0 + local time = attr and attr.modification or 0 + if featurefile then + name = name .. "@" .. file.removesuffix(file.basename(featurefile)) + end + if sub == "" then + sub = false + end + local hash = name + if sub then + hash = hash .. "-" .. 
sub + end + hash = containers.cleanname(hash) + local featurefiles + if featurefile then + featurefiles = { } + for s in gmatch(featurefile,"[^,]+") do + local name = resolvers.findfile(file.addsuffix(s,'fea'),'fea') or "" + if name == "" then + report_otf("loading error, no featurefile %a",s) + else + local attr = lfs.attributes(name) + featurefiles[#featurefiles+1] = { + name = name, + size = attr and attr.size or 0, + time = attr and attr.modification or 0, + } + end + end + if #featurefiles == 0 then + featurefiles = nil + end + end + local data = containers.read(otf.cache,hash) + local reload = not data or data.size ~= size or data.time ~= time + if forceload then + report_otf("forced reload of %a due to hard coded flag",filename) + reload = true + end + if not reload then + local featuredata = data.featuredata + if featurefiles then + if not featuredata or #featuredata ~= #featurefiles then + reload = true + else + for i=1,#featurefiles do + local fi, fd = featurefiles[i], featuredata[i] + if fi.name ~= fd.name or fi.size ~= fd.size or fi.time ~= fd.time then + reload = true + break + end + end + end + elseif featuredata then + reload = true + end + if reload then + report_otf("loading: forced reload due to changed featurefile specification %a",featurefile) + end + end + if reload then + report_otf("loading %a, hash %a",filename,hash) + local fontdata, messages + if sub then + fontdata, messages = fontloader.open(filename,sub) + else + fontdata, messages = fontloader.open(filename) + end + if fontdata then + mainfields = mainfields or (fontloaderfields and fontloaderfields(fontdata)) + end + if trace_loading and messages and #messages > 0 then + if type(messages) == "string" then + report_otf("warning: %s",messages) + else + for m=1,#messages do + report_otf("warning: %S",messages[m]) + end + end + else + report_otf("loading done") + end + if fontdata then + if featurefiles then + for i=1,#featurefiles do + load_featurefile(fontdata,featurefiles[i].name) + end + end + local unicodes = { + -- names to unicodes + } + local splitter = lpeg.splitter(" ",unicodes) + data = { + size = size, + time = time, + format = otf_format(filename), + featuredata = featurefiles, + resources = { + filename = resolvers.unresolve(filename), -- no shortcut + version = otf.version, + creator = "context mkiv", + unicodes = unicodes, + indices = { + -- index to unicodes + }, + duplicates = { + -- alternative unicodes + }, + variants = { + -- alternative unicodes (variants) + }, + lookuptypes = { + }, + }, + warnings = { + }, + metadata = { + -- raw metadata, not to be used + }, + properties = { + -- normalized metadata + }, + descriptions = { + }, + goodies = { + }, + helpers = { -- might go away + tounicodelist = splitter, + tounicodetable = Ct(splitter), + }, + } + starttiming(data) + report_otf("file size: %s", size) + enhancers.apply(data,filename,fontdata) + local packtime = { } + if packdata then + if cleanup > 0 then + collectgarbage("collect") + end + starttiming(packtime) + enhance("pack",data,filename,nil) + stoptiming(packtime) + end + report_otf("saving %a in cache",filename) + data = containers.write(otf.cache, hash, data) + if cleanup > 1 then + collectgarbage("collect") + end + stoptiming(data) + if elapsedtime then -- not in generic + report_otf("preprocessing and caching time %s, packtime %s", + elapsedtime(data),packdata and elapsedtime(packtime) or 0) + end + fontloader.close(fontdata) -- free memory + if cleanup > 3 then + collectgarbage("collect") + end + data = 
containers.read(otf.cache, hash) -- this frees the old table and load the sparse one + if cleanup > 2 then + collectgarbage("collect") + end + else + data = nil + report_otf("loading failed due to read error") + end + end + if data then + if trace_defining then + report_otf("loading from cache using hash %a",hash) + end + enhance("unpack",data,filename,nil,false) + -- + local resources = data.resources + local lookuptags = resources.lookuptags + local unicodes = resources.unicodes + if not lookuptags then + lookuptags = { } + resources.lookuptags = lookuptags + end + setmetatableindex(lookuptags,function(t,k) + local v = type(k) == "number" and ("lookup " .. k) or k + t[k] = v + return v + end) + if not unicodes then + unicodes = { } + resources.unicodes = unicodes + setmetatableindex(unicodes,function(t,k) + -- use rawget when no table has to be built + setmetatableindex(unicodes,nil) + for u, d in next, data.descriptions do + local n = d.name + if n then + t[n] = u + -- report_otf("accessing known name %a",k) + else + -- report_otf("accessing unknown name %a",k) + end + end + return rawget(t,k) + end) + end + constructors.addcoreunicodes(unicodes) -- do we really need this? + -- + if applyruntimefixes then + applyruntimefixes(filename,data) + end + enhance("add dimensions",data,filename,nil,false) + if trace_sequences then + showfeatureorder(data,filename) + end + end + return data +end + +local mt = { + __index = function(t,k) -- maybe set it + if k == "height" then + local ht = t.boundingbox[4] + return ht < 0 and 0 or ht + elseif k == "depth" then + local dp = -t.boundingbox[2] + return dp < 0 and 0 or dp + elseif k == "width" then + return 0 + elseif k == "name" then -- or maybe uni* + return forcenotdef and ".notdef" + end + end +} + +actions["prepare tables"] = function(data,filename,raw) + data.properties.hasitalics = false +end + +actions["add dimensions"] = function(data,filename) + -- todo: forget about the width if it's the defaultwidth (saves mem) + -- we could also build the marks hash here (instead of storing it) + if data then + local descriptions = data.descriptions + local resources = data.resources + local defaultwidth = resources.defaultwidth or 0 + local defaultheight = resources.defaultheight or 0 + local defaultdepth = resources.defaultdepth or 0 + local basename = trace_markwidth and file.basename(filename) + for _, d in next, descriptions do + local bb, wd = d.boundingbox, d.width + if not wd then + -- or bb? 
+ d.width = defaultwidth + elseif trace_markwidth and wd ~= 0 and d.class == "mark" then + report_otf("mark %a with width %b found in %a",d.name or "",wd,basename) + -- d.width = -wd + end + -- if forcenotdef and not d.name then + -- d.name = ".notdef" + -- end + if bb then + local ht, dp = bb[4], -bb[2] + if ht == 0 or ht < 0 then + -- not set + else + d.height = ht + end + if dp == 0 or dp < 0 then + -- not set + else + d.depth = dp + end + end + end + end +end + +local function somecopy(old) -- fast one + if old then + local new = { } + if type(old) == "table" then + for k, v in next, old do + if k == "glyphs" then + -- skip + elseif type(v) == "table" then + new[k] = somecopy(v) + else + new[k] = v + end + end + else + for i=1,#mainfields do + local k = mainfields[i] + local v = old[k] + if k == "glyphs" then + -- skip + elseif type(v) == "table" then + new[k] = somecopy(v) + else + new[k] = v + end + end + end + return new + else + return { } + end +end + +-- not setting hasitalics and class (when nil) during table cronstruction can save some mem + +actions["prepare glyphs"] = function(data,filename,raw) + local rawglyphs = raw.glyphs + local rawsubfonts = raw.subfonts + local rawcidinfo = raw.cidinfo + local criterium = constructors.privateoffset + local private = criterium + local resources = data.resources + local metadata = data.metadata + local properties = data.properties + local descriptions = data.descriptions + local unicodes = resources.unicodes -- name to unicode + local indices = resources.indices -- index to unicode + local duplicates = resources.duplicates + local variants = resources.variants + + if rawsubfonts then + + metadata.subfonts = includesubfonts and { } + properties.cidinfo = rawcidinfo + + if rawcidinfo.registry then + local cidmap = fonts.cid.getmap(rawcidinfo) + if cidmap then + rawcidinfo.usedname = cidmap.usedname + local nofnames, nofunicodes = 0, 0 + local cidunicodes, cidnames = cidmap.unicodes, cidmap.names + for cidindex=1,#rawsubfonts do + local subfont = rawsubfonts[cidindex] + local cidglyphs = subfont.glyphs + if includesubfonts then + metadata.subfonts[cidindex] = somecopy(subfont) + end + -- we have delayed loading so we cannot use next + for index=0,subfont.glyphcnt-1 do -- we could take the previous glyphcnt instead of 0 + local glyph = cidglyphs[index] + if glyph then + local unicode = glyph.unicode + if unicode >= 0x00E000 and unicode <= 0x00F8FF then + unicode = -1 + elseif unicode >= 0x0F0000 and unicode <= 0x0FFFFD then + unicode = -1 + elseif unicode >= 0x100000 and unicode <= 0x10FFFD then + unicode = -1 + end + local name = glyph.name or cidnames[index] + if not unicode or unicode == -1 then -- or unicode >= criterium then + unicode = cidunicodes[index] + end + if unicode and descriptions[unicode] then + if trace_private then + report_otf("preventing glyph %a at index %H to overload unicode %U",name or "noname",index,unicode) + end + unicode = -1 + end + if not unicode or unicode == -1 then -- or unicode >= criterium then + if not name then + name = format("u%06X.ctx",private) + end + unicode = private + unicodes[name] = private + if trace_private then + report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private) + end + private = private + 1 + nofnames = nofnames + 1 + else + -- if unicode > criterium then + -- local taken = descriptions[unicode] + -- if taken then + -- private = private + 1 + -- descriptions[private] = taken + -- unicodes[taken.name] = private + -- indices[taken.index] = private + -- if 
trace_private then + -- report_otf("slot %U is moved to %U due to private in font",unicode) + -- end + -- end + -- end + if not name then + name = format("u%06X.ctx",unicode) + end + unicodes[name] = unicode + nofunicodes = nofunicodes + 1 + end + indices[index] = unicode -- each index is unique (at least now) + local description = { + -- width = glyph.width, + boundingbox = glyph.boundingbox, + name = glyph.name or name or "unknown", -- uniXXXX + cidindex = cidindex, + index = index, + glyph = glyph, + } + descriptions[unicode] = description + else + -- report_otf("potential problem: glyph %U is used but empty",index) + end + end + end + if trace_loading then + report_otf("cid font remapped, %s unicode points, %s symbolic names, %s glyphs",nofunicodes, nofnames, nofunicodes+nofnames) + end + elseif trace_loading then + report_otf("unable to remap cid font, missing cid file for %a",filename) + end + elseif trace_loading then + report_otf("font %a has no glyphs",filename) + end + + else + + for index=0,raw.glyphcnt-1 do -- not raw.glyphmax-1 (as that will crash) + local glyph = rawglyphs[index] + if glyph then + local unicode = glyph.unicode + local name = glyph.name + if not unicode or unicode == -1 then -- or unicode >= criterium then + unicode = private + unicodes[name] = private + if trace_private then + report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private) + end + private = private + 1 + else + -- We have a font that uses and exposes the private area. As this is rather unreliable it's + -- advised no to trust slots here (better use glyphnames). Anyway, we need a double check: + -- we need to move already moved entries and we also need to bump the next private to after + -- the (currently) last slot. This could leave us with a hole but we have holes anyway. + if unicode > criterium then + -- \definedfont[file:HANBatang-LVT.ttf] \fontchar{uF0135} \char"F0135 + local taken = descriptions[unicode] + if taken then + if unicode >= private then + private = unicode + 1 -- restart private (so we can have mixed now) + else + private = private + 1 -- move on + end + descriptions[private] = taken + unicodes[taken.name] = private + indices[taken.index] = private + if trace_private then + report_otf("slot %U is moved to %U due to private in font",unicode) + end + else + if unicode >= private then + private = unicode + 1 -- restart (so we can have mixed now) + end + end + end + unicodes[name] = unicode + end + indices[index] = unicode + -- if not name then + -- name = format("u%06X",unicode) -- u%06X.ctx + -- end + descriptions[unicode] = { + -- width = glyph.width, + boundingbox = glyph.boundingbox, + name = name, + index = index, + glyph = glyph, + } + local altuni = glyph.altuni + if altuni then + -- local d + for i=1,#altuni do + local a = altuni[i] + local u = a.unicode + local v = a.variant + if v then + -- tricky: no addition to d? needs checking but in practice such dups are either very simple + -- shapes or e.g cjk with not that many features + local vv = variants[v] + if vv then + vv[u] = unicode + else -- xits-math has some: + vv = { [u] = unicode } + variants[v] = vv + end + -- elseif d then + -- d[#d+1] = u + -- else + -- d = { u } + end + end + -- if d then + -- duplicates[unicode] = d -- is this needed ? 
+ -- end + end + else + report_otf("potential problem: glyph %U is used but empty",index) + end + end + + end + + resources.private = private + +end + +-- the next one is still messy but will get better when we have +-- flattened map/enc tables in the font loader + +-- the next one is not using a valid base for unicode privates +-- +-- PsuedoEncodeUnencoded(EncMap *map,struct ttfinfo *info) + +actions["check encoding"] = function(data,filename,raw) + local descriptions = data.descriptions + local resources = data.resources + local properties = data.properties + local unicodes = resources.unicodes -- name to unicode + local indices = resources.indices -- index to unicodes + local duplicates = resources.duplicates + + -- begin of messy (not needed when cidmap) + + local mapdata = raw.map or { } + local unicodetoindex = mapdata and mapdata.map or { } + local indextounicode = mapdata and mapdata.backmap or { } + -- local encname = lower(data.enc_name or raw.enc_name or mapdata.enc_name or "") + local encname = lower(data.enc_name or mapdata.enc_name or "") + local criterium = 0xFFFF -- for instance cambria has a lot of mess up there + local privateoffset = constructors.privateoffset + + -- end of messy + + if find(encname,"unicode") then -- unicodebmp, unicodefull, ... + if trace_loading then + report_otf("checking embedded unicode map %a",encname) + end + local reported = { } + -- we loop over the original unicode->index mapping but we + -- need to keep in mind that that one can have weird entries + -- so we need some extra checking + for maybeunicode, index in next, unicodetoindex do + if descriptions[maybeunicode] then + -- we ignore invalid unicodes (unicode = -1) (ff can map wrong to non private) + else + local unicode = indices[index] + if not unicode then + -- weird (cjk or so?) + elseif maybeunicode == unicode then + -- no need to add + elseif unicode > privateoffset then + -- we have a non-unicode + else + local d = descriptions[unicode] + if d then + local c = d.copies + if c then + c[maybeunicode] = true + else + d.copies = { [maybeunicode] = true } + end + elseif index and not reported[index] then + report_otf("missing index %i",index) + reported[index] = true + end + end + end + end + for unicode, data in next, descriptions do + local d = data.copies + if d then + duplicates[unicode] = sortedkeys(d) + data.copies = nil + end + end + elseif properties.cidinfo then + report_otf("warning: no unicode map, used cidmap %a",properties.cidinfo.usedname) + else + report_otf("warning: non unicode map %a, only using glyph unicode data",encname or "whatever") + end + + if mapdata then + mapdata.map = { } -- clear some memory + mapdata.backmap = { } -- clear some memory + end +end + +-- for the moment we assume that a font with lookups will not use +-- altuni so we stick to kerns only .. alternatively we can always +-- do an indirect lookup uni_to_uni . 
but then we need that in +-- all lookups + +actions["add duplicates"] = function(data,filename,raw) + local descriptions = data.descriptions + local resources = data.resources + local properties = data.properties + local unicodes = resources.unicodes -- name to unicode + local indices = resources.indices -- index to unicodes + local duplicates = resources.duplicates + + for unicode, d in next, duplicates do + local nofduplicates = #d + if nofduplicates > 4 then + if trace_loading then + report_otf("ignoring excessive duplicates of %U (n=%s)",unicode,nofduplicates) + end + else + -- local validduplicates = { } + for i=1,nofduplicates do + local u = d[i] + if not descriptions[u] then + local description = descriptions[unicode] + local n = 0 + for _, description in next, descriptions do + if kerns then + local kerns = description.kerns + for _, k in next, kerns do + local ku = k[unicode] + if ku then + k[u] = ku + n = n + 1 + end + end + end + -- todo: lookups etc + end + if u > 0 then -- and + local duplicate = table.copy(description) -- else packing problem + duplicate.comment = format("copy of U+%05X", unicode) + descriptions[u] = duplicate + -- validduplicates[#validduplicates+1] = u + if trace_loading then + report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n) + end + end + end + end + -- duplicates[unicode] = #validduplicates > 0 and validduplicates or nil + end + end +end + +-- class : nil base mark ligature component (maybe we don't need it in description) +-- boundingbox: split into ht/dp takes more memory (larger tables and less sharing) + +actions["analyze glyphs"] = function(data,filename,raw) -- maybe integrate this in the previous + local descriptions = data.descriptions + local resources = data.resources + local metadata = data.metadata + local properties = data.properties + local hasitalics = false + local widths = { } + local marks = { } -- always present (saves checking) + for unicode, description in next, descriptions do + local glyph = description.glyph + local italic = glyph.italic_correction + if not italic then + -- skip + elseif italic == 0 then + -- skip + else + description.italic = italic + hasitalics = true + end + local width = glyph.width + widths[width] = (widths[width] or 0) + 1 + local class = glyph.class + if class then + if class == "mark" then + marks[unicode] = true + end + description.class = class + end + end + -- flag italic + properties.hasitalics = hasitalics + -- flag marks + resources.marks = marks + -- share most common width for cjk fonts + local wd, most = 0, 1 + for k,v in next, widths do + if v > most then + wd, most = k, v + end + end + if most > 1000 then -- maybe 500 + if trace_loading then + report_otf("most common width: %s (%s times), sharing (cjk font)",wd,most) + end + for unicode, description in next, descriptions do + if description.width == wd then + -- description.width = nil + else + description.width = description.glyph.width + end + end + resources.defaultwidth = wd + else + for unicode, description in next, descriptions do + description.width = description.glyph.width + end + end +end + +actions["reorganize mark classes"] = function(data,filename,raw) + local mark_classes = raw.mark_classes + if mark_classes then + local resources = data.resources + local unicodes = resources.unicodes + local markclasses = { } + resources.markclasses = markclasses -- reversed + for name, class in next, mark_classes do + local t = { } + for s in gmatch(class,"[^ ]+") do + t[unicodes[s]] = true + end + 
markclasses[name] = t + end + end +end + +actions["reorganize features"] = function(data,filename,raw) -- combine with other + local features = { } + data.resources.features = features + for k, what in next, otf.glists do + local dw = raw[what] + if dw then + local f = { } + features[what] = f + for i=1,#dw do + local d= dw[i] + local dfeatures = d.features + if dfeatures then + for i=1,#dfeatures do + local df = dfeatures[i] + local tag = strip(lower(df.tag)) + local ft = f[tag] + if not ft then + ft = { } + f[tag] = ft + end + local dscripts = df.scripts + for i=1,#dscripts do + local d = dscripts[i] + local languages = d.langs + local script = strip(lower(d.script)) + local fts = ft[script] if not fts then fts = {} ft[script] = fts end + for i=1,#languages do + fts[strip(lower(languages[i]))] = true + end + end + end + end + end + end + end +end + +actions["reorganize anchor classes"] = function(data,filename,raw) + local resources = data.resources + local anchor_to_lookup = { } + local lookup_to_anchor = { } + resources.anchor_to_lookup = anchor_to_lookup + resources.lookup_to_anchor = lookup_to_anchor + local classes = raw.anchor_classes -- anchor classes not in final table + if classes then + for c=1,#classes do + local class = classes[c] + local anchor = class.name + local lookups = class.lookup + if type(lookups) ~= "table" then + lookups = { lookups } + end + local a = anchor_to_lookup[anchor] + if not a then + a = { } + anchor_to_lookup[anchor] = a + end + for l=1,#lookups do + local lookup = lookups[l] + local l = lookup_to_anchor[lookup] + if l then + l[anchor] = true + else + l = { [anchor] = true } + lookup_to_anchor[lookup] = l + end + a[lookup] = true + end + end + end +end + +actions["prepare tounicode"] = function(data,filename,raw) + fonts.mappings.addtounicode(data,filename) +end + +local g_directions = { + gsub_contextchain = 1, + gpos_contextchain = 1, + -- gsub_context = 1, + -- gpos_context = 1, + gsub_reversecontextchain = -1, + gpos_reversecontextchain = -1, +} +-- The following is no longer needed as AAT is ignored per end October 2013. +-- +-- -- Research by Khaled Hosny has demonstrated that the font loader merges +-- -- regular and AAT features and that these can interfere (especially because +-- -- we dropped checking for valid features elsewhere. So, we just check for +-- -- the special flag and drop the feature if such a tag is found. +-- +-- local function supported(features) +-- for i=1,#features do +-- if features[i].ismac then +-- return false +-- end +-- end +-- return true +-- end + +actions["reorganize subtables"] = function(data,filename,raw) + local resources = data.resources + local sequences = { } + local lookups = { } + local chainedfeatures = { } + resources.sequences = sequences + resources.lookups = lookups + for _, what in next, otf.glists do + local dw = raw[what] + if dw then + for k=1,#dw do + local gk = dw[k] + local features = gk.features + -- if not features or supported(features) then -- not always features ! 
+ local typ = gk.type + local chain = g_directions[typ] or 0 + local subtables = gk.subtables + if subtables then + local t = { } + for s=1,#subtables do + t[s] = subtables[s].name + end + subtables = t + end + local flags, markclass = gk.flags, nil + if flags then + local t = { -- forcing false packs nicer + (flags.ignorecombiningmarks and "mark") or false, + (flags.ignoreligatures and "ligature") or false, + (flags.ignorebaseglyphs and "base") or false, + flags.r2l or false, + } + markclass = flags.mark_class + if markclass then + markclass = resources.markclasses[markclass] + end + flags = t + end + -- + local name = gk.name + -- + if not name then + -- in fact an error + report_otf("skipping weird lookup number %s",k) + elseif features then + -- scripts, tag, ismac + local f = { } + local o = { } + for i=1,#features do + local df = features[i] + local tag = strip(lower(df.tag)) + local ft = f[tag] + if not ft then + ft = { } + f[tag] = ft + o[#o+1] = tag + end + local dscripts = df.scripts + for i=1,#dscripts do + local d = dscripts[i] + local languages = d.langs + local script = strip(lower(d.script)) + local fts = ft[script] if not fts then fts = {} ft[script] = fts end + for i=1,#languages do + fts[strip(lower(languages[i]))] = true + end + end + end + sequences[#sequences+1] = { + type = typ, + chain = chain, + flags = flags, + name = name, + subtables = subtables, + markclass = markclass, + features = f, + order = o, + } + else + lookups[name] = { + type = typ, + chain = chain, + flags = flags, + subtables = subtables, + markclass = markclass, + } + end + -- end + end + end + end +end + +-- test this: +-- +-- for _, what in next, otf.glists do +-- raw[what] = nil +-- end + +actions["prepare lookups"] = function(data,filename,raw) + local lookups = raw.lookups + if lookups then + data.lookups = lookups + end +end + +-- The reverse handler does a bit redundant splitting but it's seldom +-- seen so we don't bother too much. We could store the replacement +-- in the current list (value instead of true) but it makes other code +-- uglier. Maybe some day. 
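
The helpers that follow (t_uncover, s_uncover, t_hashed) all share one pattern: split a space-separated cover string once, cache the result per string, and turn the resulting list into a hash for fast membership tests. As a rough standalone illustration only (not taken from the patched file; it uses string.gmatch where the real helpers use an lpeg splitter that also resolves glyph names to unicodes), the pattern might look like:

    -- split-once-and-cache sketch; names and data are made up
    local cache = { }

    local function uncover(cover)      -- "f f_i fl" -> { "f", "f_i", "fl" }, cached per string
        local result = cache[cover]
        if not result then
            result = { }
            for name in string.gmatch(cover,"%S+") do
                result[#result+1] = name
            end
            cache[cover] = result
        end
        return result
    end

    local function hashed(list)        -- { "f", "f_i" } -> { f = true, f_i = true }
        local hash = { }
        for i=1,#list do
            hash[list[i]] = true
        end
        return hash
    end

    print(hashed(uncover("f f_i fl")).f_i)  -- true
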
+ +local function t_uncover(splitter,cache,covers) + local result = { } + for n=1,#covers do + local cover = covers[n] + local uncovered = cache[cover] + if not uncovered then + uncovered = lpegmatch(splitter,cover) + cache[cover] = uncovered + end + result[n] = uncovered + end + return result +end + +local function s_uncover(splitter,cache,cover) + if cover == "" then + return nil + else + local uncovered = cache[cover] + if not uncovered then + uncovered = lpegmatch(splitter,cover) + -- for i=1,#uncovered do + -- uncovered[i] = { [uncovered[i]] = true } + -- end + cache[cover] = uncovered + end + return { uncovered } + end +end + +local function t_hashed(t,cache) + if t then + local ht = { } + for i=1,#t do + local ti = t[i] + local tih = cache[ti] + if not tih then + local tn = #ti + if tn == 1 then + tih = { [ti[1]] = true } + else + tih = { } + for i=1,tn do + tih[ti[i]] = true + end + end + cache[ti] = tih + end + ht[i] = tih + end + return ht + else + return nil + end +end + +-- local s_hashed = t_hashed + +local function s_hashed(t,cache) + if t then + local tf = t[1] + local nf = #tf + if nf == 1 then + return { [tf[1]] = true } + else + local ht = { } + for i=1,nf do + ht[i] = { [tf[i]] = true } + end + return ht + end + else + return nil + end +end + +local function r_uncover(splitter,cache,cover,replacements) + if cover == "" then + return nil + else + -- we always have current as { } even in the case of one + local uncovered = cover[1] + local replaced = cache[replacements] + if not replaced then + replaced = lpegmatch(splitter,replacements) + cache[replacements] = replaced + end + local nu, nr = #uncovered, #replaced + local r = { } + if nu == nr then + for i=1,nu do + r[uncovered[i]] = replaced[i] + end + end + return r + end +end + +actions["reorganize lookups"] = function(data,filename,raw) -- we could check for "" and n == 0 + -- we prefer the before lookups in a normal order + if data.lookups then + local splitter = data.helpers.tounicodetable + local t_u_cache = { } + local s_u_cache = t_u_cache -- string keys + local t_h_cache = { } + local s_h_cache = t_h_cache -- table keys (so we could use one cache) + local r_u_cache = { } -- maybe shared + for _, lookup in next, data.lookups do + local rules = lookup.rules + if rules then + local format = lookup.format + if format == "class" then + local before_class = lookup.before_class + if before_class then + before_class = t_uncover(splitter,t_u_cache,reversed(before_class)) + end + local current_class = lookup.current_class + if current_class then + current_class = t_uncover(splitter,t_u_cache,current_class) + end + local after_class = lookup.after_class + if after_class then + after_class = t_uncover(splitter,t_u_cache,after_class) + end + for i=1,#rules do + local rule = rules[i] + local class = rule.class + local before = class.before + if before then + for i=1,#before do + before[i] = before_class[before[i]] or { } + end + rule.before = t_hashed(before,t_h_cache) + end + local current = class.current + local lookups = rule.lookups + if current then + for i=1,#current do + current[i] = current_class[current[i]] or { } + -- let's not be sparse + if lookups and not lookups[i] then + lookups[i] = "" -- (was: false) e.g. 
we can have two lookups and one replacement + end + -- end of fix + end + rule.current = t_hashed(current,t_h_cache) + end + local after = class.after + if after then + for i=1,#after do + after[i] = after_class[after[i]] or { } + end + rule.after = t_hashed(after,t_h_cache) + end + rule.class = nil + end + lookup.before_class = nil + lookup.current_class = nil + lookup.after_class = nil + lookup.format = "coverage" + elseif format == "coverage" then + for i=1,#rules do + local rule = rules[i] + local coverage = rule.coverage + if coverage then + local before = coverage.before + if before then + before = t_uncover(splitter,t_u_cache,reversed(before)) + rule.before = t_hashed(before,t_h_cache) + end + local current = coverage.current + if current then + current = t_uncover(splitter,t_u_cache,current) + -- let's not be sparse + local lookups = rule.lookups + if lookups then + for i=1,#current do + if not lookups[i] then + lookups[i] = "" -- fix sparse array + end + end + end + -- + rule.current = t_hashed(current,t_h_cache) + end + local after = coverage.after + if after then + after = t_uncover(splitter,t_u_cache,after) + rule.after = t_hashed(after,t_h_cache) + end + rule.coverage = nil + end + end + elseif format == "reversecoverage" then -- special case, single substitution only + for i=1,#rules do + local rule = rules[i] + local reversecoverage = rule.reversecoverage + if reversecoverage then + local before = reversecoverage.before + if before then + before = t_uncover(splitter,t_u_cache,reversed(before)) + rule.before = t_hashed(before,t_h_cache) + end + local current = reversecoverage.current + if current then + current = t_uncover(splitter,t_u_cache,current) + rule.current = t_hashed(current,t_h_cache) + end + local after = reversecoverage.after + if after then + after = t_uncover(splitter,t_u_cache,after) + rule.after = t_hashed(after,t_h_cache) + end + local replacements = reversecoverage.replacements + if replacements then + rule.replacements = r_uncover(splitter,r_u_cache,current,replacements) + end + rule.reversecoverage = nil + end + end + elseif format == "glyphs" then + -- I could store these more efficient (as not we use a nested tables for before, + -- after and current but this features happens so seldom that I don't bother + -- about it right now. 
+ for i=1,#rules do + local rule = rules[i] + local glyphs = rule.glyphs + if glyphs then + local fore = glyphs.fore + if fore and fore ~= "" then + fore = s_uncover(splitter,s_u_cache,fore) + rule.after = s_hashed(fore,s_h_cache) + end + local back = glyphs.back + if back then + back = s_uncover(splitter,s_u_cache,back) + rule.before = s_hashed(back,s_h_cache) + end + local names = glyphs.names + if names then + names = s_uncover(splitter,s_u_cache,names) + rule.current = s_hashed(names,s_h_cache) + end + rule.glyphs = nil + local lookups = rule.lookups + if lookups then + for i=1,#names do + if not lookups[i] then + lookups[i] = "" -- fix sparse array + end + end + end + end + end + end + end + end + end +end + +local function check_variants(unicode,the_variants,splitter,unicodes) + local variants = the_variants.variants + if variants then -- use splitter + local glyphs = lpegmatch(splitter,variants) + local done = { [unicode] = true } + local n = 0 + for i=1,#glyphs do + local g = glyphs[i] + if done[g] then + if i > 1 then + report_otf("skipping cyclic reference %U in math variant %U",g,unicode) + end + else + if n == 0 then + n = 1 + variants = { g } + else + n = n + 1 + variants[n] = g + end + done[g] = true + end + end + if n == 0 then + variants = nil + end + end + local parts = the_variants.parts + if parts then + local p = #parts + if p > 0 then + for i=1,p do + local pi = parts[i] + pi.glyph = unicodes[pi.component] or 0 + pi.component = nil + end + else + parts = nil + end + end + local italic_correction = the_variants.italic_correction + if italic_correction and italic_correction == 0 then + italic_correction = nil + end + return variants, parts, italic_correction +end + +actions["analyze math"] = function(data,filename,raw) + if raw.math then + data.metadata.math = raw.math + local unicodes = data.resources.unicodes + local splitter = data.helpers.tounicodetable + for unicode, description in next, data.descriptions do + local glyph = description.glyph + local mathkerns = glyph.mathkern -- singular + local horiz_variants = glyph.horiz_variants + local vert_variants = glyph.vert_variants + local top_accent = glyph.top_accent + if mathkerns or horiz_variants or vert_variants or top_accent then + local math = { } + if top_accent then + math.top_accent = top_accent + end + if mathkerns then + for k, v in next, mathkerns do + if not next(v) then + mathkerns[k] = nil + else + for k, v in next, v do + if v == 0 then + k[v] = nil -- height / kern can be zero + end + end + end + end + math.kerns = mathkerns + end + if horiz_variants then + math.horiz_variants, math.horiz_parts, math.horiz_italic_correction = check_variants(unicode,horiz_variants,splitter,unicodes) + end + if vert_variants then + math.vert_variants, math.vert_parts, math.vert_italic_correction = check_variants(unicode,vert_variants,splitter,unicodes) + end + local italic_correction = description.italic + if italic_correction and italic_correction ~= 0 then + math.italic_correction = italic_correction + end + description.math = math + end + end + end +end + +actions["reorganize glyph kerns"] = function(data,filename,raw) + local descriptions = data.descriptions + local resources = data.resources + local unicodes = resources.unicodes + for unicode, description in next, descriptions do + local kerns = description.glyph.kerns + if kerns then + local newkerns = { } + for k, kern in next, kerns do + local name = kern.char + local offset = kern.off + local lookup = kern.lookup + if name and offset and lookup then + local 
unicode = unicodes[name] + if unicode then + if type(lookup) == "table" then + for l=1,#lookup do + local lookup = lookup[l] + local lookupkerns = newkerns[lookup] + if lookupkerns then + lookupkerns[unicode] = offset + else + newkerns[lookup] = { [unicode] = offset } + end + end + else + local lookupkerns = newkerns[lookup] + if lookupkerns then + lookupkerns[unicode] = offset + else + newkerns[lookup] = { [unicode] = offset } + end + end + elseif trace_loading then + report_otf("problems with unicode %a of kern %a of glyph %U",name,k,unicode) + end + end + end + description.kerns = newkerns + end + end +end + +actions["merge kern classes"] = function(data,filename,raw) + local gposlist = raw.gpos + if gposlist then + local descriptions = data.descriptions + local resources = data.resources + local unicodes = resources.unicodes + local splitter = data.helpers.tounicodetable + local ignored = 0 + local blocked = 0 + for gp=1,#gposlist do + local gpos = gposlist[gp] + local subtables = gpos.subtables + if subtables then + local first_done = { } -- could become an option so that we can deal with buggy fonts that don't get fixed + local split = { } -- saves time .. although probably not that much any more in the fixed luatex kernclass table + for s=1,#subtables do + local subtable = subtables[s] + local kernclass = subtable.kernclass -- name is inconsistent with anchor_classes + local lookup = subtable.lookup or subtable.name + if kernclass then -- the next one is quite slow + if #kernclass > 0 then + kernclass = kernclass[1] + lookup = type(kernclass.lookup) == "string" and kernclass.lookup or lookup + report_otf("fixing kernclass table of lookup %a",lookup) + end + local firsts = kernclass.firsts + local seconds = kernclass.seconds + local offsets = kernclass.offsets + -- if offsets[1] == nil then + -- offsets[1] = "" -- defaults ? + -- end + for n, s in next, firsts do + split[s] = split[s] or lpegmatch(splitter,s) + end + local maxseconds = 0 + for n, s in next, seconds do + if n > maxseconds then + maxseconds = n + end + split[s] = split[s] or lpegmatch(splitter,s) + end + for fk=1,#firsts do -- maxfirsts ? + local fv = firsts[fk] + local splt = split[fv] + if splt then + local extrakerns = { } + local baseoffset = (fk-1) * maxseconds + for sk=2,maxseconds do -- will become 1 based in future luatex + local sv = seconds[sk] + -- for sk, sv in next, seconds do + local splt = split[sv] + if splt then -- redundant test + local offset = offsets[baseoffset + sk] + if offset then + for i=1,#splt do + extrakerns[splt[i]] = offset + end + end + end + end + for i=1,#splt do + local first_unicode = splt[i] + if first_done[first_unicode] then + report_otf("lookup %a: ignoring further kerns of %C",lookup,first_unicode) + blocked = blocked + 1 + else + first_done[first_unicode] = true + local description = descriptions[first_unicode] + if description then + local kerns = description.kerns + if not kerns then + kerns = { } -- unicode indexed ! 
+ description.kerns = kerns + end + local lookupkerns = kerns[lookup] + if not lookupkerns then + lookupkerns = { } + kerns[lookup] = lookupkerns + end + if overloadkerns then + for second_unicode, kern in next, extrakerns do + lookupkerns[second_unicode] = kern + end + else + for second_unicode, kern in next, extrakerns do + local k = lookupkerns[second_unicode] + if not k then + lookupkerns[second_unicode] = kern + elseif k ~= kern then + if trace_loading then + report_otf("lookup %a: ignoring overload of kern between %C and %C, rejecting %a, keeping %a",lookup,first_unicode,second_unicode,k,kern) + end + ignored = ignored + 1 + end + end + end + elseif trace_loading then + report_otf("no glyph data for %U", first_unicode) + end + end + end + end + end + subtable.kernclass = { } + end + end + end + end + if ignored > 0 then + report_otf("%s kern overloads ignored",ignored) + end + if blocked > 0 then + report_otf("%s succesive kerns blocked",blocked) + end + end +end + +actions["check glyphs"] = function(data,filename,raw) + for unicode, description in next, data.descriptions do + description.glyph = nil + end +end + +-- future versions will remove _ + +local valid = (R("\x00\x7E") - S("(){}[]<>%/ \n\r\f\v"))^0 * P(-1) + +local function valid_ps_name(str) + return str and str ~= "" and #str < 64 and lpegmatch(valid,str) and true or false +end + +actions["check metadata"] = function(data,filename,raw) + local metadata = data.metadata + for _, k in next, mainfields do + if valid_fields[k] then + local v = raw[k] + if not metadata[k] then + metadata[k] = v + end + end + end + -- metadata.pfminfo = raw.pfminfo -- not already done? + local ttftables = metadata.ttf_tables + if ttftables then + for i=1,#ttftables do + ttftables[i].data = "deleted" + end + end + -- + if metadata.validation_state and table.contains(metadata.validation_state,"bad_ps_fontname") then + -- the ff library does a bit too much (and wrong) checking ... so we need to catch this + -- at least for now + local function valid(what) + local names = raw.names + for i=1,#names do + local list = names[i] + local names = list.names + if names then + local name = names[what] + if name and valid_ps_name(name) then + return name + end + end + end + end + local function check(what) + local oldname = metadata[what] + if valid_ps_name(oldname) then + report_otf("ignoring warning %a because %s %a is proper ASCII","bad_ps_fontname",what,oldname) + else + local newname = valid(what) + if not newname then + newname = formatters["bad-%s-%s"](what,file.nameonly(filename)) + end + local warning = formatters["overloading %s from invalid ASCII name %a to %a"](what,oldname,newname) + data.warnings[#data.warnings+1] = warning + report_otf(warning) + metadata[what] = newname + end + end + check("fontname") + check("fullname") + end + -- +end + +actions["cleanup tables"] = function(data,filename,raw) + local duplicates = data.resources.duplicates + if duplicates then + for k, v in next, duplicates do + if #v == 1 then + duplicates[k] = v[1] + end + end + end + data.resources.indices = nil -- not needed + data.resources.unicodes = nil -- delayed + data.helpers = nil -- tricky as we have no unicodes any more +end + +-- kern: ttf has a table with kerns +-- +-- Weird, as maxfirst and maxseconds can have holes, first seems to be indexed, but +-- seconds can start at 2 .. this need to be fixed as getn as well as # are sort of +-- unpredictable alternatively we could force an [1] if not set (maybe I will do that +-- anyway). 
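
The "merge kern classes" pass above flattens the class-based kerning delivered by the font loader (lists of first glyphs, lists of second glyphs, and a flat offsets array) into per-glyph kern tables, indexing offsets[(fk-1)*maxseconds + sk] with sk starting at 2, as the comment notes. A small standalone sketch of that indexing, with invented classes and values (not data from any real font):

    -- class kerning flattened into per-glyph pairs; all data is invented
    local firsts     = { "A V", "T" }                -- first classes 1 and 2
    local seconds    = { [2] = "o e", [3] = "y" }    -- second classes start at index 2
    local maxseconds = 3
    local offsets    = { 0, -80, -60, 0, -70, -90 }  -- row-major: (fk-1)*maxseconds + sk

    local kerns = { }                                -- kerns[first][second] = offset
    for fk=1,#firsts do
        local baseoffset = (fk-1) * maxseconds
        for first in firsts[fk]:gmatch("%S+") do
            local t = kerns[first] or { }
            kerns[first] = t
            for sk=2,maxseconds do
                local offset = offsets[baseoffset+sk]
                if offset and offset ~= 0 then
                    for second in seconds[sk]:gmatch("%S+") do
                        t[second] = offset
                    end
                end
            end
        end
    end

    print(kerns.A.o, kerns.T.y)                      -- -80   -90
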
+ +-- we can share { } as it is never set + +--- ligatures have an extra specification.char entry that we don't use + +actions["reorganize glyph lookups"] = function(data,filename,raw) + local resources = data.resources + local unicodes = resources.unicodes + local descriptions = data.descriptions + local splitter = data.helpers.tounicodelist + + local lookuptypes = resources.lookuptypes + + for unicode, description in next, descriptions do + local lookups = description.glyph.lookups + if lookups then + for tag, lookuplist in next, lookups do + for l=1,#lookuplist do + local lookup = lookuplist[l] + local specification = lookup.specification + local lookuptype = lookup.type + local lt = lookuptypes[tag] + if not lt then + lookuptypes[tag] = lookuptype + elseif lt ~= lookuptype then + report_otf("conflicting lookuptypes, %a points to %a and %a",tag,lt,lookuptype) + end + if lookuptype == "ligature" then + lookuplist[l] = { lpegmatch(splitter,specification.components) } + elseif lookuptype == "alternate" then + lookuplist[l] = { lpegmatch(splitter,specification.components) } + elseif lookuptype == "substitution" then + lookuplist[l] = unicodes[specification.variant] + elseif lookuptype == "multiple" then + lookuplist[l] = { lpegmatch(splitter,specification.components) } + elseif lookuptype == "position" then + lookuplist[l] = { + specification.x or 0, + specification.y or 0, + specification.h or 0, + specification.v or 0 + } + elseif lookuptype == "pair" then + local one = specification.offsets[1] + local two = specification.offsets[2] + local paired = unicodes[specification.paired] + if one then + if two then + lookuplist[l] = { paired, { one.x or 0, one.y or 0, one.h or 0, one.v or 0 }, { two.x or 0, two.y or 0, two.h or 0, two.v or 0 } } + else + lookuplist[l] = { paired, { one.x or 0, one.y or 0, one.h or 0, one.v or 0 } } + end + else + if two then + lookuplist[l] = { paired, { }, { two.x or 0, two.y or 0, two.h or 0, two.v or 0} } -- maybe nil instead of { } + else + lookuplist[l] = { paired } + end + end + end + end + end + local slookups, mlookups + for tag, lookuplist in next, lookups do + if #lookuplist == 1 then + if slookups then + slookups[tag] = lookuplist[1] + else + slookups = { [tag] = lookuplist[1] } + end + else + if mlookups then + mlookups[tag] = lookuplist + else + mlookups = { [tag] = lookuplist } + end + end + end + if slookups then + description.slookups = slookups + end + if mlookups then + description.mlookups = mlookups + end + end + end + +end + +actions["reorganize glyph anchors"] = function(data,filename,raw) -- when we replace inplace we safe entries + local descriptions = data.descriptions + for unicode, description in next, descriptions do + local anchors = description.glyph.anchors + if anchors then + for class, data in next, anchors do + if class == "baselig" then + for tag, specification in next, data do + for i=1,#specification do + local si = specification[i] + specification[i] = { si.x or 0, si.y or 0 } + end + end + else + for tag, specification in next, data do + data[tag] = { specification.x or 0, specification.y or 0 } + end + end + end + description.anchors = anchors + end + end +end + +local bogusname = (P("uni") + P("u")) * R("AF","09")^4 + + (P("index") + P("glyph") + S("Ii") * P("dentity") * P(".")^0) * R("09")^1 +local uselessname = (1-bogusname)^0 * bogusname + +actions["purge names"] = function(data,filename,raw) -- not used yet + if purge_names then + local n = 0 + for u, d in next, data.descriptions do + if lpegmatch(uselessname,d.name) 
then + n = n + 1 + d.name = nil + end + -- d.comment = nil + end + if n > 0 then + report_otf("%s bogus names removed",n) + end + end +end + +actions["compact lookups"] = function(data,filename,raw) + if not compact_lookups then + report_otf("not compacting") + return + end + -- create keyhash + local last = 0 + local tags = table.setmetatableindex({ }, + function(t,k) + last = last + 1 + t[k] = last + return last + end + ) + -- + local descriptions = data.descriptions + local resources = data.resources + -- + for u, d in next, descriptions do + -- + -- -- we can also compact anchors and cursives (basechar basemark baselig mark) + -- + local slookups = d.slookups + if type(slookups) == "table" then + local s = { } + for k, v in next, slookups do + s[tags[k]] = v + end + d.slookups = s + end + -- + local mlookups = d.mlookups + if type(mlookups) == "table" then + local m = { } + for k, v in next, mlookups do + m[tags[k]] = v + end + d.mlookups = m + end + -- + local kerns = d.kerns + if type(kerns) == "table" then + local t = { } + for k, v in next, kerns do + t[tags[k]] = v + end + d.kerns = t + end + end + -- + local lookups = data.lookups + if lookups then + local l = { } + for k, v in next, lookups do + local rules = v.rules + if rules then + for i=1,#rules do + local l = rules[i].lookups + if type(l) == "table" then + for i=1,#l do + l[i] = tags[l[i]] + end + end + end + end + l[tags[k]] = v + end + data.lookups = l + end + -- + local lookups = resources.lookups + if lookups then + local l = { } + for k, v in next, lookups do + local s = v.subtables + if type(s) == "table" then + for i=1,#s do + s[i] = tags[s[i]] + end + end + l[tags[k]] = v + end + resources.lookups = l + end + -- + local sequences = resources.sequences + if sequences then + for i=1,#sequences do + local s = sequences[i] + local n = s.name + if n then + s.name = tags[n] + end + local t = s.subtables + if type(t) == "table" then + for i=1,#t do + t[i] = tags[t[i]] + end + end + end + end + -- + local lookuptypes = resources.lookuptypes + if lookuptypes then + local l = { } + for k, v in next, lookuptypes do + l[tags[k]] = v + end + resources.lookuptypes = l + end + -- + local anchor_to_lookup = resources.anchor_to_lookup + if anchor_to_lookup then + for anchor, lookups in next, anchor_to_lookup do + local l = { } + for lookup, value in next, lookups do + l[tags[lookup]] = value + end + anchor_to_lookup[anchor] = l + end + end + -- + local lookup_to_anchor = resources.lookup_to_anchor + if lookup_to_anchor then + local l = { } + for lookup, value in next, lookup_to_anchor do + l[tags[lookup]] = value + end + resources.lookup_to_anchor = l + end + -- + tags = table.swapped(tags) + -- + report_otf("%s lookup tags compacted",#tags) + -- + resources.lookuptags = tags +end + +-- modes: node, base, none + +function otf.setfeatures(tfmdata,features) + local okay = constructors.initializefeatures("otf",tfmdata,features,trace_features,report_otf) + if okay then + return constructors.collectprocessors("otf",tfmdata,features,trace_features,report_otf) + else + return { } -- will become false + end +end + +-- the first version made a top/mid/not extensible table, now we just +-- pass on the variants data and deal with it in the tfm scaler (there +-- is no longer an extensible table anyway) +-- +-- we cannot share descriptions as virtual fonts might extend them (ok, +-- we could use a cache with a hash +-- +-- we already assing an empty tabel to characters as we can add for +-- instance protruding info and loop over characters; one 
is not supposed +-- to change descriptions and if one does so one should make a copy! + +local function copytotfm(data,cache_id) + if data then + local metadata = data.metadata + local warnings = data.warnings + local resources = data.resources + local properties = derivetable(data.properties) + local descriptions = derivetable(data.descriptions) + local goodies = derivetable(data.goodies) + local characters = { } + local parameters = { } + local mathparameters = { } + -- + local pfminfo = metadata.pfminfo or { } + local resources = data.resources + local unicodes = resources.unicodes + -- local mode = data.mode or "base" + local spaceunits = 500 + local spacer = "space" + local designsize = metadata.designsize or metadata.design_size or 100 + local mathspecs = metadata.math + -- + if designsize == 0 then + designsize = 100 + end + if mathspecs then + for name, value in next, mathspecs do + mathparameters[name] = value + end + end + for unicode, _ in next, data.descriptions do -- use parent table + characters[unicode] = { } + end + if mathspecs then + -- we could move this to the scaler but not that much is saved + -- and this is cleaner + for unicode, character in next, characters do + local d = descriptions[unicode] + local m = d.math + if m then + -- watch out: luatex uses horiz_variants for the parts + local variants = m.horiz_variants + local parts = m.horiz_parts + -- local done = { [unicode] = true } + if variants then + local c = character + for i=1,#variants do + local un = variants[i] + -- if done[un] then + -- -- report_otf("skipping cyclic reference %U in math variant %U",un,unicode) + -- else + c.next = un + c = characters[un] + -- done[un] = true + -- end + end -- c is now last in chain + c.horiz_variants = parts + elseif parts then + character.horiz_variants = parts + end + local variants = m.vert_variants + local parts = m.vert_parts + -- local done = { [unicode] = true } + if variants then + local c = character + for i=1,#variants do + local un = variants[i] + -- if done[un] then + -- -- report_otf("skipping cyclic reference %U in math variant %U",un,unicode) + -- else + c.next = un + c = characters[un] + -- done[un] = true + -- end + end -- c is now last in chain + c.vert_variants = parts + elseif parts then + character.vert_variants = parts + end + local italic_correction = m.vert_italic_correction + if italic_correction then + character.vert_italic_correction = italic_correction -- was c. + end + local top_accent = m.top_accent + if top_accent then + character.top_accent = top_accent + end + local kerns = m.kerns + if kerns then + character.mathkerns = kerns + end + end + end + end + -- end math + -- we need a runtime lookup because of running from cdrom or zip, brrr (shouldn't we use the basename then?) 
+ local filename = constructors.checkedfilename(resources) + local fontname = metadata.fontname + local fullname = metadata.fullname or fontname + local psname = fontname or fullname + local units = metadata.units_per_em or 1000 + -- + if units == 0 then -- catch bugs in fonts + units = 1000 -- maybe 2000 when ttf + metadata.units_per_em = 1000 + report_otf("changing %a units to %a",0,units) + end + -- + local monospaced = metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose.proportion == "Monospaced") + local charwidth = pfminfo.avgwidth -- or unset + local charxheight = pfminfo.os2_xheight and pfminfo.os2_xheight > 0 and pfminfo.os2_xheight +-- charwidth = charwidth * units/1000 +-- charxheight = charxheight * units/1000 + local italicangle = metadata.italicangle + properties.monospaced = monospaced + parameters.italicangle = italicangle + parameters.charwidth = charwidth + parameters.charxheight = charxheight + -- + local space = 0x0020 + local emdash = 0x2014 + if monospaced then + if descriptions[space] then + spaceunits, spacer = descriptions[space].width, "space" + end + if not spaceunits and descriptions[emdash] then + spaceunits, spacer = descriptions[emdash].width, "emdash" + end + if not spaceunits and charwidth then + spaceunits, spacer = charwidth, "charwidth" + end + else + if descriptions[space] then + spaceunits, spacer = descriptions[space].width, "space" + end + if not spaceunits and descriptions[emdash] then + spaceunits, spacer = descriptions[emdash].width/2, "emdash/2" + end + if not spaceunits and charwidth then + spaceunits, spacer = charwidth, "charwidth" + end + end + spaceunits = tonumber(spaceunits) or 500 -- brrr + -- + parameters.slant = 0 + parameters.space = spaceunits -- 3.333 (cmr10) + parameters.space_stretch = units/2 -- 500 -- 1.666 (cmr10) + parameters.space_shrink = 1*units/3 -- 333 -- 1.111 (cmr10) + parameters.x_height = 2*units/5 -- 400 + parameters.quad = units -- 1000 + if spaceunits < 2*units/5 then + -- todo: warning + end + if italicangle and italicangle ~= 0 then + parameters.italicangle = italicangle + parameters.italicfactor = math.cos(math.rad(90+italicangle)) + parameters.slant = - math.tan(italicangle*math.pi/180) + end + if monospaced then + parameters.space_stretch = 0 + parameters.space_shrink = 0 + elseif syncspace then -- + parameters.space_stretch = spaceunits/2 + parameters.space_shrink = spaceunits/3 + end + parameters.extra_space = parameters.space_shrink -- 1.111 (cmr10) + if charxheight then + parameters.x_height = charxheight + else + local x = 0x0078 + if x then + local x = descriptions[x] + if x then + parameters.x_height = x.height + end + end + end + -- + parameters.designsize = (designsize/10)*65536 + parameters.ascender = abs(metadata.ascent or 0) + parameters.descender = abs(metadata.descent or 0) + parameters.units = units + -- + properties.space = spacer + properties.encodingbytes = 2 + properties.format = data.format or otf_format(filename) or formats.otf +-- if units ~= 1000 and format ~= "truetype" then +-- properties.format = "truetype" +-- end + properties.noglyphnames = true + properties.filename = filename + properties.fontname = fontname + properties.fullname = fullname + properties.psname = psname + properties.name = filename or fullname + -- + -- properties.name = specification.name + -- properties.sub = specification.sub + -- + if warnings and #warnings > 0 then + report_otf("warnings for font: %s",filename) + report_otf() + for i=1,#warnings do + report_otf(" %s",warnings[i]) + end + report_otf() + 
end + return { + characters = characters, + descriptions = descriptions, + parameters = parameters, + mathparameters = mathparameters, + resources = resources, + properties = properties, + goodies = goodies, + warnings = warnings, + } + end +end + +local function otftotfm(specification) + local cache_id = specification.hash + local tfmdata = containers.read(constructors.cache,cache_id) + if not tfmdata then + local name = specification.name + local sub = specification.sub + local filename = specification.filename + -- local format = specification.format + local features = specification.features.normal + local rawdata = otf.load(filename,sub,features and features.featurefile) + if rawdata and next(rawdata) then + local descriptions = rawdata.descriptions + local duplicates = rawdata.resources.duplicates + if duplicates then + local nofduplicates, nofduplicated = 0, 0 + for parent, list in next, duplicates do + if type(list) == "table" then + local n = #list + for i=1,n do + local unicode = list[i] + if not descriptions[unicode] then + descriptions[unicode] = descriptions[parent] -- or copy + nofduplicated = nofduplicated + 1 + end + end + nofduplicates = nofduplicates + n + else + if not descriptions[list] then + descriptions[list] = descriptions[parent] -- or copy + nofduplicated = nofduplicated + 1 + end + nofduplicates = nofduplicates + 1 + end + end + if trace_otf and nofduplicated ~= nofduplicates then + report_otf("%i extra duplicates copied out of %i",nofduplicated,nofduplicates) + end + end + rawdata.lookuphash = { } + tfmdata = copytotfm(rawdata,cache_id) + if tfmdata and next(tfmdata) then + -- at this moment no characters are assigned yet, only empty slots + local features = constructors.checkedfeatures("otf",features) + local shared = tfmdata.shared + if not shared then + shared = { } + tfmdata.shared = shared + end + shared.rawdata = rawdata + -- shared.features = features -- default + shared.dynamics = { } + -- shared.processes = { } + tfmdata.changed = { } + shared.features = features + shared.processes = otf.setfeatures(tfmdata,features) + end + end + containers.write(constructors.cache,cache_id,tfmdata) + end + return tfmdata +end + +local function read_from_otf(specification) + local tfmdata = otftotfm(specification) + if tfmdata then + -- this late ? .. needs checking + tfmdata.properties.name = specification.name + tfmdata.properties.sub = specification.sub + -- + tfmdata = constructors.scale(tfmdata,specification) + local allfeatures = tfmdata.shared.features or specification.features.normal + constructors.applymanipulators("otf",tfmdata,allfeatures,trace_features,report_otf) + constructors.setname(tfmdata,specification) -- only otf? 
+ fonts.loggers.register(tfmdata,file.suffix(specification.filename),specification) + end + return tfmdata +end + +local function checkmathsize(tfmdata,mathsize) + local mathdata = tfmdata.shared.rawdata.metadata.math + local mathsize = tonumber(mathsize) + if mathdata then -- we cannot use mathparameters as luatex will complain + local parameters = tfmdata.parameters + parameters.scriptpercentage = mathdata.ScriptPercentScaleDown + parameters.scriptscriptpercentage = mathdata.ScriptScriptPercentScaleDown + parameters.mathsize = mathsize + end +end + +registerotffeature { + name = "mathsize", + description = "apply mathsize specified in the font", + initializers = { + base = checkmathsize, + node = checkmathsize, + } +} + +-- helpers + +function otf.collectlookups(rawdata,kind,script,language) + local sequences = rawdata.resources.sequences + if sequences then + local featuremap, featurelist = { }, { } + for s=1,#sequences do + local sequence = sequences[s] + local features = sequence.features + features = features and features[kind] + features = features and (features[script] or features[default] or features[wildcard]) + features = features and (features[language] or features[default] or features[wildcard]) + if features then + local subtables = sequence.subtables + if subtables then + for s=1,#subtables do + local ss = subtables[s] + if not featuremap[s] then + featuremap[ss] = true + featurelist[#featurelist+1] = ss + end + end + end + end + end + if #featurelist > 0 then + return featuremap, featurelist + end + end + return nil, nil +end + +-- readers (a bit messy, this forced so I might redo that bit: foo.ttf FOO.ttf foo.TTF FOO.TTF) + +local function check_otf(forced,specification,suffix) + local name = specification.name + if forced then + name = specification.forcedname -- messy + end + local fullname = findbinfile(name,suffix) or "" + if fullname == "" then + fullname = fonts.names.getfilename(name,suffix) or "" + end + if fullname ~= "" and not fonts.names.ignoredfile(fullname) then + specification.filename = fullname + return read_from_otf(specification) + end +end + +local function opentypereader(specification,suffix) + local forced = specification.forced or "" + if formats[forced] then + return check_otf(true,specification,forced) + else + return check_otf(false,specification,suffix) + end +end + +readers.opentype = opentypereader -- kind of useless and obsolete + +function readers.otf (specification) return opentypereader(specification,"otf") end +function readers.ttf (specification) return opentypereader(specification,"ttf") end +function readers.ttc (specification) return opentypereader(specification,"ttf") end +function readers.dfont(specification) return opentypereader(specification,"ttf") end + +-- this will be overloaded + +function otf.scriptandlanguage(tfmdata,attr) + local properties = tfmdata.properties + return properties.script or "dflt", properties.language or "dflt" +end diff --git a/src/fontloader/misc/fontloader-font-oti.lua b/src/fontloader/misc/fontloader-font-oti.lua new file mode 100644 index 0000000..06c2a42 --- /dev/null +++ b/src/fontloader/misc/fontloader-font-oti.lua @@ -0,0 +1,91 @@ +if not modules then modules = { } end modules ['font-oti'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local lower = string.lower + +local fonts = fonts +local constructors = fonts.constructors + 
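+-- Illustrative note (not part of the module, hypothetical font name): the
+-- initializers defined below react to plain key=value entries in a generic
+-- font request, so something like
+--
+--   \font\test=file:somefont.otf:mode=node;script=latn;language=dflt
+--
+-- ends up in tfmdata.properties as mode "node", script "latn" and language "dflt".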
+local otf = constructors.newhandler("otf") +local otffeatures = constructors.newfeatures("otf") +local otftables = otf.tables +local registerotffeature = otffeatures.register + +local allocate = utilities.storage.allocate + +registerotffeature { + name = "features", + description = "initialization of feature handler", + default = true, +} + +-- these are later hooked into node and base initializaters + +local function setmode(tfmdata,value) + if value then + tfmdata.properties.mode = lower(value) + end +end + +local function setlanguage(tfmdata,value) + if value then + local cleanvalue = lower(value) + local languages = otftables and otftables.languages + local properties = tfmdata.properties + if not languages then + properties.language = cleanvalue + elseif languages[value] then + properties.language = cleanvalue + else + properties.language = "dflt" + end + end +end + +local function setscript(tfmdata,value) + if value then + local cleanvalue = lower(value) + local scripts = otftables and otftables.scripts + local properties = tfmdata.properties + if not scripts then + properties.script = cleanvalue + elseif scripts[value] then + properties.script = cleanvalue + else + properties.script = "dflt" + end + end +end + +registerotffeature { + name = "mode", + description = "mode", + initializers = { + base = setmode, + node = setmode, + } +} + +registerotffeature { + name = "language", + description = "language", + initializers = { + base = setlanguage, + node = setlanguage, + } +} + +registerotffeature { + name = "script", + description = "script", + initializers = { + base = setscript, + node = setscript, + } +} + diff --git a/src/fontloader/misc/fontloader-font-otp.lua b/src/fontloader/misc/fontloader-font-otp.lua new file mode 100644 index 0000000..63e4184 --- /dev/null +++ b/src/fontloader/misc/fontloader-font-otp.lua @@ -0,0 +1,906 @@ +if not modules then modules = { } end modules ['font-otp'] = { + version = 1.001, + comment = "companion to font-otf.lua (packing)", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- todo: pack math (but not that much to share) +-- +-- pitfall 5.2: hashed tables can suddenly become indexed with nil slots + +local next, type = next, type +local sort, concat = table.sort, table.concat +local sortedhash = table.sortedhash + +local trace_packing = false trackers.register("otf.packing", function(v) trace_packing = v end) +local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end) + +local report_otf = logs.reporter("fonts","otf loading") + +-- also used in other scripts so we need to check some tables: + +fonts = fonts or { } + +local handlers = fonts.handlers or { } +fonts.handlers = handlers + +local otf = handlers.otf or { } +handlers.otf = otf + +local enhancers = otf.enhancers or { } +otf.enhancers = enhancers + +local glists = otf.glists or { "gsub", "gpos" } +otf.glists = glists + +local criterium = 1 +local threshold = 0 + +local function tabstr_normal(t) + local s = { } + local n = 0 + for k, v in next, t do + n = n + 1 + if type(v) == "table" then + s[n] = k .. ">" .. tabstr_normal(v) + elseif v == true then + s[n] = k .. "+" -- "=true" + elseif v then + s[n] = k .. "=" .. v + else + s[n] = k .. 
"-" -- "=false" + end + end + if n == 0 then + return "" + elseif n == 1 then + return s[1] + else + sort(s) -- costly but needed (occasional wrong hit otherwise) + return concat(s,",") + end +end + +local function tabstr_flat(t) + local s = { } + local n = 0 + for k, v in next, t do + n = n + 1 + s[n] = k .. "=" .. v + end + if n == 0 then + return "" + elseif n == 1 then + return s[1] + else + sort(s) -- costly but needed (occasional wrong hit otherwise) + return concat(s,",") + end +end + +local function tabstr_mixed(t) -- indexed + local s = { } + local n = #t + if n == 0 then + return "" + elseif n == 1 then + local k = t[1] + if k == true then + return "++" -- we need to distinguish from "true" + elseif k == false then + return "--" -- we need to distinguish from "false" + else + return tostring(k) -- number or string + end + else + for i=1,n do + local k = t[i] + if k == true then + s[i] = "++" -- we need to distinguish from "true" + elseif k == false then + s[i] = "--" -- we need to distinguish from "false" + else + s[i] = k -- number or string + end + end + return concat(s,",") + end +end + +local function tabstr_boolean(t) + local s = { } + local n = 0 + for k, v in next, t do + n = n + 1 + if v then + s[n] = k .. "+" + else + s[n] = k .. "-" + end + end + if n == 0 then + return "" + elseif n == 1 then + return s[1] + else + sort(s) -- costly but needed (occasional wrong hit otherwise) + return concat(s,",") + end +end + +-- tabstr_boolean_x = tabstr_boolean + +-- tabstr_boolean = function(t) +-- local a = tabstr_normal(t) +-- local b = tabstr_boolean_x(t) +-- print(a) +-- print(b) +-- return b +-- end + +-- beware: we cannot unpack and repack the same table because then sharing +-- interferes (we could catch this if needed) .. so for now: save, reload +-- and repack in such cases (never needed anyway) .. 
a tricky aspect is that +-- we then need to sort more thanks to random hashing + +local function packdata(data) + if data then + -- stripdata(data) + local h, t, c = { }, { }, { } + local hh, tt, cc = { }, { }, { } + local nt, ntt = 0, 0 + local function pack_normal(v) + local tag = tabstr_normal(v) + local ht = h[tag] + if ht then + c[ht] = c[ht] + 1 + return ht + else + nt = nt + 1 + t[nt] = v + h[tag] = nt + c[nt] = 1 + return nt + end + end + local function pack_flat(v) + local tag = tabstr_flat(v) + local ht = h[tag] + if ht then + c[ht] = c[ht] + 1 + return ht + else + nt = nt + 1 + t[nt] = v + h[tag] = nt + c[nt] = 1 + return nt + end + end + local function pack_boolean(v) + local tag = tabstr_boolean(v) + local ht = h[tag] + if ht then + c[ht] = c[ht] + 1 + return ht + else + nt = nt + 1 + t[nt] = v + h[tag] = nt + c[nt] = 1 + return nt + end + end + local function pack_indexed(v) + local tag = concat(v," ") + local ht = h[tag] + if ht then + c[ht] = c[ht] + 1 + return ht + else + nt = nt + 1 + t[nt] = v + h[tag] = nt + c[nt] = 1 + return nt + end + end + local function pack_mixed(v) + local tag = tabstr_mixed(v) + local ht = h[tag] + if ht then + c[ht] = c[ht] + 1 + return ht + else + nt = nt + 1 + t[nt] = v + h[tag] = nt + c[nt] = 1 + return nt + end + end + local function pack_final(v) + -- v == number + if c[v] <= criterium then + return t[v] + else + -- compact hash + local hv = hh[v] + if hv then + return hv + else + ntt = ntt + 1 + tt[ntt] = t[v] + hh[v] = ntt + cc[ntt] = c[v] + return ntt + end + end + end + local function success(stage,pass) + if nt == 0 then + if trace_loading or trace_packing then + report_otf("pack quality: nothing to pack") + end + return false + elseif nt >= threshold then + local one, two, rest = 0, 0, 0 + if pass == 1 then + for k,v in next, c do + if v == 1 then + one = one + 1 + elseif v == 2 then + two = two + 1 + else + rest = rest + 1 + end + end + else + for k,v in next, cc do + if v > 20 then + rest = rest + 1 + elseif v > 10 then + two = two + 1 + else + one = one + 1 + end + end + data.tables = tt + end + if trace_loading or trace_packing then + report_otf("pack quality: stage %s, pass %s, %s packed, 1-10:%s, 11-20:%s, rest:%s (criterium: %s)", stage, pass, one+two+rest, one, two, rest, criterium) + end + return true + else + if trace_loading or trace_packing then + report_otf("pack quality: stage %s, pass %s, %s packed, aborting pack (threshold: %s)", stage, pass, nt, threshold) + end + return false + end + end + local function packers(pass) + if pass == 1 then + return pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed + else + return pack_final, pack_final, pack_final, pack_final, pack_final + end + end + local resources = data.resources + local lookuptypes = resources.lookuptypes + for pass=1,2 do + if trace_packing then + report_otf("start packing: stage 1, pass %s",pass) + end + local pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed = packers(pass) + for unicode, description in next, data.descriptions do + local boundingbox = description.boundingbox + if boundingbox then + description.boundingbox = pack_indexed(boundingbox) + end + local slookups = description.slookups + if slookups then + for tag, slookup in next, slookups do + local what = lookuptypes[tag] + if what == "pair" then + local t = slookup[2] if t then slookup[2] = pack_indexed(t) end + local t = slookup[3] if t then slookup[3] = pack_indexed(t) end + elseif what ~= "substitution" then + slookups[tag] = pack_indexed(slookup) -- true is new + end 
+ end + end + local mlookups = description.mlookups + if mlookups then + for tag, mlookup in next, mlookups do + local what = lookuptypes[tag] + if what == "pair" then + for i=1,#mlookup do + local lookup = mlookup[i] + local t = lookup[2] if t then lookup[2] = pack_indexed(t) end + local t = lookup[3] if t then lookup[3] = pack_indexed(t) end + end + elseif what ~= "substitution" then + for i=1,#mlookup do + mlookup[i] = pack_indexed(mlookup[i]) -- true is new + end + end + end + end + local kerns = description.kerns + if kerns then + for tag, kern in next, kerns do + kerns[tag] = pack_flat(kern) + end + end + local math = description.math + if math then + local kerns = math.kerns + if kerns then + for tag, kern in next, kerns do + kerns[tag] = pack_normal(kern) + end + end + end + local anchors = description.anchors + if anchors then + for what, anchor in next, anchors do + if what == "baselig" then + for _, a in next, anchor do + for k=1,#a do + a[k] = pack_indexed(a[k]) + end + end + else + for k, v in next, anchor do + anchor[k] = pack_indexed(v) + end + end + end + end + local altuni = description.altuni + if altuni then + for i=1,#altuni do + altuni[i] = pack_flat(altuni[i]) + end + end + end + local lookups = data.lookups + if lookups then + for _, lookup in next, lookups do + local rules = lookup.rules + if rules then + for i=1,#rules do + local rule = rules[i] + local r = rule.before if r then for i=1,#r do r[i] = pack_boolean(r[i]) end end + local r = rule.after if r then for i=1,#r do r[i] = pack_boolean(r[i]) end end + local r = rule.current if r then for i=1,#r do r[i] = pack_boolean(r[i]) end end + local r = rule.replacements if r then rule.replacements = pack_flat (r) end -- can have holes + local r = rule.lookups if r then rule.lookups = pack_indexed(r) end -- can have "" + -- local r = rule.lookups if r then rule.lookups = pack_flat(r) end -- can have holes (already taken care of some cases) + end + end + end + end + local anchor_to_lookup = resources.anchor_to_lookup + if anchor_to_lookup then + for anchor, lookup in next, anchor_to_lookup do + anchor_to_lookup[anchor] = pack_normal(lookup) + end + end + local lookup_to_anchor = resources.lookup_to_anchor + if lookup_to_anchor then + for lookup, anchor in next, lookup_to_anchor do + lookup_to_anchor[lookup] = pack_normal(anchor) + end + end + local sequences = resources.sequences + if sequences then + for feature, sequence in next, sequences do + local flags = sequence.flags + if flags then + sequence.flags = pack_normal(flags) + end + local subtables = sequence.subtables + if subtables then + sequence.subtables = pack_normal(subtables) + end + local features = sequence.features + if features then + for script, feature in next, features do + features[script] = pack_normal(feature) + end + end + local order = sequence.order + if order then + sequence.order = pack_indexed(order) + end + local markclass = sequence.markclass + if markclass then + sequence.markclass = pack_boolean(markclass) + end + end + end + local lookups = resources.lookups + if lookups then + for name, lookup in next, lookups do + local flags = lookup.flags + if flags then + lookup.flags = pack_normal(flags) + end + local subtables = lookup.subtables + if subtables then + lookup.subtables = pack_normal(subtables) + end + end + end + local features = resources.features + if features then + for _, what in next, glists do + local list = features[what] + if list then + for feature, spec in next, list do + list[feature] = pack_normal(spec) + end + end + end + 
end + if not success(1,pass) then + return + end + end + if nt > 0 then + for pass=1,2 do + if trace_packing then + report_otf("start packing: stage 2, pass %s",pass) + end + local pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed = packers(pass) + for unicode, description in next, data.descriptions do + local kerns = description.kerns + if kerns then + description.kerns = pack_normal(kerns) + end + local math = description.math + if math then + local kerns = math.kerns + if kerns then + math.kerns = pack_normal(kerns) + end + end + local anchors = description.anchors + if anchors then + description.anchors = pack_normal(anchors) + end + local mlookups = description.mlookups + if mlookups then + for tag, mlookup in next, mlookups do + mlookups[tag] = pack_normal(mlookup) + end + end + local altuni = description.altuni + if altuni then + description.altuni = pack_normal(altuni) + end + end + local lookups = data.lookups + if lookups then + for _, lookup in next, lookups do + local rules = lookup.rules + if rules then + for i=1,#rules do -- was next loop + local rule = rules[i] + local r = rule.before if r then rule.before = pack_normal(r) end + local r = rule.after if r then rule.after = pack_normal(r) end + local r = rule.current if r then rule.current = pack_normal(r) end + end + end + end + end + local sequences = resources.sequences + if sequences then + for feature, sequence in next, sequences do + sequence.features = pack_normal(sequence.features) + end + end + if not success(2,pass) then + -- return + end + end + + for pass=1,2 do + local pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed = packers(pass) + for unicode, description in next, data.descriptions do + local slookups = description.slookups + if slookups then + description.slookups = pack_normal(slookups) + end + local mlookups = description.mlookups + if mlookups then + description.mlookups = pack_normal(mlookups) + end + end + end + + end + end +end + +local unpacked_mt = { + __index = + function(t,k) + t[k] = false + return k -- next time true + end +} + +local function unpackdata(data) + if data then + local tables = data.tables + if tables then + local resources = data.resources + local lookuptypes = resources.lookuptypes + local unpacked = { } + setmetatable(unpacked,unpacked_mt) + for unicode, description in next, data.descriptions do + local tv = tables[description.boundingbox] + if tv then + description.boundingbox = tv + end + local slookups = description.slookups + if slookups then + local tv = tables[slookups] + if tv then + description.slookups = tv + slookups = unpacked[tv] + end + if slookups then + for tag, lookup in next, slookups do + local what = lookuptypes[tag] + if what == "pair" then + local tv = tables[lookup[2]] + if tv then + lookup[2] = tv + end + local tv = tables[lookup[3]] + if tv then + lookup[3] = tv + end + elseif what ~= "substitution" then + local tv = tables[lookup] + if tv then + slookups[tag] = tv + end + end + end + end + end + local mlookups = description.mlookups + if mlookups then + local tv = tables[mlookups] + if tv then + description.mlookups = tv + mlookups = unpacked[tv] + end + if mlookups then + for tag, list in next, mlookups do + local tv = tables[list] + if tv then + mlookups[tag] = tv + list = unpacked[tv] + end + if list then + local what = lookuptypes[tag] + if what == "pair" then + for i=1,#list do + local lookup = list[i] + local tv = tables[lookup[2]] + if tv then + lookup[2] = tv + end + local tv = tables[lookup[3]] + if tv then + lookup[3] = 
tv + end + end + elseif what ~= "substitution" then + for i=1,#list do + local tv = tables[list[i]] + if tv then + list[i] = tv + end + end + end + end + end + end + end + local kerns = description.kerns + if kerns then + local tm = tables[kerns] + if tm then + description.kerns = tm + kerns = unpacked[tm] + end + if kerns then + for k, kern in next, kerns do + local tv = tables[kern] + if tv then + kerns[k] = tv + end + end + end + end + local math = description.math + if math then + local kerns = math.kerns + if kerns then + local tm = tables[kerns] + if tm then + math.kerns = tm + kerns = unpacked[tm] + end + if kerns then + for k, kern in next, kerns do + local tv = tables[kern] + if tv then + kerns[k] = tv + end + end + end + end + end + local anchors = description.anchors + if anchors then + local ta = tables[anchors] + if ta then + description.anchors = ta + anchors = unpacked[ta] + end + if anchors then + for tag, anchor in next, anchors do + if tag == "baselig" then + for _, list in next, anchor do + for i=1,#list do + local tv = tables[list[i]] + if tv then + list[i] = tv + end + end + end + else + for a, data in next, anchor do + local tv = tables[data] + if tv then + anchor[a] = tv + end + end + end + end + end + end + local altuni = description.altuni + if altuni then + local altuni = tables[altuni] + if altuni then + description.altuni = altuni + for i=1,#altuni do + local tv = tables[altuni[i]] + if tv then + altuni[i] = tv + end + end + end + end + end + local lookups = data.lookups + if lookups then + for _, lookup in next, lookups do + local rules = lookup.rules + if rules then + for i=1,#rules do -- was next loop + local rule = rules[i] + local before = rule.before + if before then + local tv = tables[before] + if tv then + rule.before = tv + before = unpacked[tv] + end + if before then + for i=1,#before do + local tv = tables[before[i]] + if tv then + before[i] = tv + end + end + end + end + local after = rule.after + if after then + local tv = tables[after] + if tv then + rule.after = tv + after = unpacked[tv] + end + if after then + for i=1,#after do + local tv = tables[after[i]] + if tv then + after[i] = tv + end + end + end + end + local current = rule.current + if current then + local tv = tables[current] + if tv then + rule.current = tv + current = unpacked[tv] + end + if current then + for i=1,#current do + local tv = tables[current[i]] + if tv then + current[i] = tv + end + end + end + end + local replacements = rule.replacements + if replacements then + local tv = tables[replacements] + if tv then + rule.replacements = tv + end + end + -- local fore = rule.fore + -- if fore then + -- local tv = tables[fore] + -- if tv then + -- rule.fore = tv + -- end + -- end + -- local back = rule.back + -- if back then + -- local tv = tables[back] + -- if tv then + -- rule.back = tv + -- end + -- end + -- local names = rule.names + -- if names then + -- local tv = tables[names] + -- if tv then + -- rule.names = tv + -- end + -- end + -- + local lookups = rule.lookups + if lookups then + local tv = tables[lookups] + if tv then + rule.lookups = tv + end + end + end + end + end + end + local anchor_to_lookup = resources.anchor_to_lookup + if anchor_to_lookup then + for anchor, lookup in next, anchor_to_lookup do + local tv = tables[lookup] + if tv then + anchor_to_lookup[anchor] = tv + end + end + end + local lookup_to_anchor = resources.lookup_to_anchor + if lookup_to_anchor then + for lookup, anchor in next, lookup_to_anchor do + local tv = tables[anchor] + if tv then + 
lookup_to_anchor[lookup] = tv + end + end + end + local ls = resources.sequences + if ls then + for _, feature in next, ls do + local flags = feature.flags + if flags then + local tv = tables[flags] + if tv then + feature.flags = tv + end + end + local subtables = feature.subtables + if subtables then + local tv = tables[subtables] + if tv then + feature.subtables = tv + end + end + local features = feature.features + if features then + local tv = tables[features] + if tv then + feature.features = tv + features = unpacked[tv] + end + if features then + for script, data in next, features do + local tv = tables[data] + if tv then + features[script] = tv + end + end + end + end + local order = feature.order + if order then + local tv = tables[order] + if tv then + feature.order = tv + end + end + local markclass = feature.markclass + if markclass then + local tv = tables[markclass] + if tv then + feature.markclass = tv + end + end + end + end + local lookups = resources.lookups + if lookups then + for _, lookup in next, lookups do + local flags = lookup.flags + if flags then + local tv = tables[flags] + if tv then + lookup.flags = tv + end + end + local subtables = lookup.subtables + if subtables then + local tv = tables[subtables] + if tv then + lookup.subtables = tv + end + end + end + end + local features = resources.features + if features then + for _, what in next, glists do + local feature = features[what] + if feature then + for tag, spec in next, feature do + local tv = tables[spec] + if tv then + feature[tag] = tv + end + end + end + end + end + data.tables = nil + end + end +end + +if otf.enhancers.register then + + otf.enhancers.register( "pack", packdata) + otf.enhancers.register("unpack",unpackdata) + +-- todo: directive + +end + +otf.enhancers.unpack = unpackdata -- used elsewhere +otf.enhancers.pack = packdata -- used elsewhere diff --git a/src/fontloader/misc/fontloader-font-tfm.lua b/src/fontloader/misc/fontloader-font-tfm.lua new file mode 100644 index 0000000..49df94e --- /dev/null +++ b/src/fontloader/misc/fontloader-font-tfm.lua @@ -0,0 +1,158 @@ +if not modules then modules = { } end modules ['font-tfm'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local next = next +local match = string.match + +local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end) +local trace_features = false trackers.register("tfm.features", function(v) trace_features = v end) + +local report_defining = logs.reporter("fonts","defining") +local report_tfm = logs.reporter("fonts","tfm loading") + +local findbinfile = resolvers.findbinfile + +local fonts = fonts +local handlers = fonts.handlers +local readers = fonts.readers +local constructors = fonts.constructors +local encodings = fonts.encodings + +local tfm = constructors.newhandler("tfm") + +local tfmfeatures = constructors.newfeatures("tfm") +local registertfmfeature = tfmfeatures.register + +constructors.resolvevirtualtoo = false -- wil be set in font-ctx.lua + +fonts.formats.tfm = "type1" -- we need to have at least a value here + +--[[ldx-- +
+<p>The next function encapsulates the standard tfm loader as
+supplied by luatex.</p>
+--ldx]]-- + +-- this might change: not scaling and then apply features and do scaling in the +-- usual way with dummy descriptions but on the other hand .. we no longer use +-- tfm so why bother + +-- ofm directive blocks local path search unless set; btw, in context we +-- don't support ofm files anyway as this format is obsolete + +function tfm.setfeatures(tfmdata,features) + local okay = constructors.initializefeatures("tfm",tfmdata,features,trace_features,report_tfm) + if okay then + return constructors.collectprocessors("tfm",tfmdata,features,trace_features,report_tfm) + else + return { } -- will become false + end +end + +local function read_from_tfm(specification) + local filename = specification.filename + local size = specification.size + if trace_defining then + report_defining("loading tfm file %a at size %s",filename,size) + end + local tfmdata = font.read_tfm(filename,size) -- not cached, fast enough + if tfmdata then + local features = specification.features and specification.features.normal or { } + local resources = tfmdata.resources or { } + local properties = tfmdata.properties or { } + local parameters = tfmdata.parameters or { } + local shared = tfmdata.shared or { } + properties.name = tfmdata.name + properties.fontname = tfmdata.fontname + properties.psname = tfmdata.psname + properties.filename = specification.filename + properties.format = fonts.formats.tfm -- better than nothing + parameters.size = size + shared.rawdata = { } + shared.features = features + shared.processes = next(features) and tfm.setfeatures(tfmdata,features) or nil + -- + tfmdata.properties = properties + tfmdata.resources = resources + tfmdata.parameters = parameters + tfmdata.shared = shared + -- + parameters.slant = parameters.slant or parameters[1] or 0 + parameters.space = parameters.space or parameters[2] or 0 + parameters.space_stretch = parameters.space_stretch or parameters[3] or 0 + parameters.space_shrink = parameters.space_shrink or parameters[4] or 0 + parameters.x_height = parameters.x_height or parameters[5] or 0 + parameters.quad = parameters.quad or parameters[6] or 0 + parameters.extra_space = parameters.extra_space or parameters[7] or 0 + -- + constructors.enhanceparameters(parameters) -- official copies for us + -- + if constructors.resolvevirtualtoo then + fonts.loggers.register(tfmdata,file.suffix(filename),specification) -- strange, why here + local vfname = findbinfile(specification.name, 'ovf') + if vfname and vfname ~= "" then + local vfdata = font.read_vf(vfname,size) -- not cached, fast enough + if vfdata then + local chars = tfmdata.characters + for k,v in next, vfdata.characters do + chars[k].commands = v.commands + end + properties.virtualized = true + tfmdata.fonts = vfdata.fonts + end + end + end + -- + local allfeatures = tfmdata.shared.features or specification.features.normal + constructors.applymanipulators("tfm",tfmdata,allfeatures.normal,trace_features,report_tfm) + if not features.encoding then + local encoding, filename = match(properties.filename,"^(.-)%-(.*)$") -- context: encoding-name.* + if filename and encoding and encodings.known and encodings.known[encoding] then + features.encoding = encoding + end + end + -- let's play safe: + properties.haskerns = true + properties.haslogatures = true + resources.unicodes = { } + resources.lookuptags = { } + -- + return tfmdata + end +end + +local function check_tfm(specification,fullname) -- we could split up like afm/otf + local foundname = findbinfile(fullname, 'tfm') or "" + if foundname == "" then + 
foundname = findbinfile(fullname, 'ofm') or "" -- not needed in context + end + if foundname == "" then + foundname = fonts.names.getfilename(fullname,"tfm") or "" + end + if foundname ~= "" then + specification.filename = foundname + specification.format = "ofm" + return read_from_tfm(specification) + elseif trace_defining then + report_defining("loading tfm with name %a fails",specification.name) + end +end + +readers.check_tfm = check_tfm + +function readers.tfm(specification) + local fullname = specification.filename or "" + if fullname == "" then + local forced = specification.forced or "" + if forced ~= "" then + fullname = specification.name .. "." .. forced + else + fullname = specification.name + end + end + return check_tfm(specification,fullname) +end diff --git a/src/fontloader/misc/fontloader-fonts-cbk.lua b/src/fontloader/misc/fontloader-fonts-cbk.lua new file mode 100644 index 0000000..8632701 --- /dev/null +++ b/src/fontloader/misc/fontloader-fonts-cbk.lua @@ -0,0 +1,95 @@ +if not modules then modules = { } end modules ['luatex-fonts-cbk'] = { + version = 1.001, + comment = "companion to luatex-*.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end + +local fonts = fonts +local nodes = nodes + +-- Fonts: (might move to node-gef.lua) + +local traverse_id = node.traverse_id +local glyph_code = nodes.nodecodes.glyph + +-- from now on we apply ligaturing and kerning here because it might interfere with complex +-- opentype discretionary handling where the base ligature pass expect some weird extra +-- pointers (which then confuse the tail slider that has some checking built in) + +local ligaturing = node.ligaturing +local kerning = node.kerning + +function node.ligaturing() texio.write_nl("warning: node.ligaturing is already applied") end +function node.kerning () texio.write_nl("warning: node.kerning is already applied") end + +function nodes.handlers.characters(head) + local fontdata = fonts.hashes.identifiers + if fontdata then + local usedfonts, basefonts, prevfont, basefont = { }, { }, nil, nil + for n in traverse_id(glyph_code,head) do + local font = n.font + if font ~= prevfont then + if basefont then + basefont[2] = n.prev + end + prevfont = font + local used = usedfonts[font] + if not used then + local tfmdata = fontdata[font] -- + if tfmdata then + local shared = tfmdata.shared -- we need to check shared, only when same features + if shared then + local processors = shared.processes + if processors and #processors > 0 then + usedfonts[font] = processors + else + basefont = { n, nil } + basefonts[#basefonts+1] = basefont + end + end + end + end + end + end + if next(usedfonts) then + for font, processors in next, usedfonts do + for i=1,#processors do + head = processors[i](head,font,0) or head + end + end + end + if #basefonts > 0 then + for i=1,#basefonts do + local range = basefonts[i] + local start, stop = range[1], range[2] + if stop then + ligaturing(start,stop) + kerning(start,stop) + else + ligaturing(start) + kerning(start) + end + end + end + return head, true + else + return head, false + end +end + +function nodes.simple_font_handler(head) +-- lang.hyphenate(head) + head = nodes.handlers.characters(head) + nodes.injections.handler(head) + nodes.handlers.protectglyphs(head) + -- head = node.ligaturing(head) + -- head = node.kerning(head) + return head +end 
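+-- A minimal sketch (not part of this file) of how a bare LuaTeX setup could
+-- plug the handler above into node processing; luaotfload installs its own
+-- callback management instead, so this is for illustration only:
+--
+--   callback.register("pre_linebreak_filter", nodes.simple_font_handler)
+--   callback.register("hpack_filter",         nodes.simple_font_handler)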
diff --git a/src/fontloader/misc/fontloader-fonts-def.lua b/src/fontloader/misc/fontloader-fonts-def.lua new file mode 100644 index 0000000..0c2f0db --- /dev/null +++ b/src/fontloader/misc/fontloader-fonts-def.lua @@ -0,0 +1,97 @@ +if not modules then modules = { } end modules ['luatex-font-def'] = { + version = 1.001, + comment = "companion to luatex-*.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end + +local fonts = fonts + +-- A bit of tuning for definitions. + +fonts.constructors.namemode = "specification" -- somehow latex needs this (changed name!) => will change into an overload + +-- tricky: we sort of bypass the parser and directly feed all into +-- the sub parser + +function fonts.definers.getspecification(str) + return "", str, "", ":", str +end + +-- the generic name parser (different from context!) + +local list = { } + +local function issome () list.lookup = 'name' end -- xetex mode prefers name (not in context!) +local function isfile () list.lookup = 'file' end +local function isname () list.lookup = 'name' end +local function thename(s) list.name = s end +local function issub (v) list.sub = v end +local function iscrap (s) list.crap = string.lower(s) end +local function iskey (k,v) list[k] = v end +local function istrue (s) list[s] = true end +local function isfalse(s) list[s] = false end + +local P, S, R, C = lpeg.P, lpeg.S, lpeg.R, lpeg.C + +local spaces = P(" ")^0 +local namespec = (1-S("/:("))^0 -- was: (1-S("/: ("))^0 +local crapspec = spaces * P("/") * (((1-P(":"))^0)/iscrap) * spaces +local filename_1 = P("file:")/isfile * (namespec/thename) +local filename_2 = P("[") * P(true)/isname * (((1-P("]"))^0)/thename) * P("]") +local fontname_1 = P("name:")/isname * (namespec/thename) +local fontname_2 = P(true)/issome * (namespec/thename) +local sometext = (R("az","AZ","09") + S("+-."))^1 +local truevalue = P("+") * spaces * (sometext/istrue) +local falsevalue = P("-") * spaces * (sometext/isfalse) +local keyvalue = (C(sometext) * spaces * P("=") * spaces * C(sometext))/iskey +local somevalue = sometext/istrue +local subvalue = P("(") * (C(P(1-S("()"))^1)/issub) * P(")") -- for Kim +local option = spaces * (keyvalue + falsevalue + truevalue + somevalue) * spaces +local options = P(":") * spaces * (P(";")^0 * option)^0 + +local pattern = (filename_1 + filename_2 + fontname_1 + fontname_2) * subvalue^0 * crapspec^0 * options^0 + +local function colonized(specification) -- xetex mode + list = { } + lpeg.match(pattern,specification.specification) + list.crap = nil -- style not supported, maybe some day + if list.name then + specification.name = list.name + list.name = nil + end + if list.lookup then + specification.lookup = list.lookup + list.lookup = nil + end + if list.sub then + specification.sub = list.sub + list.sub = nil + end + specification.features.normal = fonts.handlers.otf.features.normalize(list) + return specification +end + +fonts.definers.registersplit(":",colonized,"cryptic") +fonts.definers.registersplit("", colonized,"more cryptic") -- catches \font\text=[names] + +function fonts.definers.applypostprocessors(tfmdata) + local postprocessors = tfmdata.postprocessors + if postprocessors then + for i=1,#postprocessors do + local extrahash = postprocessors[i](tfmdata) -- after scaling etc + if type(extrahash) == "string" and extrahash ~= "" then + -- e.g. 
a reencoding needs this + extrahash = string.gsub(lower(extrahash),"[^a-z]","-") + tfmdata.properties.fullname = format("%s-%s",tfmdata.properties.fullname,extrahash) + end + end + end + return tfmdata +end diff --git a/src/fontloader/misc/fontloader-fonts-demo-vf-1.lua b/src/fontloader/misc/fontloader-fonts-demo-vf-1.lua new file mode 100644 index 0000000..3878ae6 --- /dev/null +++ b/src/fontloader/misc/fontloader-fonts-demo-vf-1.lua @@ -0,0 +1,38 @@ +local identifiers = fonts.hashes.identifiers + +return function(specification) + local f1, id1 = fonts.constructors.readanddefine('lmroman10-regular', specification.size) + local f2, id2 = fonts.constructors.readanddefine('lmsans10-regular', specification.size) + local f3, id3 = fonts.constructors.readanddefine('lmtypewriter10-regular',specification.size) + if f1 and f2 and f3 then + f1.properties.name = specification.name + f1.properties.virtualized = true + f1.fonts = { + { id = id1 }, + { id = id2 }, + { id = id3 }, + } + local color = { [0] = + { "special", "pdf:0 g" }, + { "special", "pdf:1 0 0 rg" }, + { "special", "pdf:0 1 0 rg" }, + { "special", "pdf:0 0 1 rg" }, + } + local chars = { + identifiers[id1].characters, + identifiers[id2].characters, + identifiers[id3].characters, + } + for u, v in next, f1.characters do + local n = math.floor(math.random(1,3)+0.5) + local c = chars[n][u] or v + v.commands = { color[n], { 'slot', n, u }, color[0] } + v.kerns = nil + v.width = c.width + v.height = c.height + v.depth = c.depth + v.italic = nil + end + end + return f1 +end diff --git a/src/fontloader/misc/fontloader-fonts-enc.lua b/src/fontloader/misc/fontloader-fonts-enc.lua new file mode 100644 index 0000000..2e1c6a4 --- /dev/null +++ b/src/fontloader/misc/fontloader-fonts-enc.lua @@ -0,0 +1,29 @@ +if not modules then modules = { } end modules ['luatex-font-enc'] = { + version = 1.001, + comment = "companion to luatex-*.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end + +local fonts = fonts +fonts.encodings = { } +fonts.encodings.agl = { } +fonts.encodings.known = { } + +setmetatable(fonts.encodings.agl, { __index = function(t,k) + if k == "unicodes" then + texio.write(" ") + local unicodes = dofile(resolvers.findfile("font-age.lua")) + fonts.encodings.agl = { unicodes = unicodes } + return unicodes + else + return nil + end +end }) + diff --git a/src/fontloader/misc/fontloader-fonts-ext.lua b/src/fontloader/misc/fontloader-fonts-ext.lua new file mode 100644 index 0000000..b60d045 --- /dev/null +++ b/src/fontloader/misc/fontloader-fonts-ext.lua @@ -0,0 +1,272 @@ +if not modules then modules = { } end modules ['luatex-fonts-ext'] = { + version = 1.001, + comment = "companion to luatex-*.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end + +local fonts = fonts +local otffeatures = fonts.constructors.newfeatures("otf") + +-- A few generic extensions. 
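+-- Purely illustrative (hypothetical font name): with the generic definer the
+-- extensions registered below are requested as key=value pairs in the font
+-- specification, for instance
+--
+--   \font\test=file:somefont.otf:slant=0.2;extend=1.05;protrusion=default;expansion=default
+--
+-- where "default" refers to the example vectors at the end of this file.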
+ +local function initializeitlc(tfmdata,value) + if value then + -- the magic 40 and it formula come from Dohyun Kim but we might need another guess + local parameters = tfmdata.parameters + local italicangle = parameters.italicangle + if italicangle and italicangle ~= 0 then + local properties = tfmdata.properties + local factor = tonumber(value) or 1 + properties.hasitalics = true + properties.autoitalicamount = factor * (parameters.uwidth or 40)/2 + end + end +end + +otffeatures.register { + name = "itlc", + description = "italic correction", + initializers = { + base = initializeitlc, + node = initializeitlc, + } +} + +-- slant and extend + +local function initializeslant(tfmdata,value) + value = tonumber(value) + if not value then + value = 0 + elseif value > 1 then + value = 1 + elseif value < -1 then + value = -1 + end + tfmdata.parameters.slantfactor = value +end + +otffeatures.register { + name = "slant", + description = "slant glyphs", + initializers = { + base = initializeslant, + node = initializeslant, + } +} + +local function initializeextend(tfmdata,value) + value = tonumber(value) + if not value then + value = 0 + elseif value > 10 then + value = 10 + elseif value < -10 then + value = -10 + end + tfmdata.parameters.extendfactor = value +end + +otffeatures.register { + name = "extend", + description = "scale glyphs horizontally", + initializers = { + base = initializeextend, + node = initializeextend, + } +} + +-- expansion and protrusion + +fonts.protrusions = fonts.protrusions or { } +fonts.protrusions.setups = fonts.protrusions.setups or { } + +local setups = fonts.protrusions.setups + +local function initializeprotrusion(tfmdata,value) + if value then + local setup = setups[value] + if setup then + local factor, left, right = setup.factor or 1, setup.left or 1, setup.right or 1 + local emwidth = tfmdata.parameters.quad + tfmdata.parameters.protrusion = { + auto = true, + } + for i, chr in next, tfmdata.characters do + local v, pl, pr = setup[i], nil, nil + if v then + pl, pr = v[1], v[2] + end + if pl and pl ~= 0 then chr.left_protruding = left *pl*factor end + if pr and pr ~= 0 then chr.right_protruding = right*pr*factor end + end + end + end +end + +otffeatures.register { + name = "protrusion", + description = "shift characters into the left and or right margin", + initializers = { + base = initializeprotrusion, + node = initializeprotrusion, + } +} + +fonts.expansions = fonts.expansions or { } +fonts.expansions.setups = fonts.expansions.setups or { } + +local setups = fonts.expansions.setups + +local function initializeexpansion(tfmdata,value) + if value then + local setup = setups[value] + if setup then + local factor = setup.factor or 1 + tfmdata.parameters.expansion = { + stretch = 10 * (setup.stretch or 0), + shrink = 10 * (setup.shrink or 0), + step = 10 * (setup.step or 0), + auto = true, + } + for i, chr in next, tfmdata.characters do + local v = setup[i] + if v and v ~= 0 then + chr.expansion_factor = v*factor + else -- can be option + chr.expansion_factor = factor + end + end + end + end +end + +otffeatures.register { + name = "expansion", + description = "apply hz optimization", + initializers = { + base = initializeexpansion, + node = initializeexpansion, + } +} + +-- left over + +function fonts.loggers.onetimemessage() end + +-- example vectors + +local byte = string.byte + +fonts.expansions.setups['default'] = { + + stretch = 2, shrink = 2, step = .5, factor = 1, + + [byte('A')] = 0.5, [byte('B')] = 0.7, [byte('C')] = 0.7, [byte('D')] = 0.5, [byte('E')] 
= 0.7, + [byte('F')] = 0.7, [byte('G')] = 0.5, [byte('H')] = 0.7, [byte('K')] = 0.7, [byte('M')] = 0.7, + [byte('N')] = 0.7, [byte('O')] = 0.5, [byte('P')] = 0.7, [byte('Q')] = 0.5, [byte('R')] = 0.7, + [byte('S')] = 0.7, [byte('U')] = 0.7, [byte('W')] = 0.7, [byte('Z')] = 0.7, + [byte('a')] = 0.7, [byte('b')] = 0.7, [byte('c')] = 0.7, [byte('d')] = 0.7, [byte('e')] = 0.7, + [byte('g')] = 0.7, [byte('h')] = 0.7, [byte('k')] = 0.7, [byte('m')] = 0.7, [byte('n')] = 0.7, + [byte('o')] = 0.7, [byte('p')] = 0.7, [byte('q')] = 0.7, [byte('s')] = 0.7, [byte('u')] = 0.7, + [byte('w')] = 0.7, [byte('z')] = 0.7, + [byte('2')] = 0.7, [byte('3')] = 0.7, [byte('6')] = 0.7, [byte('8')] = 0.7, [byte('9')] = 0.7, +} + +fonts.protrusions.setups['default'] = { + + factor = 1, left = 1, right = 1, + + [0x002C] = { 0, 1 }, -- comma + [0x002E] = { 0, 1 }, -- period + [0x003A] = { 0, 1 }, -- colon + [0x003B] = { 0, 1 }, -- semicolon + [0x002D] = { 0, 1 }, -- hyphen + [0x2013] = { 0, 0.50 }, -- endash + [0x2014] = { 0, 0.33 }, -- emdash + [0x3001] = { 0, 1 }, -- ideographic comma 、 + [0x3002] = { 0, 1 }, -- ideographic full stop 。 + [0x060C] = { 0, 1 }, -- arabic comma ، + [0x061B] = { 0, 1 }, -- arabic semicolon ؛ + [0x06D4] = { 0, 1 }, -- arabic full stop ۔ + +} + +-- normalizer + +fonts.handlers.otf.features.normalize = function(t) + if t.rand then + t.rand = "random" + end + return t +end + +-- bonus + +function fonts.helpers.nametoslot(name) + local t = type(name) + if t == "string" then + local tfmdata = fonts.hashes.identifiers[currentfont()] + local shared = tfmdata and tfmdata.shared + local fntdata = shared and shared.rawdata + return fntdata and fntdata.resources.unicodes[name] + elseif t == "number" then + return n + end +end + +-- \font\test=file:somefont:reencode=mymessup +-- +-- fonts.encodings.reencodings.mymessup = { +-- [109] = 110, -- m +-- [110] = 109, -- n +-- } + +fonts.encodings = fonts.encodings or { } +local reencodings = { } +fonts.encodings.reencodings = reencodings + +local function specialreencode(tfmdata,value) + -- we forget about kerns as we assume symbols and we + -- could issue a message if ther are kerns but it's + -- a hack anyway so we odn't care too much here + local encoding = value and reencodings[value] + if encoding then + local temp = { } + local char = tfmdata.characters + for k, v in next, encoding do + temp[k] = char[v] + end + for k, v in next, temp do + char[k] = temp[k] + end + -- if we use the font otherwise luatex gets confused so + -- we return an additional hash component for fullname + return string.format("reencoded:%s",value) + end +end + +local function reencode(tfmdata,value) + tfmdata.postprocessors = tfmdata.postprocessors or { } + table.insert(tfmdata.postprocessors, + function(tfmdata) + return specialreencode(tfmdata,value) + end + ) +end + +otffeatures.register { + name = "reencode", + description = "reencode characters", + manipulators = { + base = reencode, + node = reencode, + } +} diff --git a/src/fontloader/misc/fontloader-fonts-inj.lua b/src/fontloader/misc/fontloader-fonts-inj.lua new file mode 100644 index 0000000..4024035 --- /dev/null +++ b/src/fontloader/misc/fontloader-fonts-inj.lua @@ -0,0 +1,603 @@ +if not modules then modules = { } end modules ['node-inj'] = { + version = 1.001, + comment = "companion to node-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files", +} + +-- This is very experimental (this will change when we have 
luatex > .50 and +-- a few pending thingies are available. Also, Idris needs to make a few more +-- test fonts. Some optimizations can go away when we have faster machines. + +-- todo: ignore kerns between disc and glyph + +local next = next +local utfchar = utf.char + +local trace_injections = false trackers.register("nodes.injections", function(v) trace_injections = v end) + +local report_injections = logs.reporter("nodes","injections") + +local attributes, nodes, node = attributes, nodes, node + +fonts = fonts +local fontdata = fonts.hashes.identifiers + +nodes.injections = nodes.injections or { } +local injections = nodes.injections + +local nodecodes = nodes.nodecodes +local glyph_code = nodecodes.glyph +local kern_code = nodecodes.kern + +local nuts = nodes.nuts +local nodepool = nuts.pool + +local newkern = nodepool.kern + +local tonode = nuts.tonode +local tonut = nuts.tonut + +local getfield = nuts.getfield +local getnext = nuts.getnext +local getprev = nuts.getprev +local getid = nuts.getid +local getattr = nuts.getattr +local getfont = nuts.getfont +local getsubtype = nuts.getsubtype +local getchar = nuts.getchar + +local setfield = nuts.setfield +local setattr = nuts.setattr + +local traverse_id = nuts.traverse_id +local insert_node_before = nuts.insert_before +local insert_node_after = nuts.insert_after + +local a_kernpair = attributes.private('kernpair') +local a_ligacomp = attributes.private('ligacomp') +local a_markbase = attributes.private('markbase') +local a_markmark = attributes.private('markmark') +local a_markdone = attributes.private('markdone') +local a_cursbase = attributes.private('cursbase') +local a_curscurs = attributes.private('curscurs') +local a_cursdone = attributes.private('cursdone') + +local unsetvalue = attributes.unsetvalue + +-- This injector has been tested by Idris Samawi Hamid (several arabic fonts as well as +-- the rather demanding Husayni font), Khaled Hosny (latin and arabic) and Kaj Eigner +-- (arabic, hebrew and thai) and myself (whatever font I come across). I'm pretty sure +-- that this code is not 100% okay but examples are needed to figure things out. + +function injections.installnewkern(nk) + newkern = nk or newkern +end + +local cursives = { } +local marks = { } +local kerns = { } + +-- Currently we do gpos/kern in a bit inofficial way but when we have the extra fields in +-- glyphnodes to manipulate ht/dp/wd explicitly I will provide an alternative; also, we +-- can share tables. + +-- For the moment we pass the r2l key ... volt/arabtype tests .. idris: this needs +-- checking with husayni (volt and fontforge). 
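As a minimal sketch of how this injector is meant to be driven (not part of the patched file; the wrapper name kern_then_flush is hypothetical): the set* functions defined below only record offsets on glyph nodes through attributes, and injections.handler is what later walks the list and turns them into actual kern nodes and x/y offsets. Assuming the generic luatex-fonts environment set up earlier in this patch, so that nodes.nuts and nodes.injections exist:

    local injections = nodes.injections
    local tonut      = nodes.nuts.tonut

    local function kern_then_flush(head, glyphnode, fontunits, factor, rlmode)
        -- setkern only stores the scaled kern on the glyph via an attribute
        injections.setkern(tonut(glyphnode), factor, rlmode, fontunits)
        -- handler inserts the actual kern nodes and clears its internal tables
        return injections.handler(head, "sketch", false)
    end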
+ +function injections.reset(n) +-- if getattr(n,a_kernpair) then +-- setattr(n,a_kernpair,unsetvalue) +-- end +-- if getattr(n,a_markdone) then +-- setattr(n,a_markbase,unsetvalue) +-- setattr(n,a_markmark,unsetvalue) +-- setattr(n,a_markdone,unsetvalue) +-- end +-- if getattr(n,a_cursdone) then +-- setattr(n,a_cursbase,unsetvalue) +-- setattr(n,a_curscurs,unsetvalue) +-- setattr(n,a_cursdone,unsetvalue) +-- end +-- if getattr(n,a_ligacomp) then +-- setattr(n,a_ligacomp,unsetvalue) +-- end +end + +function injections.setligaindex(n,index) + setattr(n,a_ligacomp,index) +end + +function injections.getligaindex(n,default) + return getattr(n,a_ligacomp) or default +end + +function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext) + local dx, dy = factor*(exit[1]-entry[1]), factor*(exit[2]-entry[2]) + local ws, wn = tfmstart.width, tfmnext.width + local bound = #cursives + 1 + setattr(start,a_cursbase,bound) + setattr(nxt,a_curscurs,bound) + cursives[bound] = { rlmode, dx, dy, ws, wn } + return dx, dy, bound +end + +function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr) + local x, y, w, h = factor*spec[1], factor*spec[2], factor*spec[3], factor*spec[4] + -- dy = y - h + if x ~= 0 or w ~= 0 or y ~= 0 or h ~= 0 then + local bound = getattr(current,a_kernpair) + if bound then + local kb = kerns[bound] + -- inefficient but singles have less, but weird anyway, needs checking + kb[2], kb[3], kb[4], kb[5] = (kb[2] or 0) + x, (kb[3] or 0) + y, (kb[4] or 0)+ w, (kb[5] or 0) + h + else + bound = #kerns + 1 + setattr(current,a_kernpair,bound) + kerns[bound] = { rlmode, x, y, w, h, r2lflag, tfmchr.width } + end + return x, y, w, h, bound + end + return x, y, w, h -- no bound +end + +function injections.setkern(current,factor,rlmode,x,tfmchr) + local dx = factor*x + if dx ~= 0 then + local bound = #kerns + 1 + setattr(current,a_kernpair,bound) + kerns[bound] = { rlmode, dx } + return dx, bound + else + return 0, 0 + end +end + +function injections.setmark(start,base,factor,rlmode,ba,ma) -- ba=baseanchor, ma=markanchor + local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2]) + local bound = getattr(base,a_markbase) + local index = 1 + if bound then + local mb = marks[bound] + if mb then + -- if not index then index = #mb + 1 end + index = #mb + 1 + mb[index] = { dx, dy, rlmode } + setattr(start,a_markmark,bound) + setattr(start,a_markdone,index) + return dx, dy, bound + else + report_injections("possible problem, %U is base mark without data (id %a)",getchar(base),bound) + end + end + index = index or 1 + bound = #marks + 1 + setattr(base,a_markbase,bound) + setattr(start,a_markmark,bound) + setattr(start,a_markdone,index) + marks[bound] = { [index] = { dx, dy, rlmode } } + return dx, dy, bound +end + +local function dir(n) + return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset" +end + +local function trace(head) + report_injections("begin run") + for n in traverse_id(glyph_code,head) do + if getsubtype(n) < 256 then + local kp = getattr(n,a_kernpair) + local mb = getattr(n,a_markbase) + local mm = getattr(n,a_markmark) + local md = getattr(n,a_markdone) + local cb = getattr(n,a_cursbase) + local cc = getattr(n,a_curscurs) + local char = getchar(n) + report_injections("font %s, char %U, glyph %c",getfont(n),char,char) + if kp then + local k = kerns[kp] + if k[3] then + report_injections(" pairkern: dir %a, x %p, y %p, w %p, h %p",dir(k[1]),k[2],k[3],k[4],k[5]) + else + report_injections(" kern: dir %a, dx %p",dir(k[1]),k[2]) + end + end + if mb 
then + report_injections(" markbase: bound %a",mb) + end + if mm then + local m = marks[mm] + if mb then + local m = m[mb] + if m then + report_injections(" markmark: bound %a, index %a, dx %p, dy %p",mm,md,m[1],m[2]) + else + report_injections(" markmark: bound %a, missing index",mm) + end + else + m = m[1] + report_injections(" markmark: bound %a, dx %p, dy %p",mm,m and m[1],m and m[2]) + end + end + if cb then + report_injections(" cursbase: bound %a",cb) + end + if cc then + local c = cursives[cc] + report_injections(" curscurs: bound %a, dir %a, dx %p, dy %p",cc,dir(c[1]),c[2],c[3]) + end + end + end + report_injections("end run") +end + +-- todo: reuse tables (i.e. no collection), but will be extra fields anyway +-- todo: check for attribute + +-- We can have a fast test on a font being processed, so we can check faster for marks etc +-- but I'll make a context variant anyway. + +local function show_result(head) + local current = head + local skipping = false + while current do + local id = getid(current) + if id == glyph_code then + report_injections("char: %C, width %p, xoffset %p, yoffset %p", + getchar(current),getfield(current,"width"),getfield(current,"xoffset"),getfield(current,"yoffset")) + skipping = false + elseif id == kern_code then + report_injections("kern: %p",getfield(current,"kern")) + skipping = false + elseif not skipping then + report_injections() + skipping = true + end + current = getnext(current) + end +end + +function injections.handler(head,where,keep) + head = tonut(head) + local has_marks, has_cursives, has_kerns = next(marks), next(cursives), next(kerns) + if has_marks or has_cursives then + if trace_injections then + trace(head) + end + -- in the future variant we will not copy items but refs to tables + local done, ky, rl, valid, cx, wx, mk, nofvalid = false, { }, { }, { }, { }, { }, { }, 0 + if has_kerns then -- move outside loop + local nf, tm = nil, nil + for n in traverse_id(glyph_code,head) do -- only needed for relevant fonts + if getsubtype(n) < 256 then + nofvalid = nofvalid + 1 + valid[nofvalid] = n + local f = getfont(n) + if f ~= nf then + nf = f + tm = fontdata[nf].resources.marks -- other hash in ctx + end + if tm then + mk[n] = tm[getchar(n)] + end + local k = getattr(n,a_kernpair) + if k then + local kk = kerns[k] + if kk then + local x, y, w, h = kk[2] or 0, kk[3] or 0, kk[4] or 0, kk[5] or 0 + local dy = y - h + if dy ~= 0 then + ky[n] = dy + end + if w ~= 0 or x ~= 0 then + wx[n] = kk + end + rl[n] = kk[1] -- could move in test + end + end + end + end + else + local nf, tm = nil, nil + for n in traverse_id(glyph_code,head) do + if getsubtype(n) < 256 then + nofvalid = nofvalid + 1 + valid[nofvalid] = n + local f = getfont(n) + if f ~= nf then + nf = f + tm = fontdata[nf].resources.marks -- other hash in ctx + end + if tm then + mk[n] = tm[getchar(n)] + end + end + end + end + if nofvalid > 0 then + -- we can assume done == true because we have cursives and marks + local cx = { } + if has_kerns and next(ky) then + for n, k in next, ky do + setfield(n,"yoffset",k) + end + end + -- todo: reuse t and use maxt + if has_cursives then + local p_cursbase, p = nil, nil + -- since we need valid[n+1] we can also use a "while true do" + local t, d, maxt = { }, { }, 0 + for i=1,nofvalid do -- valid == glyphs + local n = valid[i] + if not mk[n] then + local n_cursbase = getattr(n,a_cursbase) + if p_cursbase then + local n_curscurs = getattr(n,a_curscurs) + if p_cursbase == n_curscurs then + local c = cursives[n_curscurs] + if c then + local rlmode, 
dx, dy, ws, wn = c[1], c[2], c[3], c[4], c[5] + if rlmode >= 0 then + dx = dx - ws + else + dx = dx + wn + end + if dx ~= 0 then + cx[n] = dx + rl[n] = rlmode + end + -- if rlmode and rlmode < 0 then + dy = -dy + -- end + maxt = maxt + 1 + t[maxt] = p + d[maxt] = dy + else + maxt = 0 + end + end + elseif maxt > 0 then + local ny = getfield(n,"yoffset") + for i=maxt,1,-1 do + ny = ny + d[i] + local ti = t[i] + setfield(ti,"yoffset",getfield(ti,"yoffset") + ny) + end + maxt = 0 + end + if not n_cursbase and maxt > 0 then + local ny = getfield(n,"yoffset") + for i=maxt,1,-1 do + ny = ny + d[i] + local ti = t[i] + setfield(ti,"yoffset",ny) -- maybe add to current yoffset + end + maxt = 0 + end + p_cursbase, p = n_cursbase, n + end + end + if maxt > 0 then + local ny = getfield(n,"yoffset") -- hm, n unset ? + for i=maxt,1,-1 do + ny = ny + d[i] + local ti = t[i] + setfield(ti,"yoffset",ny) + end + maxt = 0 + end + if not keep then + cursives = { } + end + end + if has_marks then + for i=1,nofvalid do + local p = valid[i] + local p_markbase = getattr(p,a_markbase) + if p_markbase then + local mrks = marks[p_markbase] + local nofmarks = #mrks + for n in traverse_id(glyph_code,getnext(p)) do + local n_markmark = getattr(n,a_markmark) + if p_markbase == n_markmark then + local index = getattr(n,a_markdone) or 1 + local d = mrks[index] + if d then + local rlmode = d[3] + -- + local k = wx[p] + local px = getfield(p,"xoffset") + local ox = 0 + if k then + local x = k[2] + local w = k[4] + if w then + if rlmode and rlmode >= 0 then + -- kern(x) glyph(p) kern(w-x) mark(n) + ox = px - getfield(p,"width") + d[1] - (w-x) + -- report_injections("l2r case 1: %p",ox) + else + -- kern(w-x) glyph(p) kern(x) mark(n) + ox = px - d[1] - x + -- report_injections("r2l case 1: %p",ox) + end + else + if rlmode and rlmode >= 0 then + -- okay for husayni + ox = px - getfield(p,"width") + d[1] + -- report_injections("r2l case 2: %p",ox) + else + -- needs checking: is x ok here? 
+ ox = px - d[1] - x + -- report_injections("r2l case 2: %p",ox) + end + end + else + -- if rlmode and rlmode >= 0 then + -- ox = px - getfield(p,"width") + d[1] + -- -- report_injections("l2r case 3: %p",ox) + -- else + -- ox = px - d[1] + -- -- report_injections("r2l case 3: %p",ox) + -- end + -- + -- we need to deal with fonts that have marks with width + -- + local wp = getfield(p,"width") + local wn = getfield(n,"width") -- in arial marks have widths + if rlmode and rlmode >= 0 then + ox = px - wp + d[1] + -- report_injections("l2r case 3: %p",ox) + else + ox = px - d[1] + -- report_injections("r2l case 3: %p",ox) + end + if wn ~= 0 then + -- bad: we should center + insert_node_before(head,n,newkern(-wn/2)) + insert_node_after(head,n,newkern(-wn/2)) + -- wx[n] = { 0, -wn/2, 0, -wn } + end + -- so far + end + setfield(n,"xoffset",ox) + -- + local py = getfield(p,"yoffset") + local oy = 0 + if mk[p] then + oy = py + d[2] + else + oy = getfield(n,"yoffset") + py + d[2] + end + setfield(n,"yoffset",oy) + -- + if nofmarks == 1 then + break + else + nofmarks = nofmarks - 1 + end + end + elseif not n_markmark then + break -- HH: added 2013-09-12: no need to deal with non marks + else + -- KE: there can be sequences in ligatures + end + end + end + end + if not keep then + marks = { } + end + end + -- todo : combine + if next(wx) then + for n, k in next, wx do + -- only w can be nil (kernclasses), can be sped up when w == nil + local x = k[2] + local w = k[4] + if w then + local rl = k[1] -- r2l = k[6] + local wx = w - x + if rl < 0 then -- KE: don't use r2l here + if wx ~= 0 then + insert_node_before(head,n,newkern(wx)) -- type 0/2 + end + if x ~= 0 then + insert_node_after (head,n,newkern(x)) -- type 0/2 + end + else + if x ~= 0 then + insert_node_before(head,n,newkern(x)) -- type 0/2 + end + if wx ~= 0 then + insert_node_after (head,n,newkern(wx)) -- type 0/2 + end + end + elseif x ~= 0 then + -- this needs checking for rl < 0 but it is unlikely that a r2l script + -- uses kernclasses between glyphs so we're probably safe (KE has a + -- problematic font where marks interfere with rl < 0 in the previous + -- case) + insert_node_before(head,n,newkern(x)) -- a real font kern, type 0 + end + end + end + if next(cx) then + for n, k in next, cx do + if k ~= 0 then + local rln = rl[n] + if rln and rln < 0 then + insert_node_before(head,n,newkern(-k)) -- type 0/2 + else + insert_node_before(head,n,newkern(k)) -- type 0/2 + end + end + end + end + if not keep then + kerns = { } + end + -- if trace_injections then + -- show_result(head) + -- end + return tonode(head), true + elseif not keep then + kerns, cursives, marks = { }, { }, { } + end + elseif has_kerns then + if trace_injections then + trace(head) + end + for n in traverse_id(glyph_code,head) do + if getsubtype(n) < 256 then + local k = getattr(n,a_kernpair) + if k then + local kk = kerns[k] + if kk then + local rl, x, y, w = kk[1], kk[2] or 0, kk[3], kk[4] + if y and y ~= 0 then + setfield(n,"yoffset",y) -- todo: h ? + end + if w then + -- copied from above + -- local r2l = kk[6] + local wx = w - x + if rl < 0 then -- KE: don't use r2l here + if wx ~= 0 then + insert_node_before(head,n,newkern(wx)) + end + if x ~= 0 then + insert_node_after (head,n,newkern(x)) + end + else + if x ~= 0 then + insert_node_before(head,n,newkern(x)) + end + if wx ~= 0 then + insert_node_after(head,n,newkern(wx)) + end + end + else + -- simple (e.g. 
kernclass kerns) + if x ~= 0 then + insert_node_before(head,n,newkern(x)) + end + end + end + end + end + end + if not keep then + kerns = { } + end + -- if trace_injections then + -- show_result(head) + -- end + return tonode(head), true + else + -- no tracing needed + end + return tonode(head), false +end diff --git a/src/fontloader/misc/fontloader-fonts-lua.lua b/src/fontloader/misc/fontloader-fonts-lua.lua new file mode 100644 index 0000000..ec3fe38 --- /dev/null +++ b/src/fontloader/misc/fontloader-fonts-lua.lua @@ -0,0 +1,33 @@ +if not modules then modules = { } end modules ['luatex-fonts-lua'] = { + version = 1.001, + comment = "companion to luatex-*.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end + +local fonts = fonts +fonts.formats.lua = "lua" + +function fonts.readers.lua(specification) + local fullname = specification.filename or "" + if fullname == "" then + local forced = specification.forced or "" + if forced ~= "" then + fullname = specification.name .. "." .. forced + else + fullname = specification.name + end + end + local fullname = resolvers.findfile(fullname) or "" + if fullname ~= "" then + local loader = loadfile(fullname) + loader = loader and loader() + return loader and loader(specification) + end +end diff --git a/src/fontloader/misc/fontloader-fonts-ota.lua b/src/fontloader/misc/fontloader-fonts-ota.lua new file mode 100644 index 0000000..f083fe0 --- /dev/null +++ b/src/fontloader/misc/fontloader-fonts-ota.lua @@ -0,0 +1,459 @@ +if not modules then modules = { } end modules ['font-otx'] = { + version = 1.001, + comment = "companion to font-otf.lua (analysing)", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local type = type + +if not trackers then trackers = { register = function() end } end + +----- trace_analyzing = false trackers.register("otf.analyzing", function(v) trace_analyzing = v end) + +local fonts, nodes, node = fonts, nodes, node + +local allocate = utilities.storage.allocate + +local otf = fonts.handlers.otf + +local analyzers = fonts.analyzers +local initializers = allocate() +local methods = allocate() + +analyzers.initializers = initializers +analyzers.methods = methods +analyzers.useunicodemarks = false + +local a_state = attributes.private('state') + +local nuts = nodes.nuts +local tonut = nuts.tonut + +local getfield = nuts.getfield +local getnext = nuts.getnext +local getprev = nuts.getprev +local getid = nuts.getid +local getprop = nuts.getprop +local setprop = nuts.setprop +local getfont = nuts.getfont +local getsubtype = nuts.getsubtype +local getchar = nuts.getchar + +local traverse_id = nuts.traverse_id +local traverse_node_list = nuts.traverse +local end_of_math = nuts.end_of_math + +local nodecodes = nodes.nodecodes +local glyph_code = nodecodes.glyph +local disc_code = nodecodes.disc +local math_code = nodecodes.math + +local fontdata = fonts.hashes.identifiers +local categories = characters and characters.categories or { } -- sorry, only in context + +local otffeatures = fonts.constructors.newfeatures("otf") +local registerotffeature = otffeatures.register + +--[[ldx-- +
+Analyzers run per script and/or language and are needed in order to
+process features right.
+--ldx]]-- + +-- never use these numbers directly + +local s_init = 1 local s_rphf = 7 +local s_medi = 2 local s_half = 8 +local s_fina = 3 local s_pref = 9 +local s_isol = 4 local s_blwf = 10 +local s_mark = 5 local s_pstf = 11 +local s_rest = 6 + +local states = { + init = s_init, + medi = s_medi, + fina = s_fina, + isol = s_isol, + mark = s_mark, + rest = s_rest, + rphf = s_rphf, + half = s_half, + pref = s_pref, + blwf = s_blwf, + pstf = s_pstf, +} + +local features = { + init = s_init, + medi = s_medi, + fina = s_fina, + isol = s_isol, + -- mark = s_mark, + -- rest = s_rest, + rphf = s_rphf, + half = s_half, + pref = s_pref, + blwf = s_blwf, + pstf = s_pstf, +} + +analyzers.states = states +analyzers.features = features + +-- todo: analyzers per script/lang, cross font, so we need an font id hash -> script +-- e.g. latin -> hyphenate, arab -> 1/2/3 analyze -- its own namespace + +function analyzers.setstate(head,font) + local useunicodemarks = analyzers.useunicodemarks + local tfmdata = fontdata[font] + local descriptions = tfmdata.descriptions + local first, last, current, n, done = nil, nil, head, 0, false -- maybe make n boolean + current = tonut(current) + while current do + local id = getid(current) + if id == glyph_code and getfont(current) == font then + done = true + local char = getchar(current) + local d = descriptions[char] + if d then + if d.class == "mark" or (useunicodemarks and categories[char] == "mn") then + done = true + setprop(current,a_state,s_mark) + elseif n == 0 then + first, last, n = current, current, 1 + setprop(current,a_state,s_init) + else + last, n = current, n+1 + setprop(current,a_state,s_medi) + end + else -- finish + if first and first == last then + setprop(last,a_state,s_isol) + elseif last then + setprop(last,a_state,s_fina) + end + first, last, n = nil, nil, 0 + end + elseif id == disc_code then + -- always in the middle + setprop(current,a_state,s_medi) + last = current + else -- finish + if first and first == last then + setprop(last,a_state,s_isol) + elseif last then + setprop(last,a_state,s_fina) + end + first, last, n = nil, nil, 0 + if id == math_code then + current = end_of_math(current) + end + end + current = getnext(current) + end + if first and first == last then + setprop(last,a_state,s_isol) + elseif last then + setprop(last,a_state,s_fina) + end + return head, done +end + +-- in the future we will use language/script attributes instead of the +-- font related value, but then we also need dynamic features which is +-- somewhat slower; and .. 
we need a chain of them + +local function analyzeinitializer(tfmdata,value) -- attr + local script, language = otf.scriptandlanguage(tfmdata) -- attr + local action = initializers[script] + if not action then + -- skip + elseif type(action) == "function" then + return action(tfmdata,value) + else + local action = action[language] + if action then + return action(tfmdata,value) + end + end +end + +local function analyzeprocessor(head,font,attr) + local tfmdata = fontdata[font] + local script, language = otf.scriptandlanguage(tfmdata,attr) + local action = methods[script] + if not action then + -- skip + elseif type(action) == "function" then + return action(head,font,attr) + else + action = action[language] + if action then + return action(head,font,attr) + end + end + return head, false +end + +registerotffeature { + name = "analyze", + description = "analysis of character classes", + default = true, + initializers = { + node = analyzeinitializer, + }, + processors = { + position = 1, + node = analyzeprocessor, + } +} + +-- latin + +methods.latn = analyzers.setstate + +-- This info eventually can go into char-def and we will have a state +-- table for generic then (unicode recognized all states but in practice +-- only has only +-- +-- isolated : isol +-- final : isol_fina +-- medial : isol_fina_medi_init +-- +-- so in practice, without analyzer it's rather useless info which is +-- why having it in char-def makes only sense for special purposes (like) +-- like tracing cq. visualizing. + +local tatweel = 0x0640 +local zwnj = 0x200C +local zwj = 0x200D + +local isolated = { -- isol + [0x0600] = true, [0x0601] = true, [0x0602] = true, [0x0603] = true, + [0x0604] = true, + [0x0608] = true, [0x060B] = true, [0x0621] = true, [0x0674] = true, + [0x06DD] = true, + -- mandaic + [0x0856] = true, [0x0858] = true, [0x0857] = true, + -- n'ko + [0x07FA] = true, + -- also here: + [zwnj] = true, + -- 7 + [0x08AD] = true, +} + +local final = { -- isol_fina + [0x0622] = true, [0x0623] = true, [0x0624] = true, [0x0625] = true, + [0x0627] = true, [0x0629] = true, [0x062F] = true, [0x0630] = true, + [0x0631] = true, [0x0632] = true, [0x0648] = true, [0x0671] = true, + [0x0672] = true, [0x0673] = true, [0x0675] = true, [0x0676] = true, + [0x0677] = true, [0x0688] = true, [0x0689] = true, [0x068A] = true, + [0x068B] = true, [0x068C] = true, [0x068D] = true, [0x068E] = true, + [0x068F] = true, [0x0690] = true, [0x0691] = true, [0x0692] = true, + [0x0693] = true, [0x0694] = true, [0x0695] = true, [0x0696] = true, + [0x0697] = true, [0x0698] = true, [0x0699] = true, [0x06C0] = true, + [0x06C3] = true, [0x06C4] = true, [0x06C5] = true, [0x06C6] = true, + [0x06C7] = true, [0x06C8] = true, [0x06C9] = true, [0x06CA] = true, + [0x06CB] = true, [0x06CD] = true, [0x06CF] = true, [0x06D2] = true, + [0x06D3] = true, [0x06D5] = true, [0x06EE] = true, [0x06EF] = true, + [0x0759] = true, [0x075A] = true, [0x075B] = true, [0x076B] = true, + [0x076C] = true, [0x0771] = true, [0x0773] = true, [0x0774] = true, + [0x0778] = true, [0x0779] = true, + [0x08AA] = true, [0x08AB] = true, [0x08AC] = true, + [0xFEF5] = true, [0xFEF7] = true, [0xFEF9] = true, [0xFEFB] = true, + -- syriac + [0x0710] = true, [0x0715] = true, [0x0716] = true, [0x0717] = true, + [0x0718] = true, [0x0719] = true, [0x0728] = true, [0x072A] = true, + [0x072C] = true, [0x071E] = true, + [0x072F] = true, [0x074D] = true, + -- mandaic + [0x0840] = true, [0x0849] = true, [0x0854] = true, [0x0846] = true, + [0x084F] = true, + -- 7 + [0x08AE] = true, [0x08B1] = true, 
[0x08B2] = true, +} + +local medial = { -- isol_fina_medi_init + [0x0626] = true, [0x0628] = true, [0x062A] = true, [0x062B] = true, + [0x062C] = true, [0x062D] = true, [0x062E] = true, [0x0633] = true, + [0x0634] = true, [0x0635] = true, [0x0636] = true, [0x0637] = true, + [0x0638] = true, [0x0639] = true, [0x063A] = true, [0x063B] = true, + [0x063C] = true, [0x063D] = true, [0x063E] = true, [0x063F] = true, + [0x0641] = true, [0x0642] = true, [0x0643] = true, + [0x0644] = true, [0x0645] = true, [0x0646] = true, [0x0647] = true, + [0x0649] = true, [0x064A] = true, [0x066E] = true, [0x066F] = true, + [0x0678] = true, [0x0679] = true, [0x067A] = true, [0x067B] = true, + [0x067C] = true, [0x067D] = true, [0x067E] = true, [0x067F] = true, + [0x0680] = true, [0x0681] = true, [0x0682] = true, [0x0683] = true, + [0x0684] = true, [0x0685] = true, [0x0686] = true, [0x0687] = true, + [0x069A] = true, [0x069B] = true, [0x069C] = true, [0x069D] = true, + [0x069E] = true, [0x069F] = true, [0x06A0] = true, [0x06A1] = true, + [0x06A2] = true, [0x06A3] = true, [0x06A4] = true, [0x06A5] = true, + [0x06A6] = true, [0x06A7] = true, [0x06A8] = true, [0x06A9] = true, + [0x06AA] = true, [0x06AB] = true, [0x06AC] = true, [0x06AD] = true, + [0x06AE] = true, [0x06AF] = true, [0x06B0] = true, [0x06B1] = true, + [0x06B2] = true, [0x06B3] = true, [0x06B4] = true, [0x06B5] = true, + [0x06B6] = true, [0x06B7] = true, [0x06B8] = true, [0x06B9] = true, + [0x06BA] = true, [0x06BB] = true, [0x06BC] = true, [0x06BD] = true, + [0x06BE] = true, [0x06BF] = true, [0x06C1] = true, [0x06C2] = true, + [0x06CC] = true, [0x06CE] = true, [0x06D0] = true, [0x06D1] = true, + [0x06FA] = true, [0x06FB] = true, [0x06FC] = true, [0x06FF] = true, + [0x0750] = true, [0x0751] = true, [0x0752] = true, [0x0753] = true, + [0x0754] = true, [0x0755] = true, [0x0756] = true, [0x0757] = true, + [0x0758] = true, [0x075C] = true, [0x075D] = true, [0x075E] = true, + [0x075F] = true, [0x0760] = true, [0x0761] = true, [0x0762] = true, + [0x0763] = true, [0x0764] = true, [0x0765] = true, [0x0766] = true, + [0x0767] = true, [0x0768] = true, [0x0769] = true, [0x076A] = true, + [0x076D] = true, [0x076E] = true, [0x076F] = true, [0x0770] = true, + [0x0772] = true, [0x0775] = true, [0x0776] = true, [0x0777] = true, + [0x077A] = true, [0x077B] = true, [0x077C] = true, [0x077D] = true, + [0x077E] = true, [0x077F] = true, + [0x08A0] = true, [0x08A2] = true, [0x08A4] = true, [0x08A5] = true, + [0x08A6] = true, [0x0620] = true, [0x08A8] = true, [0x08A9] = true, + [0x08A7] = true, [0x08A3] = true, + -- syriac + [0x0712] = true, [0x0713] = true, [0x0714] = true, [0x071A] = true, + [0x071B] = true, [0x071C] = true, [0x071D] = true, [0x071F] = true, + [0x0720] = true, [0x0721] = true, [0x0722] = true, [0x0723] = true, + [0x0724] = true, [0x0725] = true, [0x0726] = true, [0x0727] = true, + [0x0729] = true, [0x072B] = true, [0x072D] = true, [0x072E] = true, + [0x074E] = true, [0x074F] = true, + -- mandaic + [0x0841] = true, [0x0842] = true, [0x0843] = true, [0x0844] = true, + [0x0845] = true, [0x0847] = true, [0x0848] = true, [0x0855] = true, + [0x0851] = true, [0x084E] = true, [0x084D] = true, [0x084A] = true, + [0x084B] = true, [0x084C] = true, [0x0850] = true, [0x0852] = true, + [0x0853] = true, + -- n'ko + [0x07D7] = true, [0x07E8] = true, [0x07D9] = true, [0x07EA] = true, + [0x07CA] = true, [0x07DB] = true, [0x07CC] = true, [0x07DD] = true, + [0x07CE] = true, [0x07DF] = true, [0x07D4] = true, [0x07E5] = true, + [0x07E9] = true, [0x07E7] = true, [0x07E3] = true, 
[0x07E2] = true, + [0x07E0] = true, [0x07E1] = true, [0x07DE] = true, [0x07DC] = true, + [0x07D1] = true, [0x07DA] = true, [0x07D8] = true, [0x07D6] = true, + [0x07D2] = true, [0x07D0] = true, [0x07CF] = true, [0x07CD] = true, + [0x07CB] = true, [0x07D3] = true, [0x07E4] = true, [0x07D5] = true, + [0x07E6] = true, + -- also here: + [tatweel]= true, [zwj] = true, + -- 7 + [0x08A1] = true, [0x08AF] = true, [0x08B0] = true, +} + +local arab_warned = { } + +-- todo: gref + +local function warning(current,what) + local char = getchar(current) + if not arab_warned[char] then + log.report("analyze","arab: character %C has no %a class",char,what) + arab_warned[char] = true + end +end + +-- potential optimization: local medial_final = table.merged(medial,final) + +local function finish(first,last) + if last then + if first == last then + local fc = getchar(first) + if medial[fc] or final[fc] then + setprop(first,a_state,s_isol) + else + warning(first,"isol") + setprop(first,a_state,s_error) + end + else + local lc = getchar(last) + if medial[lc] or final[lc] then + -- if laststate == 1 or laststate == 2 or laststate == 4 then + setprop(last,a_state,s_fina) + else + warning(last,"fina") + setprop(last,a_state,s_error) + end + end + first, last = nil, nil + elseif first then + -- first and last are either both set so we never com here + local fc = getchar(first) + if medial[fc] or final[fc] then + setprop(first,a_state,s_isol) + else + warning(first,"isol") + setprop(first,a_state,s_error) + end + first = nil + end + return first, last +end + +function methods.arab(head,font,attr) + local useunicodemarks = analyzers.useunicodemarks + local tfmdata = fontdata[font] + local marks = tfmdata.resources.marks + local first, last, current, done = nil, nil, head, false + current = tonut(current) + while current do + local id = getid(current) + if id == glyph_code and getfont(current) == font and getsubtype(current)<256 and not getprop(current,a_state) then + done = true + local char = getchar(current) + if marks[char] or (useunicodemarks and categories[char] == "mn") then + setprop(current,a_state,s_mark) + elseif isolated[char] then -- can be zwj or zwnj too + first, last = finish(first,last) + setprop(current,a_state,s_isol) + first, last = nil, nil + elseif not first then + if medial[char] then + setprop(current,a_state,s_init) + first, last = first or current, current + elseif final[char] then + setprop(current,a_state,s_isol) + first, last = nil, nil + else -- no arab + first, last = finish(first,last) + end + elseif medial[char] then + first, last = first or current, current + setprop(current,a_state,s_medi) + elseif final[char] then + if getprop(last,a_state) ~= s_init then + -- tricky, we need to check what last may be ! 
+ setprop(last,a_state,s_medi) + end + setprop(current,a_state,s_fina) + first, last = nil, nil + elseif char >= 0x0600 and char <= 0x06FF then -- needs checking + setprop(current,a_state,s_rest) + first, last = finish(first,last) + else -- no + first, last = finish(first,last) + end + else + if first or last then + first, last = finish(first,last) + end + if id == math_code then + current = end_of_math(current) + end + end + current = getnext(current) + end + if first or last then + finish(first,last) + end + return head, done +end + +methods.syrc = methods.arab +methods.mand = methods.arab +methods.nko = methods.arab + +directives.register("otf.analyze.useunicodemarks",function(v) + analyzers.useunicodemarks = v +end) diff --git a/src/fontloader/misc/fontloader-fonts-otn.lua b/src/fontloader/misc/fontloader-fonts-otn.lua new file mode 100644 index 0000000..32dc820 --- /dev/null +++ b/src/fontloader/misc/fontloader-fonts-otn.lua @@ -0,0 +1,2888 @@ +if not modules then modules = { } end modules ['font-otn'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files", +} + +-- this is a context version which can contain experimental code, but when we +-- have serious patches we also need to change the other two font-otn files + +-- preprocessors = { "nodes" } + +-- anchor class : mark, mkmk, curs, mklg (todo) +-- anchor type : mark, basechar, baselig, basemark, centry, cexit, max (todo) + +-- this is still somewhat preliminary and it will get better in due time; +-- much functionality could only be implemented thanks to the husayni font +-- of Idris Samawi Hamid to who we dedicate this module. + +-- in retrospect it always looks easy but believe it or not, it took a lot +-- of work to get proper open type support done: buggy fonts, fuzzy specs, +-- special made testfonts, many skype sessions between taco, idris and me, +-- torture tests etc etc ... unfortunately the code does not show how much +-- time it took ... + +-- todo: +-- +-- kerning is probably not yet ok for latin around dics nodes (interesting challenge) +-- extension infrastructure (for usage out of context) +-- sorting features according to vendors/renderers +-- alternative loop quitters +-- check cursive and r2l +-- find out where ignore-mark-classes went +-- default features (per language, script) +-- handle positions (we need example fonts) +-- handle gpos_single (we might want an extra width field in glyph nodes because adding kerns might interfere) +-- mark (to mark) code is still not what it should be (too messy but we need some more extreem husayni tests) +-- remove some optimizations (when I have a faster machine) +-- +-- maybe redo the lot some way (more context specific) + +--[[ldx-- +
+This module is a bit more split up than I'd like but since we also want to test
+with plain it has to be so. This module is part of ConTeXt and discussion about
+improvements and functionality mostly happens on the ConTeXt mailing list.
+
+The specification of OpenType is kind of vague. Apart from the lack of a proper
+free specification there's also the problem that Microsoft and Adobe may have
+their own interpretation of how and in what order to apply features. In general
+the Microsoft website has more detailed specifications and is a better
+reference. There is also some information in the FontForge help files.
+
+Because there is so much possible, fonts might contain bugs and/or be made to
+work with certain renderers. These may evolve over time which may have the
+side effect that suddenly fonts behave differently.
+
+After a lot of experiments (mostly by Taco, me and Idris) we're now at yet
+another implementation. Of course all errors are mine and of course the code
+can be improved. There are quite some optimizations going on here and
+processing speed is currently acceptable. Not all functions are implemented
+yet, often because I lack the fonts for testing. Many scripts are not yet
+supported either, but I will look into them as soon as users ask for it.
+
+Because there are different interpretations possible, I will extend the code
+with more (configurable) variants. I can also add hooks for users so that they
+can write their own extensions.
+
+Glyphs are indexed not by unicode but in their own way. This is because there
+is no relationship with unicode at all, apart from the fact that a font might
+cover certain ranges of characters. One character can have multiple shapes.
+However, at the TeX end we use unicode so all extra glyphs are mapped into a
+private space. This is needed because we need to access them and TeX has to
+include them in the output eventually.
+
+The raw table as it comes from FontForge gets reorganized to fit our needs. In
+ConTeXt that table is packed (similar tables are shared) and cached on disk so
+that successive runs can use the optimized table (after loading the table is
+unpacked). The flattening code used later is a prelude to an even more compact
+table format (and as such it keeps evolving).
+
+This module is sparsely documented because it is a moving target. The table
+format of the reader changes and we experiment a lot with different methods
+for supporting features.
+
+As with the AFM code, we may decide to store more information in the OTF
+table.
+
+Incrementing the version number will force a re-cache. We jump the number by
+one when there's a fix in the library or code that results in different
+tables.
+--ldx]]-- + +-- action handler chainproc chainmore comment +-- +-- gsub_single ok ok ok +-- gsub_multiple ok ok not implemented yet +-- gsub_alternate ok ok not implemented yet +-- gsub_ligature ok ok ok +-- gsub_context ok -- +-- gsub_contextchain ok -- +-- gsub_reversecontextchain ok -- +-- chainsub -- ok +-- reversesub -- ok +-- gpos_mark2base ok ok +-- gpos_mark2ligature ok ok +-- gpos_mark2mark ok ok +-- gpos_cursive ok untested +-- gpos_single ok ok +-- gpos_pair ok ok +-- gpos_context ok -- +-- gpos_contextchain ok -- +-- +-- todo: contextpos and contextsub and class stuff +-- +-- actions: +-- +-- handler : actions triggered by lookup +-- chainproc : actions triggered by contextual lookup +-- chainmore : multiple substitutions triggered by contextual lookup (e.g. fij -> f + ij) +-- +-- remark: the 'not implemented yet' variants will be done when we have fonts that use them +-- remark: we need to check what to do with discretionaries + +-- We used to have independent hashes for lookups but as the tags are unique +-- we now use only one hash. If needed we can have multiple again but in that +-- case I will probably prefix (i.e. rename) the lookups in the cached font file. + +-- Todo: make plugin feature that operates on char/glyphnode arrays + +local concat, insert, remove = table.concat, table.insert, table.remove +local gmatch, gsub, find, match, lower, strip = string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip +local type, next, tonumber, tostring = type, next, tonumber, tostring +local lpegmatch = lpeg.match +local random = math.random +local formatters = string.formatters + +local logs, trackers, nodes, attributes = logs, trackers, nodes, attributes + +local registertracker = trackers.register + +local fonts = fonts +local otf = fonts.handlers.otf + +local trace_lookups = false registertracker("otf.lookups", function(v) trace_lookups = v end) +local trace_singles = false registertracker("otf.singles", function(v) trace_singles = v end) +local trace_multiples = false registertracker("otf.multiples", function(v) trace_multiples = v end) +local trace_alternatives = false registertracker("otf.alternatives", function(v) trace_alternatives = v end) +local trace_ligatures = false registertracker("otf.ligatures", function(v) trace_ligatures = v end) +local trace_contexts = false registertracker("otf.contexts", function(v) trace_contexts = v end) +local trace_marks = false registertracker("otf.marks", function(v) trace_marks = v end) +local trace_kerns = false registertracker("otf.kerns", function(v) trace_kerns = v end) +local trace_cursive = false registertracker("otf.cursive", function(v) trace_cursive = v end) +local trace_preparing = false registertracker("otf.preparing", function(v) trace_preparing = v end) +local trace_bugs = false registertracker("otf.bugs", function(v) trace_bugs = v end) +local trace_details = false registertracker("otf.details", function(v) trace_details = v end) +local trace_applied = false registertracker("otf.applied", function(v) trace_applied = v end) +local trace_steps = false registertracker("otf.steps", function(v) trace_steps = v end) +local trace_skips = false registertracker("otf.skips", function(v) trace_skips = v end) +local trace_directions = false registertracker("otf.directions", function(v) trace_directions = v end) + +local report_direct = logs.reporter("fonts","otf direct") +local report_subchain = logs.reporter("fonts","otf subchain") +local report_chain = logs.reporter("fonts","otf chain") +local 
report_process = logs.reporter("fonts","otf process") +local report_prepare = logs.reporter("fonts","otf prepare") +local report_warning = logs.reporter("fonts","otf warning") + +registertracker("otf.verbose_chain", function(v) otf.setcontextchain(v and "verbose") end) +registertracker("otf.normal_chain", function(v) otf.setcontextchain(v and "normal") end) + +registertracker("otf.replacements", "otf.singles,otf.multiples,otf.alternatives,otf.ligatures") +registertracker("otf.positions","otf.marks,otf.kerns,otf.cursive") +registertracker("otf.actions","otf.replacements,otf.positions") +registertracker("otf.injections","nodes.injections") + +registertracker("*otf.sample","otf.steps,otf.actions,otf.analyzing") + +local nuts = nodes.nuts +local tonode = nuts.tonode +local tonut = nuts.tonut + +local getfield = nuts.getfield +local setfield = nuts.setfield +local getnext = nuts.getnext +local getprev = nuts.getprev +local getid = nuts.getid +local getattr = nuts.getattr +local setattr = nuts.setattr +local getprop = nuts.getprop +local setprop = nuts.setprop +local getfont = nuts.getfont +local getsubtype = nuts.getsubtype +local getchar = nuts.getchar + +local insert_node_after = nuts.insert_after +local delete_node = nuts.delete +local copy_node = nuts.copy +local find_node_tail = nuts.tail +local flush_node_list = nuts.flush_list +local end_of_math = nuts.end_of_math + +local setmetatableindex = table.setmetatableindex + +local zwnj = 0x200C +local zwj = 0x200D +local wildcard = "*" +local default = "dflt" + +local nodecodes = nodes.nodecodes +local whatcodes = nodes.whatcodes +local glyphcodes = nodes.glyphcodes +local disccodes = nodes.disccodes + +local glyph_code = nodecodes.glyph +local glue_code = nodecodes.glue +local disc_code = nodecodes.disc +local whatsit_code = nodecodes.whatsit +local math_code = nodecodes.math + +local dir_code = whatcodes.dir +local localpar_code = whatcodes.localpar + +local discretionary_code = disccodes.discretionary + +local ligature_code = glyphcodes.ligature + +local privateattribute = attributes.private + +-- Something is messed up: we have two mark / ligature indices, one at the injection +-- end and one here ... this is bases in KE's patches but there is something fishy +-- there as I'm pretty sure that for husayni we need some connection (as it's much +-- more complex than an average font) but I need proper examples of all cases, not +-- of only some. 
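A usage note, not part of the patched file: the grouped trackers registered above can be switched on at run time through the generic setters interface, assuming a ConTeXt run where the full trackers mechanism is loaded (in the standalone generic loader, trackers is only a stub providing register). For example:

    -- "otf.actions" expands to otf.replacements plus otf.positions;
    -- "otf.injections" delegates to the tracer in the injection module
    trackers.enable("otf.actions")
    trackers.enable("otf.injections")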
+ +local a_state = privateattribute('state') +local a_cursbase = privateattribute('cursbase') -- to be checked, probably can go + +local injections = nodes.injections +local setmark = injections.setmark +local setcursive = injections.setcursive +local setkern = injections.setkern +local setpair = injections.setpair +local resetinjection = injections.reset +local setligaindex = injections.setligaindex +local getligaindex = injections.getligaindex + +local cursonce = true + +local fonthashes = fonts.hashes +local fontdata = fonthashes.identifiers + +local otffeatures = fonts.constructors.newfeatures("otf") +local registerotffeature = otffeatures.register + +local onetimemessage = fonts.loggers.onetimemessage or function() end + +otf.defaultnodealternate = "none" -- first last + +-- we share some vars here, after all, we have no nested lookups and less code + +local tfmdata = false +local characters = false +local descriptions = false +local resources = false +local marks = false +local currentfont = false +local lookuptable = false +local anchorlookups = false +local lookuptypes = false +local lookuptags = false +local handlers = { } +local rlmode = 0 +local featurevalue = false + +-- head is always a whatsit so we can safely assume that head is not changed + +-- we use this for special testing and documentation + +local checkstep = (nodes and nodes.tracers and nodes.tracers.steppers.check) or function() end +local registerstep = (nodes and nodes.tracers and nodes.tracers.steppers.register) or function() end +local registermessage = (nodes and nodes.tracers and nodes.tracers.steppers.message) or function() end + +local function logprocess(...) + if trace_steps then + registermessage(...) + end + report_direct(...) +end + +local function logwarning(...) + report_direct(...) +end + +local f_unicode = formatters["%U"] +local f_uniname = formatters["%U (%s)"] +local f_unilist = formatters["% t (% t)"] + +local function gref(n) -- currently the same as in font-otb + if type(n) == "number" then + local description = descriptions[n] + local name = description and description.name + if name then + return f_uniname(n,name) + else + return f_unicode(n) + end + elseif n then + local num, nam = { }, { } + for i=1,#n do + local ni = n[i] + if tonumber(ni) then -- later we will start at 2 + local di = descriptions[ni] + num[i] = f_unicode(ni) + nam[i] = di and di.name or "-" + end + end + return f_unilist(num,nam) + else + return "" + end +end + +local function cref(kind,chainname,chainlookupname,lookupname,index) -- not in the mood to alias f_ + if index then + return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookuptags[lookupname],index) + elseif lookupname then + return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookuptags[lookupname]) + elseif chainlookupname then + return formatters["feature %a, chain %a, sub %a"](kind,lookuptags[chainname],lookuptags[chainlookupname]) + elseif chainname then + return formatters["feature %a, chain %a"](kind,lookuptags[chainname]) + else + return formatters["feature %a"](kind) + end +end + +local function pref(kind,lookupname) + return formatters["feature %a, lookup %a"](kind,lookuptags[lookupname]) +end + +-- We can assume that languages that use marks are not hyphenated. We can also assume +-- that at most one discretionary is present. 
+ +-- We do need components in funny kerning mode but maybe I can better reconstruct then +-- as we do have the font components info available; removing components makes the +-- previous code much simpler. Also, later on copying and freeing becomes easier. +-- However, for arabic we need to keep them around for the sake of mark placement +-- and indices. + +local function copy_glyph(g) -- next and prev are untouched ! + local components = getfield(g,"components") + if components then + setfield(g,"components",nil) + local n = copy_node(g) + setfield(g,"components",components) + return n + else + return copy_node(g) + end +end + +-- start is a mark and we need to keep that one + +local function markstoligature(kind,lookupname,head,start,stop,char) + if start == stop and getchar(start) == char then + return head, start + else + local prev = getprev(start) + local next = getnext(stop) + setfield(start,"prev",nil) + setfield(stop,"next",nil) + local base = copy_glyph(start) + if head == start then + head = base + end + resetinjection(base) + setfield(base,"char",char) + setfield(base,"subtype",ligature_code) + setfield(base,"components",start) + if prev then + setfield(prev,"next",base) + end + if next then + setfield(next,"prev",base) + end + setfield(base,"next",next) + setfield(base,"prev",prev) + return head, base + end +end + +-- The next code is somewhat complicated by the fact that some fonts can have ligatures made +-- from ligatures that themselves have marks. This was identified by Kai in for instance +-- arabtype: KAF LAM SHADDA ALEF FATHA (0x0643 0x0644 0x0651 0x0627 0x064E). This becomes +-- KAF LAM-ALEF with a SHADDA on the first and a FATHA op de second component. In a next +-- iteration this becomes a KAF-LAM-ALEF with a SHADDA on the second and a FATHA on the +-- third component. + +local function getcomponentindex(start) + if getid(start) ~= glyph_code then + return 0 + elseif getsubtype(start) == ligature_code then + local i = 0 + local components = getfield(start,"components") + while components do + i = i + getcomponentindex(components) + components = getnext(components) + end + return i + elseif not marks[getchar(start)] then + return 1 + else + return 0 + end +end + +-- eventually we will do positioning in an other way (needs addional w/h/d fields) + +local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound) -- brr head + if start == stop and getchar(start) == char then + resetinjection(start) + setfield(start,"char",char) + return head, start + end + local prev = getprev(start) + local next = getnext(stop) + setfield(start,"prev",nil) + setfield(stop,"next",nil) + local base = copy_glyph(start) + if start == head then + head = base + end + resetinjection(base) + setfield(base,"char",char) + setfield(base,"subtype",ligature_code) + setfield(base,"components",start) -- start can have components + if prev then + setfield(prev,"next",base) + end + if next then + setfield(next,"prev",base) + end + setfield(base,"next",next) + setfield(base,"prev",prev) + if not discfound then + local deletemarks = markflag ~= "mark" + local components = start + local baseindex = 0 + local componentindex = 0 + local head = base + local current = base + -- first we loop over the glyphs in start .. 
stop + while start do + local char = getchar(start) + if not marks[char] then + baseindex = baseindex + componentindex + componentindex = getcomponentindex(start) + elseif not deletemarks then -- quite fishy + setligaindex(start,baseindex + getligaindex(start,componentindex)) + if trace_marks then + logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),getligaindex(start)) + end + head, current = insert_node_after(head,current,copy_node(start)) -- unlikely that mark has components + elseif trace_marks then + logwarning("%s: delete mark %s",pref(kind,lookupname),gref(char)) + end + start = getnext(start) + end + -- we can have one accent as part of a lookup and another following + -- local start = components -- was wrong (component scanning was introduced when more complex ligs in devanagari was added) + local start = getnext(current) + while start and getid(start) == glyph_code do + local char = getchar(start) + if marks[char] then + setligaindex(start,baseindex + getligaindex(start,componentindex)) + if trace_marks then + logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),getligaindex(start)) + end + else + break + end + start = getnext(start) + end + end + return head, base +end + +function handlers.gsub_single(head,start,kind,lookupname,replacement) + if trace_singles then + logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(getchar(start)),gref(replacement)) + end + resetinjection(start) + setfield(start,"char",replacement) + return head, start, true +end + +local function get_alternative_glyph(start,alternatives,value,trace_alternatives) + local n = #alternatives + if value == "random" then + local r = random(1,n) + return alternatives[r], trace_alternatives and formatters["value %a, taking %a"](value,r) + elseif value == "first" then + return alternatives[1], trace_alternatives and formatters["value %a, taking %a"](value,1) + elseif value == "last" then + return alternatives[n], trace_alternatives and formatters["value %a, taking %a"](value,n) + else + value = tonumber(value) + if type(value) ~= "number" then + return alternatives[1], trace_alternatives and formatters["invalid value %s, taking %a"](value,1) + elseif value > n then + local defaultalt = otf.defaultnodealternate + if defaultalt == "first" then + return alternatives[n], trace_alternatives and formatters["invalid value %s, taking %a"](value,1) + elseif defaultalt == "last" then + return alternatives[1], trace_alternatives and formatters["invalid value %s, taking %a"](value,n) + else + return false, trace_alternatives and formatters["invalid value %a, %s"](value,"out of range") + end + elseif value == 0 then + return getchar(start), trace_alternatives and formatters["invalid value %a, %s"](value,"no change") + elseif value < 1 then + return alternatives[1], trace_alternatives and formatters["invalid value %a, taking %a"](value,1) + else + return alternatives[value], trace_alternatives and formatters["value %a, taking %a"](value,value) + end + end +end + +local function multiple_glyphs(head,start,multiple,ignoremarks) + local nofmultiples = #multiple + if nofmultiples > 0 then + resetinjection(start) + setfield(start,"char",multiple[1]) + if nofmultiples > 1 then + local sn = getnext(start) + for k=2,nofmultiples do -- todo: use insert_node +-- untested: +-- +-- while ignoremarks and marks[getchar(sn)] then +-- local sn = getnext(sn) +-- end + local n = copy_node(start) -- ignore components + resetinjection(n) + setfield(n,"char",multiple[k]) + 
setfield(n,"next",sn) + setfield(n,"prev",start) + if sn then + setfield(sn,"prev",n) + end + setfield(start,"next",n) + start = n + end + end + return head, start, true + else + if trace_multiples then + logprocess("no multiple for %s",gref(getchar(start))) + end + return head, start, false + end +end + +function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence) + local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue + local choice, comment = get_alternative_glyph(start,alternative,value,trace_alternatives) + if choice then + if trace_alternatives then + logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(getchar(start)),choice,gref(choice),comment) + end + resetinjection(start) + setfield(start,"char",choice) + else + if trace_alternatives then + logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(getchar(start)),comment) + end + end + return head, start, true +end + +function handlers.gsub_multiple(head,start,kind,lookupname,multiple,sequence) + if trace_multiples then + logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(getchar(start)),gref(multiple)) + end + return multiple_glyphs(head,start,multiple,sequence.flags[1]) +end + +function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence) + local s, stop, discfound = getnext(start), nil, false + local startchar = getchar(start) + if marks[startchar] then + while s do + local id = getid(s) + if id == glyph_code and getfont(s) == currentfont and getsubtype(s)<256 then + local lg = ligature[getchar(s)] + if lg then + stop = s + ligature = lg + s = getnext(s) + else + break + end + else + break + end + end + if stop then + local lig = ligature.ligature + if lig then + if trace_ligatures then + local stopchar = getchar(stop) + head, start = markstoligature(kind,lookupname,head,start,stop,lig) + logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start))) + else + head, start = markstoligature(kind,lookupname,head,start,stop,lig) + end + return head, start, true + else + -- ok, goto next lookup + end + end + else + local skipmark = sequence.flags[1] + while s do + local id = getid(s) + if id == glyph_code and getsubtype(s)<256 then + if getfont(s) == currentfont then + local char = getchar(s) + if skipmark and marks[char] then + s = getnext(s) + else + local lg = ligature[char] + if lg then + stop = s + ligature = lg + s = getnext(s) + else + break + end + end + else + break + end + elseif id == disc_code then + discfound = true + s = getnext(s) + else + break + end + end + local lig = ligature.ligature + if lig then + if stop then + if trace_ligatures then + local stopchar = getchar(stop) + head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound) + logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start))) + else + head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound) + end + else + -- weird but happens (in some arabic font) + resetinjection(start) + setfield(start,"char",lig) + if trace_ligatures then + logprocess("%s: replacing %s by (no real) ligature %s case 3",pref(kind,lookupname),gref(startchar),gref(lig)) + end + end + return head, start, true + else + -- weird but happens + end + end + return head, start, false +end + +--[[ldx-- +
+We get hits on a mark, but we're not sure if it has to be applied so we need
+to explicitly test for basechar, baselig and basemark entries.
+--ldx]]-- + +function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence) + local markchar = getchar(start) + if marks[markchar] then + local base = getprev(start) -- [glyph] [start=mark] + if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then + local basechar = getchar(base) + if marks[basechar] then + while true do + base = getprev(base) + if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then + basechar = getchar(base) + if not marks[basechar] then + break + end + else + if trace_bugs then + logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar)) + end + return head, start, false + end + end + end + local baseanchors = descriptions[basechar] + if baseanchors then + baseanchors = baseanchors.anchors + end + if baseanchors then + local baseanchors = baseanchors['basechar'] + if baseanchors then + local al = anchorlookups[lookupname] + for anchor,ba in next, baseanchors do + if al[anchor] then + local ma = markanchors[anchor] + if ma then + local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) + if trace_marks then + logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)", + pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) + end + return head, start, true + end + end + end + if trace_bugs then + logwarning("%s, no matching anchors for mark %s and base %s",pref(kind,lookupname),gref(markchar),gref(basechar)) + end + end + elseif trace_bugs then + -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar)) + onetimemessage(currentfont,basechar,"no base anchors",report_fonts) + end + elseif trace_bugs then + logwarning("%s: prev node is no char",pref(kind,lookupname)) + end + elseif trace_bugs then + logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar)) + end + return head, start, false +end + +function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence) + -- check chainpos variant + local markchar = getchar(start) + if marks[markchar] then + local base = getprev(start) -- [glyph] [optional marks] [start=mark] + if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then + local basechar = getchar(base) + if marks[basechar] then + while true do + base = getprev(base) + if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then + basechar = getchar(base) + if not marks[basechar] then + break + end + else + if trace_bugs then + logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar)) + end + return head, start, false + end + end + end + local index = getligaindex(start) + local baseanchors = descriptions[basechar] + if baseanchors then + baseanchors = baseanchors.anchors + if baseanchors then + local baseanchors = baseanchors['baselig'] + if baseanchors then + local al = anchorlookups[lookupname] + for anchor, ba in next, baseanchors do + if al[anchor] then + local ma = markanchors[anchor] + if ma then + ba = ba[index] + if ba then + local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) -- index + if trace_marks then + logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)", + pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy) + end + return head, start, true + else + if 
trace_bugs then + logwarning("%s: no matching anchors for mark %s and baselig %s with index %a",pref(kind,lookupname),gref(markchar),gref(basechar),index) + end + end + end + end + end + if trace_bugs then + logwarning("%s: no matching anchors for mark %s and baselig %s",pref(kind,lookupname),gref(markchar),gref(basechar)) + end + end + end + elseif trace_bugs then + -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar)) + onetimemessage(currentfont,basechar,"no base anchors",report_fonts) + end + elseif trace_bugs then + logwarning("%s: prev node is no char",pref(kind,lookupname)) + end + elseif trace_bugs then + logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar)) + end + return head, start, false +end + +function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence) + local markchar = getchar(start) + if marks[markchar] then + local base = getprev(start) -- [glyph] [basemark] [start=mark] + local slc = getligaindex(start) + if slc then -- a rather messy loop ... needs checking with husayni + while base do + local blc = getligaindex(base) + if blc and blc ~= slc then + base = getprev(base) + else + break + end + end + end + if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then -- subtype test can go + local basechar = getchar(base) + local baseanchors = descriptions[basechar] + if baseanchors then + baseanchors = baseanchors.anchors + if baseanchors then + baseanchors = baseanchors['basemark'] + if baseanchors then + local al = anchorlookups[lookupname] + for anchor,ba in next, baseanchors do + if al[anchor] then + local ma = markanchors[anchor] + if ma then + local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) + if trace_marks then + logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)", + pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) + end + return head, start, true + end + end + end + if trace_bugs then + logwarning("%s: no matching anchors for mark %s and basemark %s",pref(kind,lookupname),gref(markchar),gref(basechar)) + end + end + end + elseif trace_bugs then + -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar)) + onetimemessage(currentfont,basechar,"no base anchors",report_fonts) + end + elseif trace_bugs then + logwarning("%s: prev node is no mark",pref(kind,lookupname)) + end + elseif trace_bugs then + logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar)) + end + return head, start, false +end + +function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence) -- to be checked + local alreadydone = cursonce and getprop(start,a_cursbase) + if not alreadydone then + local done = false + local startchar = getchar(start) + if marks[startchar] then + if trace_cursive then + logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar)) + end + else + local nxt = getnext(start) + while not done and nxt and getid(nxt) == glyph_code and getfont(nxt) == currentfont and getsubtype(nxt)<256 do + local nextchar = getchar(nxt) + if marks[nextchar] then + -- should not happen (maybe warning) + nxt = getnext(nxt) + else + local entryanchors = descriptions[nextchar] + if entryanchors then + entryanchors = entryanchors.anchors + if entryanchors then + entryanchors = entryanchors['centry'] + if entryanchors then + local al = anchorlookups[lookupname] + for anchor, entry in next, 
entryanchors do + if al[anchor] then + local exit = exitanchors[anchor] + if exit then + local dx, dy, bound = setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar]) + if trace_cursive then + logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode) + end + done = true + break + end + end + end + end + end + elseif trace_bugs then + -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar)) + onetimemessage(currentfont,startchar,"no entry anchors",report_fonts) + end + break + end + end + end + return head, start, done + else + if trace_cursive and trace_details then + logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone) + end + return head, start, false + end +end + +function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence) + local startchar = getchar(start) + local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar]) + if trace_kerns then + logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h) + end + return head, start, false +end + +function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence) + -- todo: kerns in disc nodes: pre, post, replace -> loop over disc too + -- todo: kerns in components of ligatures + local snext = getnext(start) + if not snext then + return head, start, false + else + local prev, done = start, false + local factor = tfmdata.parameters.factor + local lookuptype = lookuptypes[lookupname] + while snext and getid(snext) == glyph_code and getfont(snext) == currentfont and getsubtype(snext)<256 do + local nextchar = getchar(snext) + local krn = kerns[nextchar] + if not krn and marks[nextchar] then + prev = snext + snext = getnext(snext) + else + if not krn then + -- skip + elseif type(krn) == "table" then + if lookuptype == "pair" then -- probably not needed + local a, b = krn[2], krn[3] + if a and #a > 0 then + local startchar = getchar(start) + local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar]) + if trace_kerns then + logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h) + end + end + if b and #b > 0 then + local startchar = getchar(start) + local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar]) + if trace_kerns then + logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h) + end + end + else -- wrong ... 
position has different entries + report_process("%s: check this out (old kern stuff)",pref(kind,lookupname)) + -- local a, b = krn[2], krn[6] + -- if a and a ~= 0 then + -- local k = setkern(snext,factor,rlmode,a) + -- if trace_kerns then + -- logprocess("%s: inserting first kern %s between %s and %s",pref(kind,lookupname),k,gref(getchar(prev)),gref(nextchar)) + -- end + -- end + -- if b and b ~= 0 then + -- logwarning("%s: ignoring second kern xoff %s",pref(kind,lookupname),b*factor) + -- end + end + done = true + elseif krn ~= 0 then + local k = setkern(snext,factor,rlmode,krn) + if trace_kerns then + logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(getchar(prev)),gref(nextchar)) + end + done = true + end + break + end + end + return head, start, done + end +end + +--[[ldx-- +

I will implement multiple chain replacements once I run into a font that uses +it. It's not that complex to handle.

+--ldx]]-- + +local chainmores = { } +local chainprocs = { } + +local function logprocess(...) + if trace_steps then + registermessage(...) + end + report_subchain(...) +end + +local logwarning = report_subchain + +local function logprocess(...) + if trace_steps then + registermessage(...) + end + report_chain(...) +end + +local logwarning = report_chain + +-- We could share functions but that would lead to extra function calls with many +-- arguments, redundant tests and confusing messages. + +function chainprocs.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname) + logwarning("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname)) + return head, start, false +end + +function chainmores.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname,n) + logprocess("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname)) + return head, start, false +end + +-- The reversesub is a special case, which is why we need to store the replacements +-- in a bit weird way. There is no lookup and the replacement comes from the lookup +-- itself. It is meant mostly for dealing with Urdu. + +function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements) + local char = getchar(start) + local replacement = replacements[char] + if replacement then + if trace_singles then + logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement)) + end + resetinjection(start) + setfield(start,"char",replacement) + return head, start, true + else + return head, start, false + end +end + +--[[ldx-- +

This chain stuff is somewhat tricky since we can have a sequence of actions to be +applied: single, alternate, multiple or ligature, where ligature can be an invalid +one in the sense that it will replace multiple glyphs by one but not necessarily one that +looks like the combination (i.e. it is then the counterpart of multiple). For +example, the following is valid:

+ + +xxxabcdexxx [single a->A][multiple b->BCD][ligature cde->E] xxxABCDExxx + + +

Therefore we don't really do the replacement here yet unless we have the +single lookup case. The efficiency of the replacements can be improved by deleting +as little as needed, but that would also make the code even more messy.
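The example above can be mimicked at the string level (this is only an illustration; the real handlers work on glyph nodes and, as described, defer most replacements):

local steps = {
    { from = "a",   to = "A"   },   -- single
    { from = "b",   to = "BCD" },   -- multiple
    { from = "cde", to = "E"   },   -- ligature (the counterpart of multiple)
}

local function apply_chain(s)
    for i = 1, #steps do
        s = s:gsub(steps[i].from, steps[i].to, 1)
    end
    return s
end

print(apply_chain("xxxabcdexxx"))   -- xxxABCDExxx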

+--ldx]]-- + +-- local function delete_till_stop(head,start,stop,ignoremarks) -- keeps start +-- local n = 1 +-- if start == stop then +-- -- done +-- elseif ignoremarks then +-- repeat -- start x x m x x stop => start m +-- local next = getnext(start) +-- if not marks[getchar(next)] then +-- local components = getfield(next,"components") +-- if components then -- probably not needed +-- flush_node_list(components) +-- end +-- head = delete_node(head,next) +-- end +-- n = n + 1 +-- until next == stop +-- else -- start x x x stop => start +-- repeat +-- local next = getnext(start) +-- local components = getfield(next,"components") +-- if components then -- probably not needed +-- flush_node_list(components) +-- end +-- head = delete_node(head,next) +-- n = n + 1 +-- until next == stop +-- end +-- return head, n +-- end + +--[[ldx-- +

Here we replace start by a single variant. First we delete the rest of the +match.
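For reference, the data consulted here has a very simple shape; the sketch below uses a made-up lookup name and made-up glyph slots:

-- a single substitution lookup maps one character to one replacement,
-- so the chain step only has to swap the char field of the start node
local lookuphash = {
    s_smcp_demo = {                 -- hypothetical lookup name
        [0x0061] = 0xE061,          -- 'a' -> small caps variant (illustrative slot)
    },
}
local replacement = lookuphash.s_smcp_demo[0x0061]
-- in the handler: resetinjection(current); setfield(current,"char",replacement)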

+--ldx]]-- + +function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex) + -- todo: marks ? + local current = start + local subtables = currentlookup.subtables + if #subtables > 1 then + logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," ")) + end + while current do + if getid(current) == glyph_code then + local currentchar = getchar(current) + local lookupname = subtables[1] -- only 1 + local replacement = lookuphash[lookupname] + if not replacement then + if trace_bugs then + logwarning("%s: no single hits",cref(kind,chainname,chainlookupname,lookupname,chainindex)) + end + else + replacement = replacement[currentchar] + if not replacement or replacement == "" then + if trace_bugs then + logwarning("%s: no single for %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar)) + end + else + if trace_singles then + logprocess("%s: replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement)) + end + resetinjection(current) + setfield(current,"char",replacement) + end + end + return head, start, true + elseif current == stop then + break + else + current = getnext(current) + end + end + return head, start, false +end + +chainmores.gsub_single = chainprocs.gsub_single + +--[[ldx-- +

Here we replace start by a sequence of new glyphs. First we delete the rest of +the match.
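The lookup data for this case maps one character to a list of replacements (the entry below is only illustrative):

-- one character expands to an array of glyphs; multiple_glyphs then turns
-- the single start node into that sequence
local lookuphash = {
    m_ccmp_demo = {                        -- hypothetical lookup name
        [0x00C7] = { 0x0043, 0x0327 },     -- Ç -> C + combining cedilla
    },
}
local replacements = lookuphash.m_ccmp_demo[0x00C7]
-- in the handler: return multiple_glyphs(head,start,replacements,flags)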

+--ldx]]-- + +function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) + -- local head, n = delete_till_stop(head,start,stop) + local startchar = getchar(start) + local subtables = currentlookup.subtables + local lookupname = subtables[1] + local replacements = lookuphash[lookupname] + if not replacements then + if trace_bugs then + logwarning("%s: no multiple hits",cref(kind,chainname,chainlookupname,lookupname)) + end + else + replacements = replacements[startchar] + if not replacements or replacement == "" then + if trace_bugs then + logwarning("%s: no multiple for %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar)) + end + else + if trace_multiples then + logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements)) + end + return multiple_glyphs(head,start,replacements,currentlookup.flags[1]) + end + end + return head, start, false +end + +chainmores.gsub_multiple = chainprocs.gsub_multiple + +--[[ldx-- +

Here we replace start by a new glyph. First we delete the rest of the match.
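An alternate substitution stores several candidate glyphs per character and a feature value selects one of them; the picker below only approximates what get_alternative_glyph does (names and slots are made up):

local alternatives = {
    [0x0067] = { 0xE067, 0xE068 },         -- 'g': two stylistic variants (illustrative)
}

local function pick(alts, value)
    if value == true then value = 1 end    -- a plain 'yes' takes the first alternate
    return alts[value] or alts[#alts]      -- fall back when the request is out of range
end

print(("0x%X"):format(pick(alternatives[0x0067], 2)))  -- 0xE068, the second alternate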

+--ldx]]-- + +-- char_1 mark_1 -> char_x mark_1 (ignore marks) +-- char_1 mark_1 -> char_x + +-- to be checked: do we always have just one glyph? +-- we can also have alternates for marks +-- marks come last anyway +-- are there cases where we need to delete the mark + +function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) + local current = start + local subtables = currentlookup.subtables + local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue + while current do + if getid(current) == glyph_code then -- is this check needed? + local currentchar = getchar(current) + local lookupname = subtables[1] + local alternatives = lookuphash[lookupname] + if not alternatives then + if trace_bugs then + logwarning("%s: no alternative hit",cref(kind,chainname,chainlookupname,lookupname)) + end + else + alternatives = alternatives[currentchar] + if alternatives then + local choice, comment = get_alternative_glyph(current,alternatives,value,trace_alternatives) + if choice then + if trace_alternatives then + logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(char),choice,gref(choice),comment) + end + resetinjection(start) + setfield(start,"char",choice) + else + if trace_alternatives then + logwarning("%s: no variant %a for %s, %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(char),comment) + end + end + elseif trace_bugs then + logwarning("%s: no alternative for %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar),comment) + end + end + return head, start, true + elseif current == stop then + break + else + current = getnext(current) + end + end + return head, start, false +end + +chainmores.gsub_alternate = chainprocs.gsub_alternate + +--[[ldx-- +

When we replace ligatures we use a helper that handles the marks. I might change +this function (move the code inline and handle the marks with a separate function). We +assume rather stupid ligatures (no complex disc nodes).
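The ligature data is a tree keyed by successive characters, with a completed path carrying its result in the ligature field; a stripped-down walk over such a tree (real Unicode slots, but detached from the node list handling below) looks like this:

local ligatures = {
    [0x0066] = {                              -- f
        [0x0066] = { ligature = 0xFB00,       -- ff
            [0x0069] = { ligature = 0xFB03 }, -- ffi
        },
        [0x0069] = { ligature = 0xFB01 },     -- fi
    },
}

-- descend as far as the following characters allow and remember the last
-- completed ligature seen on the way (so the longest match wins)
local function longest(tree, chars, i)
    local hit, last = nil, i - 1
    while i <= #chars and tree[chars[i]] do
        tree = tree[chars[i]]
        if tree.ligature then hit, last = tree.ligature, i end
        i = i + 1
    end
    return hit, last
end

local lig, last = longest(ligatures[0x0066], { 0x0066, 0x0069 }, 1)
-- lig == 0xFB03 (ffi), last == 2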

+--ldx]]-- + +function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex) + local startchar = getchar(start) + local subtables = currentlookup.subtables + local lookupname = subtables[1] + local ligatures = lookuphash[lookupname] + if not ligatures then + if trace_bugs then + logwarning("%s: no ligature hits",cref(kind,chainname,chainlookupname,lookupname,chainindex)) + end + else + ligatures = ligatures[startchar] + if not ligatures then + if trace_bugs then + logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar)) + end + else + local s = getnext(start) + local discfound = false + local last = stop + local nofreplacements = 0 + local skipmark = currentlookup.flags[1] + while s do + local id = getid(s) + if id == disc_code then + s = getnext(s) + discfound = true + else + local schar = getchar(s) + if skipmark and marks[schar] then -- marks + s = getnext(s) + else + local lg = ligatures[schar] + if lg then + ligatures, last, nofreplacements = lg, s, nofreplacements + 1 + if s == stop then + break + else + s = getnext(s) + end + else + break + end + end + end + end + local l2 = ligatures.ligature + if l2 then + if chainindex then + stop = last + end + if trace_ligatures then + if start == stop then + logprocess("%s: replacing character %s by ligature %s case 3",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2)) + else + logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop)),gref(l2)) + end + end + head, start = toligature(kind,lookupname,head,start,stop,l2,currentlookup.flags[1],discfound) + return head, start, true, nofreplacements + elseif trace_bugs then + if start == stop then + logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar)) + else + logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop))) + end + end + end + end + return head, start, false, 0 +end + +chainmores.gsub_ligature = chainprocs.gsub_ligature + +function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) + local markchar = getchar(start) + if marks[markchar] then + local subtables = currentlookup.subtables + local lookupname = subtables[1] + local markanchors = lookuphash[lookupname] + if markanchors then + markanchors = markanchors[markchar] + end + if markanchors then + local base = getprev(start) -- [glyph] [start=mark] + if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then + local basechar = getchar(base) + if marks[basechar] then + while true do + base = getprev(base) + if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then + basechar = getchar(base) + if not marks[basechar] then + break + end + else + if trace_bugs then + logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar)) + end + return head, start, false + end + end + end + local baseanchors = descriptions[basechar].anchors + if baseanchors then + local baseanchors = baseanchors['basechar'] + if baseanchors then + local al = anchorlookups[lookupname] + for anchor,ba in next, baseanchors do + if al[anchor] then + local ma = 
markanchors[anchor] + if ma then + local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) + if trace_marks then + logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)", + cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) + end + return head, start, true + end + end + end + if trace_bugs then + logwarning("%s, no matching anchors for mark %s and base %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar)) + end + end + end + elseif trace_bugs then + logwarning("%s: prev node is no char",cref(kind,chainname,chainlookupname,lookupname)) + end + elseif trace_bugs then + logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar)) + end + elseif trace_bugs then + logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar)) + end + return head, start, false +end + +function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) + local markchar = getchar(start) + if marks[markchar] then + local subtables = currentlookup.subtables + local lookupname = subtables[1] + local markanchors = lookuphash[lookupname] + if markanchors then + markanchors = markanchors[markchar] + end + if markanchors then + local base = getprev(start) -- [glyph] [optional marks] [start=mark] + if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then + local basechar = getchar(base) + if marks[basechar] then + while true do + base = getprev(base) + if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then + basechar = getchar(base) + if not marks[basechar] then + break + end + else + if trace_bugs then + logwarning("%s: no base for mark %s",cref(kind,chainname,chainlookupname,lookupname),markchar) + end + return head, start, false + end + end + end + -- todo: like marks a ligatures hash + local index = getligaindex(start) + local baseanchors = descriptions[basechar].anchors + if baseanchors then + local baseanchors = baseanchors['baselig'] + if baseanchors then + local al = anchorlookups[lookupname] + for anchor,ba in next, baseanchors do + if al[anchor] then + local ma = markanchors[anchor] + if ma then + ba = ba[index] + if ba then + local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) + if trace_marks then + logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)", + cref(kind,chainname,chainlookupname,lookupname),anchor,a or bound,gref(markchar),gref(basechar),index,dx,dy) + end + return head, start, true + end + end + end + end + if trace_bugs then + logwarning("%s: no matching anchors for mark %s and baselig %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar)) + end + end + end + elseif trace_bugs then + logwarning("feature %s, lookup %s: prev node is no char",kind,lookupname) + end + elseif trace_bugs then + logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar)) + end + elseif trace_bugs then + logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar)) + end + return head, start, false +end + +function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) + local markchar = getchar(start) + if 
marks[markchar] then + -- local markanchors = descriptions[markchar].anchors markanchors = markanchors and markanchors.mark + local subtables = currentlookup.subtables + local lookupname = subtables[1] + local markanchors = lookuphash[lookupname] + if markanchors then + markanchors = markanchors[markchar] + end + if markanchors then + local base = getprev(start) -- [glyph] [basemark] [start=mark] + local slc = getligaindex(start) + if slc then -- a rather messy loop ... needs checking with husayni + while base do + local blc = getligaindex(base) + if blc and blc ~= slc then + base = getprev(base) + else + break + end + end + end + if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then -- subtype test can go + local basechar = getchar(base) + local baseanchors = descriptions[basechar].anchors + if baseanchors then + baseanchors = baseanchors['basemark'] + if baseanchors then + local al = anchorlookups[lookupname] + for anchor,ba in next, baseanchors do + if al[anchor] then + local ma = markanchors[anchor] + if ma then + local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) + if trace_marks then + logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)", + cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) + end + return head, start, true + end + end + end + if trace_bugs then + logwarning("%s: no matching anchors for mark %s and basemark %s",gref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar)) + end + end + end + elseif trace_bugs then + logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname)) + end + elseif trace_bugs then + logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar)) + end + elseif trace_bugs then + logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar)) + end + return head, start, false +end + +function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) + local alreadydone = cursonce and getprop(start,a_cursbase) + if not alreadydone then + local startchar = getchar(start) + local subtables = currentlookup.subtables + local lookupname = subtables[1] + local exitanchors = lookuphash[lookupname] + if exitanchors then + exitanchors = exitanchors[startchar] + end + if exitanchors then + local done = false + if marks[startchar] then + if trace_cursive then + logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar)) + end + else + local nxt = getnext(start) + while not done and nxt and getid(nxt) == glyph_code and getfont(nxt) == currentfont and getsubtype(nxt)<256 do + local nextchar = getchar(nxt) + if marks[nextchar] then + -- should not happen (maybe warning) + nxt = getnext(nxt) + else + local entryanchors = descriptions[nextchar] + if entryanchors then + entryanchors = entryanchors.anchors + if entryanchors then + entryanchors = entryanchors['centry'] + if entryanchors then + local al = anchorlookups[lookupname] + for anchor, entry in next, entryanchors do + if al[anchor] then + local exit = exitanchors[anchor] + if exit then + local dx, dy, bound = setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar]) + if trace_cursive then + logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode 
%s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode) + end + done = true + break + end + end + end + end + end + elseif trace_bugs then + -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar)) + onetimemessage(currentfont,startchar,"no entry anchors",report_fonts) + end + break + end + end + end + return head, start, done + else + if trace_cursive and trace_details then + logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone) + end + return head, start, false + end + end + return head, start, false +end + +function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence) + -- untested .. needs checking for the new model + local startchar = getchar(start) + local subtables = currentlookup.subtables + local lookupname = subtables[1] + local kerns = lookuphash[lookupname] + if kerns then + kerns = kerns[startchar] -- needed ? + if kerns then + local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar]) + if trace_kerns then + logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h) + end + end + end + return head, start, false +end + +chainmores.gpos_single = chainprocs.gpos_single -- okay? + +-- when machines become faster i will make a shared function + +function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence) + local snext = getnext(start) + if snext then + local startchar = getchar(start) + local subtables = currentlookup.subtables + local lookupname = subtables[1] + local kerns = lookuphash[lookupname] + if kerns then + kerns = kerns[startchar] + if kerns then + local lookuptype = lookuptypes[lookupname] + local prev, done = start, false + local factor = tfmdata.parameters.factor + while snext and getid(snext) == glyph_code and getfont(snext) == currentfont and getsubtype(snext)<256 do + local nextchar = getchar(snext) + local krn = kerns[nextchar] + if not krn and marks[nextchar] then + prev = snext + snext = getnext(snext) + else + if not krn then + -- skip + elseif type(krn) == "table" then + if lookuptype == "pair" then + local a, b = krn[2], krn[3] + if a and #a > 0 then + local startchar = getchar(start) + local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar]) + if trace_kerns then + logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h) + end + end + if b and #b > 0 then + local startchar = getchar(start) + local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar]) + if trace_kerns then + logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h) + end + end + else + report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname)) + local a, b = krn[2], krn[6] + if a and a ~= 0 then + local k = setkern(snext,factor,rlmode,a) + if trace_kerns then + logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar)) + end + end + if b and b ~= 0 then + logwarning("%s: ignoring second kern xoff 
%s",cref(kind,chainname,chainlookupname),b*factor) + end + end + done = true + elseif krn ~= 0 then + local k = setkern(snext,factor,rlmode,krn) + if trace_kerns then + logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar)) + end + done = true + end + break + end + end + return head, start, done + end + end + end + return head, start, false +end + +chainmores.gpos_pair = chainprocs.gpos_pair -- okay? + +-- what pointer to return, spec says stop +-- to be discussed ... is bidi changer a space? +-- elseif char == zwnj and sequence[n][32] then -- brrr + +-- somehow l or f is global +-- we don't need to pass the currentcontext, saves a bit +-- make a slow variant then can be activated but with more tracing + +local function show_skip(kind,chainname,char,ck,class) + if ck[9] then + logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a, %a => %a",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10]) + else + logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a",cref(kind,chainname),gref(char),class,ck[1],ck[2]) + end +end + +local quit_on_no_replacement = true + +directives.register("otf.chain.quitonnoreplacement",function(value) -- maybe per font + quit_on_no_replacement = value +end) + +local function normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash) + -- local rule, lookuptype, sequence, f, l, lookups = ck[1], ck[2] ,ck[3], ck[4], ck[5], ck[6] + local flags = sequence.flags + local done = false + local skipmark = flags[1] + local skipligature = flags[2] + local skipbase = flags[3] + local someskip = skipmark or skipligature or skipbase -- could be stored in flags for a fast test (hm, flags could be false !) + local markclass = sequence.markclass -- todo, first we need a proper test + local skipped = false + for k=1,#contexts do + local match = true + local current = start + local last = start + local ck = contexts[k] + local seq = ck[3] + local s = #seq + -- f..l = mid string + if s == 1 then + -- never happens + match = getid(current) == glyph_code and getfont(current) == currentfont and getsubtype(current)<256 and seq[1][getchar(current)] + else + -- maybe we need a better space check (maybe check for glue or category or combination) + -- we cannot optimize for n=2 because there can be disc nodes + local f, l = ck[4], ck[5] + -- current match + if f == 1 and f == l then -- current only + -- already a hit + -- match = true + else -- before/current/after | before/current | current/after + -- no need to test first hit (to be optimized) + if f == l then -- new, else last out of sync (f is > 1) + -- match = true + else + local n = f + 1 + last = getnext(last) + while n <= l do + if last then + local id = getid(last) + if id == glyph_code then + if getfont(last) == currentfont and getsubtype(last)<256 then + local char = getchar(last) + local ccd = descriptions[char] + if ccd then + local class = ccd.class + if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then + skipped = true + if trace_skips then + show_skip(kind,chainname,char,ck,class) + end + last = getnext(last) + elseif seq[n][char] then + if n < l then + last = getnext(last) + end + n = n + 1 + else + match = false + break + end + else + match = false + break + end + else + match = false + break + end + elseif id == disc_code then + last = getnext(last) + else + match = false + break + end + else + match = false + break + 
end + end + end + end + -- before + if match and f > 1 then + local prev = getprev(start) + if prev then + local n = f-1 + while n >= 1 do + if prev then + local id = getid(prev) + if id == glyph_code then + if getfont(prev) == currentfont and getsubtype(prev)<256 then -- normal char + local char = getchar(prev) + local ccd = descriptions[char] + if ccd then + local class = ccd.class + if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then + skipped = true + if trace_skips then + show_skip(kind,chainname,char,ck,class) + end + elseif seq[n][char] then + n = n -1 + else + match = false + break + end + else + match = false + break + end + else + match = false + break + end + elseif id == disc_code then + -- skip 'm + elseif seq[n][32] then + n = n -1 + else + match = false + break + end + prev = getprev(prev) + elseif seq[n][32] then -- somewhat special, as zapfino can have many preceding spaces + n = n -1 + else + match = false + break + end + end + elseif f == 2 then + match = seq[1][32] + else + for n=f-1,1 do + if not seq[n][32] then + match = false + break + end + end + end + end + -- after + if match and s > l then + local current = last and getnext(last) + if current then + -- removed optimization for s-l == 1, we have to deal with marks anyway + local n = l + 1 + while n <= s do + if current then + local id = getid(current) + if id == glyph_code then + if getfont(current) == currentfont and getsubtype(current)<256 then -- normal char + local char = getchar(current) + local ccd = descriptions[char] + if ccd then + local class = ccd.class + if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then + skipped = true + if trace_skips then + show_skip(kind,chainname,char,ck,class) + end + elseif seq[n][char] then + n = n + 1 + else + match = false + break + end + else + match = false + break + end + else + match = false + break + end + elseif id == disc_code then + -- skip 'm + elseif seq[n][32] then -- brrr + n = n + 1 + else + match = false + break + end + current = getnext(current) + elseif seq[n][32] then + n = n + 1 + else + match = false + break + end + end + elseif s-l == 1 then + match = seq[s][32] + else + for n=l+1,s do + if not seq[n][32] then + match = false + break + end + end + end + end + end + if match then + -- ck == currentcontext + if trace_contexts then + local rule, lookuptype, f, l = ck[1], ck[2], ck[4], ck[5] + local char = getchar(start) + if ck[9] then + logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a, %a => %a", + cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10]) + else + logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a", + cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype) + end + end + local chainlookups = ck[6] + if chainlookups then + local nofchainlookups = #chainlookups + -- we can speed this up if needed + if nofchainlookups == 1 then + local chainlookupname = chainlookups[1] + local chainlookup = lookuptable[chainlookupname] + if chainlookup then + local cp = chainprocs[chainlookup.type] + if cp then + local ok + head, start, ok = cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence) + if ok then + done = true + end + else + logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type) + end + else -- shouldn't happen + logprocess("%s is not 
yet supported",cref(kind,chainname,chainlookupname)) + end + else + local i = 1 + while true do + if skipped then + while true do + local char = getchar(start) + local ccd = descriptions[char] + if ccd then + local class = ccd.class + if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then + start = getnext(start) + else + break + end + else + break + end + end + end + local chainlookupname = chainlookups[i] + local chainlookup = lookuptable[chainlookupname] + if not chainlookup then + -- okay, n matches, < n replacements + i = i + 1 + else + local cp = chainmores[chainlookup.type] + if not cp then + -- actually an error + logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type) + i = i + 1 + else + local ok, n + head, start, ok, n = cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence) + -- messy since last can be changed ! + if ok then + done = true + -- skip next one(s) if ligature + i = i + (n or 1) + else + i = i + 1 + end + end + end + if i > nofchainlookups then + break + elseif start then + start = getnext(start) + else + -- weird + end + end + end + else + local replacements = ck[7] + if replacements then + head, start, done = chainprocs.reversesub(head,start,last,kind,chainname,ck,lookuphash,replacements) -- sequence + else + done = quit_on_no_replacement -- can be meant to be skipped / quite inconsistent in fonts + if trace_contexts then + logprocess("%s: skipping match",cref(kind,chainname)) + end + end + end + end + end + return head, start, done +end + +-- Because we want to keep this elsewhere (an because speed is less an issue) we +-- pass the font id so that the verbose variant can access the relevant helper tables. + +local verbose_handle_contextchain = function(font,...) + logwarning("no verbose handler installed, reverting to 'normal'") + otf.setcontextchain() + return normal_handle_contextchain(...) +end + +otf.chainhandlers = { + normal = normal_handle_contextchain, + verbose = verbose_handle_contextchain, +} + +function otf.setcontextchain(method) + if not method or method == "normal" or not otf.chainhandlers[method] then + if handlers.contextchain then -- no need for a message while making the format + logwarning("installing normal contextchain handler") + end + handlers.contextchain = normal_handle_contextchain + else + logwarning("installing contextchain handler %a",method) + local handler = otf.chainhandlers[method] + handlers.contextchain = function(...) + return handler(currentfont,...) -- hm, get rid of ... + end + end + handlers.gsub_context = handlers.contextchain + handlers.gsub_contextchain = handlers.contextchain + handlers.gsub_reversecontextchain = handlers.contextchain + handlers.gpos_contextchain = handlers.contextchain + handlers.gpos_context = handlers.contextchain +end + +otf.setcontextchain() + +local missing = { } -- we only report once + +local function logprocess(...) + if trace_steps then + registermessage(...) + end + report_process(...) 
+end + +local logwarning = report_process + +local function report_missing_cache(typ,lookup) + local f = missing[currentfont] if not f then f = { } missing[currentfont] = f end + local t = f[typ] if not t then t = { } f[typ] = t end + if not t[lookup] then + t[lookup] = true + logwarning("missing cache for lookup %a, type %a, font %a, name %a",lookup,typ,currentfont,tfmdata.properties.fullname) + end +end + +local resolved = { } -- we only resolve a font,script,language pair once + +-- todo: pass all these 'locals' in a table + +local lookuphashes = { } + +setmetatableindex(lookuphashes, function(t,font) + local lookuphash = fontdata[font].resources.lookuphash + if not lookuphash or not next(lookuphash) then + lookuphash = false + end + t[font] = lookuphash + return lookuphash +end) + +-- fonts.hashes.lookups = lookuphashes + +local autofeatures = fonts.analyzers.features -- was: constants + +local function initialize(sequence,script,language,enabled) + local features = sequence.features + if features then + local order = sequence.order + if order then + for i=1,#order do -- + local kind = order[i] -- + local valid = enabled[kind] + if valid then + local scripts = features[kind] -- + local languages = scripts[script] or scripts[wildcard] + if languages and (languages[language] or languages[wildcard]) then + return { valid, autofeatures[kind] or false, sequence.chain or 0, kind, sequence } + end + end + end + else + -- can't happen + end + end + return false +end + +function otf.dataset(tfmdata,font) -- generic variant, overloaded in context + local shared = tfmdata.shared + local properties = tfmdata.properties + local language = properties.language or "dflt" + local script = properties.script or "dflt" + local enabled = shared.features + local res = resolved[font] + if not res then + res = { } + resolved[font] = res + end + local rs = res[script] + if not rs then + rs = { } + res[script] = rs + end + local rl = rs[language] + if not rl then + rl = { + -- indexed but we can also add specific data by key + } + rs[language] = rl + local sequences = tfmdata.resources.sequences + for s=1,#sequences do + local v = enabled and initialize(sequences[s],script,language,enabled) + if v then + rl[#rl+1] = v + end + end + end + return rl +end + +-- elseif id == glue_code then +-- if p[5] then -- chain +-- local pc = pp[32] +-- if pc then +-- start, ok = start, false -- p[1](start,kind,p[2],pc,p[3],p[4]) +-- if ok then +-- done = true +-- end +-- if start then start = getnext(start) end +-- else +-- start = getnext(start) +-- end +-- else +-- start = getnext(start) +-- end + +-- there will be a new direction parser (pre-parsed etc) + +-- less bytecode: 290 -> 254 +-- +-- attr = attr or false +-- +-- local a = getattr(start,0) +-- if (a == attr and (not attribute or getprop(start,a_state) == attribute)) or (not attribute or getprop(start,a_state) == attribute) then +-- -- the action +-- end + +local function featuresprocessor(head,font,attr) + + local lookuphash = lookuphashes[font] -- we can also check sequences here + + if not lookuphash then + return head, false + end + + head = tonut(head) + + if trace_steps then + checkstep(head) + end + + tfmdata = fontdata[font] + descriptions = tfmdata.descriptions + characters = tfmdata.characters + resources = tfmdata.resources + + marks = resources.marks + anchorlookups = resources.lookup_to_anchor + lookuptable = resources.lookups + lookuptypes = resources.lookuptypes + lookuptags = resources.lookuptags + + currentfont = font + rlmode = 0 + + local sequences 
= resources.sequences + local done = false + local datasets = otf.dataset(tfmdata,font,attr) + + local dirstack = { } -- could move outside function + + -- We could work on sub start-stop ranges instead but I wonder if there is that + -- much speed gain (experiments showed that it made not much sense) and we need + -- to keep track of directions anyway. Also at some point I want to play with + -- font interactions and then we do need the full sweeps. + + -- Keeping track of the headnode is needed for devanagari (I generalized it a bit + -- so that multiple cases are also covered.) + + -- todo: retain prev + + for s=1,#datasets do + local dataset = datasets[s] + featurevalue = dataset[1] -- todo: pass to function instead of using a global + + local sequence = dataset[5] -- sequences[s] -- also dataset[5] + local rlparmode = 0 + local topstack = 0 + local success = false + local attribute = dataset[2] + local chain = dataset[3] -- sequence.chain or 0 + local typ = sequence.type + local subtables = sequence.subtables + if chain < 0 then + -- this is a limited case, no special treatments like 'init' etc + local handler = handlers[typ] + -- we need to get rid of this slide! probably no longer needed in latest luatex + local start = find_node_tail(head) -- slow (we can store tail because there's always a skip at the end): todo + while start do + local id = getid(start) + if id == glyph_code then + if getfont(start) == font and getsubtype(start) < 256 then + local a = getattr(start,0) + if a then + a = a == attr + else + a = true + end + if a then + for i=1,#subtables do + local lookupname = subtables[i] + local lookupcache = lookuphash[lookupname] + if lookupcache then + local lookupmatch = lookupcache[getchar(start)] + if lookupmatch then + head, start, success = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) + if success then + break + end + end + else + report_missing_cache(typ,lookupname) + end + end + if start then start = getprev(start) end + else + start = getprev(start) + end + else + start = getprev(start) + end + else + start = getprev(start) + end + end + else + local handler = handlers[typ] + local ns = #subtables + local start = head -- local ? + rlmode = 0 -- to be checked ? 
+ if ns == 1 then -- happens often + local lookupname = subtables[1] + local lookupcache = lookuphash[lookupname] + if not lookupcache then -- also check for empty cache + report_missing_cache(typ,lookupname) + else + + local function subrun(start) + -- mostly for gsub, gpos would demand a more clever approach + local head = start + local done = false + while start do + local id = getid(start) + if id == glyph_code and getfont(start) == font and getsubtype(start) < 256 then + local a = getattr(start,0) + if a then + a = (a == attr) and (not attribute or getprop(start,a_state) == attribute) + else + a = not attribute or getprop(start,a_state) == attribute + end + if a then + local lookupmatch = lookupcache[getchar(start)] + if lookupmatch then + -- sequence kan weg + local ok + head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) + if ok then + done = true + end + end + if start then start = getnext(start) end + else + start = getnext(start) + end + else + start = getnext(start) + end + end + if done then + success = true + return head + end + end + + local function kerndisc(disc) -- we can assume that prev and next are glyphs + local prev = getprev(disc) + local next = getnext(disc) + if prev and next then + setfield(prev,"next",next) + -- setfield(next,"prev",prev) + local a = getattr(prev,0) + if a then + a = (a == attr) and (not attribute or getprop(prev,a_state) == attribute) + else + a = not attribute or getprop(prev,a_state) == attribute + end + if a then + local lookupmatch = lookupcache[getchar(prev)] + if lookupmatch then + -- sequence kan weg + local h, d, ok = handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) + if ok then + done = true + success = true + end + end + end + setfield(prev,"next",disc) + -- setfield(next,"prev",disc) + end + return next + end + + while start do + local id = getid(start) + if id == glyph_code then + if getfont(start) == font and getsubtype(start) < 256 then + local a = getattr(start,0) + if a then + a = (a == attr) and (not attribute or getprop(start,a_state) == attribute) + else + a = not attribute or getprop(start,a_state) == attribute + end + if a then + local lookupmatch = lookupcache[getchar(start)] + if lookupmatch then + -- sequence kan weg + local ok + head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) + if ok then + success = true + end + end + if start then start = getnext(start) end + else + start = getnext(start) + end + else + start = getnext(start) + end + elseif id == disc_code then + -- mostly for gsub + if getsubtype(start) == discretionary_code then + local pre = getfield(start,"pre") + if pre then + local new = subrun(pre) + if new then setfield(start,"pre",new) end + end + local post = getfield(start,"post") + if post then + local new = subrun(post) + if new then setfield(start,"post",new) end + end + local replace = getfield(start,"replace") + if replace then + local new = subrun(replace) + if new then setfield(start,"replace",new) end + end +elseif typ == "gpos_single" or typ == "gpos_pair" then + kerndisc(start) + end + start = getnext(start) + elseif id == whatsit_code then -- will be function + local subtype = getsubtype(start) + if subtype == dir_code then + local dir = getfield(start,"dir") + if dir == "+TRT" or dir == "+TLT" then + topstack = topstack + 1 + dirstack[topstack] = dir + elseif dir == "-TRT" or dir == "-TLT" then + topstack = topstack - 1 + end + local newdir = dirstack[topstack] + if newdir == "+TRT" 
then + rlmode = -1 + elseif newdir == "+TLT" then + rlmode = 1 + else + rlmode = rlparmode + end + if trace_directions then + report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir) + end + elseif subtype == localpar_code then + local dir = getfield(start,"dir") + if dir == "TRT" then + rlparmode = -1 + elseif dir == "TLT" then + rlparmode = 1 + else + rlparmode = 0 + end + -- one might wonder if the par dir should be looked at, so we might as well drop the next line + rlmode = rlparmode + if trace_directions then + report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode) + end + end + start = getnext(start) + elseif id == math_code then + start = getnext(end_of_math(start)) + else + start = getnext(start) + end + end + end + else + + local function subrun(start) + -- mostly for gsub, gpos would demand a more clever approach + local head = start + local done = false + while start do + local id = getid(start) + if id == glyph_code and getfont(start) == font and getsubtype(start) < 256 then + local a = getattr(start,0) + if a then + a = (a == attr) and (not attribute or getprop(start,a_state) == attribute) + else + a = not attribute or getprop(start,a_state) == attribute + end + if a then + for i=1,ns do + local lookupname = subtables[i] + local lookupcache = lookuphash[lookupname] + if lookupcache then + local lookupmatch = lookupcache[getchar(start)] + if lookupmatch then + -- we could move all code inline but that makes things even more unreadable + local ok + head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) + if ok then + done = true + break + elseif not start then + -- don't ask why ... shouldn't happen + break + end + end + else + report_missing_cache(typ,lookupname) + end + end + if start then start = getnext(start) end + else + start = getnext(start) + end + else + start = getnext(start) + end + end + if done then + success = true + return head + end + end + + local function kerndisc(disc) -- we can assume that prev and next are glyphs + local prev = getprev(disc) + local next = getnext(disc) + if prev and next then + setfield(prev,"next",next) + -- setfield(next,"prev",prev) + local a = getattr(prev,0) + if a then + a = (a == attr) and (not attribute or getprop(prev,a_state) == attribute) + else + a = not attribute or getprop(prev,a_state) == attribute + end + if a then + for i=1,ns do + local lookupname = subtables[i] + local lookupcache = lookuphash[lookupname] + if lookupcache then + local lookupmatch = lookupcache[getchar(prev)] + if lookupmatch then + -- we could move all code inline but that makes things even more unreadable + local h, d, ok = handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) + if ok then + done = true + break + end + end + else + report_missing_cache(typ,lookupname) + end + end + end + setfield(prev,"next",disc) + -- setfield(next,"prev",disc) + end + return next + end + + while start do + local id = getid(start) + if id == glyph_code then + if getfont(start) == font and getsubtype(start) < 256 then + local a = getattr(start,0) + if a then + a = (a == attr) and (not attribute or getprop(start,a_state) == attribute) + else + a = not attribute or getprop(start,a_state) == attribute + end + if a then + for i=1,ns do + local lookupname = subtables[i] + local lookupcache = lookuphash[lookupname] + if lookupcache then + local lookupmatch = lookupcache[getchar(start)] + if lookupmatch then + 
-- we could move all code inline but that makes things even more unreadable + local ok + head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) + if ok then + success = true + break + elseif not start then + -- don't ask why ... shouldn't happen + break + end + end + else + report_missing_cache(typ,lookupname) + end + end + if start then start = getnext(start) end + else + start = getnext(start) + end + else + start = getnext(start) + end + elseif id == disc_code then + -- mostly for gsub + if getsubtype(start) == discretionary_code then + local pre = getfield(start,"pre") + if pre then + local new = subrun(pre) + if new then setfield(start,"pre",new) end + end + local post = getfield(start,"post") + if post then + local new = subrun(post) + if new then setfield(start,"post",new) end + end + local replace = getfield(start,"replace") + if replace then + local new = subrun(replace) + if new then setfield(start,"replace",new) end + end +elseif typ == "gpos_single" or typ == "gpos_pair" then + kerndisc(start) + end + start = getnext(start) + elseif id == whatsit_code then + local subtype = getsubtype(start) + if subtype == dir_code then + local dir = getfield(start,"dir") + if dir == "+TRT" or dir == "+TLT" then + topstack = topstack + 1 + dirstack[topstack] = dir + elseif dir == "-TRT" or dir == "-TLT" then + topstack = topstack - 1 + end + local newdir = dirstack[topstack] + if newdir == "+TRT" then + rlmode = -1 + elseif newdir == "+TLT" then + rlmode = 1 + else + rlmode = rlparmode + end + if trace_directions then + report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir) + end + elseif subtype == localpar_code then + local dir = getfield(start,"dir") + if dir == "TRT" then + rlparmode = -1 + elseif dir == "TLT" then + rlparmode = 1 + else + rlparmode = 0 + end + rlmode = rlparmode + if trace_directions then + report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode) + end + end + start = getnext(start) + elseif id == math_code then + start = getnext(end_of_math(start)) + else + start = getnext(start) + end + end + end + end + if success then + done = true + end + if trace_steps then -- ? 
+ registerstep(head) + end + end + + head = tonode(head) + + return head, done +end + +local function generic(lookupdata,lookupname,unicode,lookuphash) + local target = lookuphash[lookupname] + if target then + target[unicode] = lookupdata + else + lookuphash[lookupname] = { [unicode] = lookupdata } + end +end + +local action = { + + substitution = generic, + multiple = generic, + alternate = generic, + position = generic, + + ligature = function(lookupdata,lookupname,unicode,lookuphash) + local target = lookuphash[lookupname] + if not target then + target = { } + lookuphash[lookupname] = target + end + for i=1,#lookupdata do + local li = lookupdata[i] + local tu = target[li] + if not tu then + tu = { } + target[li] = tu + end + target = tu + end + target.ligature = unicode + end, + + pair = function(lookupdata,lookupname,unicode,lookuphash) + local target = lookuphash[lookupname] + if not target then + target = { } + lookuphash[lookupname] = target + end + local others = target[unicode] + local paired = lookupdata[1] + if others then + others[paired] = lookupdata + else + others = { [paired] = lookupdata } + target[unicode] = others + end + end, + +} + +local function prepare_lookups(tfmdata) + + local rawdata = tfmdata.shared.rawdata + local resources = rawdata.resources + local lookuphash = resources.lookuphash + local anchor_to_lookup = resources.anchor_to_lookup + local lookup_to_anchor = resources.lookup_to_anchor + local lookuptypes = resources.lookuptypes + local characters = tfmdata.characters + local descriptions = tfmdata.descriptions + + -- we cannot free the entries in the descriptions as sometimes we access + -- then directly (for instance anchors) ... selectively freeing does save + -- much memory as it's only a reference to a table and the slot in the + -- description hash is not freed anyway + + for unicode, character in next, characters do -- we cannot loop over descriptions ! + + local description = descriptions[unicode] + + if description then + + local lookups = description.slookups + if lookups then + for lookupname, lookupdata in next, lookups do + action[lookuptypes[lookupname]](lookupdata,lookupname,unicode,lookuphash) + end + end + + local lookups = description.mlookups + if lookups then + for lookupname, lookuplist in next, lookups do + local lookuptype = lookuptypes[lookupname] + for l=1,#lookuplist do + local lookupdata = lookuplist[l] + action[lookuptype](lookupdata,lookupname,unicode,lookuphash) + end + end + end + + local list = description.kerns + if list then + for lookup, krn in next, list do -- ref to glyph, saves lookup + local target = lookuphash[lookup] + if target then + target[unicode] = krn + else + lookuphash[lookup] = { [unicode] = krn } + end + end + end + + local list = description.anchors + if list then + for typ, anchors in next, list do -- types + if typ == "mark" or typ == "cexit" then -- or entry? 
+ for name, anchor in next, anchors do + local lookups = anchor_to_lookup[name] + if lookups then + for lookup, _ in next, lookups do + local target = lookuphash[lookup] + if target then + target[unicode] = anchors + else + lookuphash[lookup] = { [unicode] = anchors } + end + end + end + end + end + end + end + + end + + end + +end + +local function split(replacement,original) + local result = { } + for i=1,#replacement do + result[original[i]] = replacement[i] + end + return result +end + +local valid = { + coverage = { chainsub = true, chainpos = true, contextsub = true }, + reversecoverage = { reversesub = true }, + glyphs = { chainsub = true, chainpos = true }, +} + +local function prepare_contextchains(tfmdata) + local rawdata = tfmdata.shared.rawdata + local resources = rawdata.resources + local lookuphash = resources.lookuphash + local lookuptags = resources.lookuptags + local lookups = rawdata.lookups + if lookups then + for lookupname, lookupdata in next, rawdata.lookups do + local lookuptype = lookupdata.type + if lookuptype then + local rules = lookupdata.rules + if rules then + local format = lookupdata.format + local validformat = valid[format] + if not validformat then + report_prepare("unsupported format %a",format) + elseif not validformat[lookuptype] then + -- todo: dejavu-serif has one (but i need to see what use it has) + report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookuptags[lookupname]) + else + local contexts = lookuphash[lookupname] + if not contexts then + contexts = { } + lookuphash[lookupname] = contexts + end + local t, nt = { }, 0 + for nofrules=1,#rules do + local rule = rules[nofrules] + local current = rule.current + local before = rule.before + local after = rule.after + local replacements = rule.replacements + local sequence = { } + local nofsequences = 0 + -- Eventually we can store start, stop and sequence in the cached file + -- but then less sharing takes place so best not do that without a lot + -- of profiling so let's forget about it. + if before then + for n=1,#before do + nofsequences = nofsequences + 1 + sequence[nofsequences] = before[n] + end + end + local start = nofsequences + 1 + for n=1,#current do + nofsequences = nofsequences + 1 + sequence[nofsequences] = current[n] + end + local stop = nofsequences + if after then + for n=1,#after do + nofsequences = nofsequences + 1 + sequence[nofsequences] = after[n] + end + end + if sequence[1] then + -- Replacements only happen with reverse lookups as they are single only. We + -- could pack them into current (replacement value instead of true) and then + -- use sequence[start] instead but it's somewhat ugly. 
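+                            -- In other words, the code below stores one record per rule,
+                            --   { rule number, lookuptype, sequence, start, stop, chain lookups, replacements }
+                            -- in t, and points contexts[unicode] at that shared list for every
+                            -- unicode that can begin the "current" part of the sequence (the
+                            -- keys of sequence[start]).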
+ nt = nt + 1 + t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups, replacements } + for unic, _ in next, sequence[start] do + local cu = contexts[unic] + if not cu then + contexts[unic] = t + end + end + end + end + end + else + -- no rules + end + else + report_prepare("missing lookuptype for lookupname %a",lookuptags[lookupname]) + end + end + end +end + +-- we can consider lookuphash == false (initialized but empty) vs lookuphash == table + +local function featuresinitializer(tfmdata,value) + if true then -- value then + -- beware we need to use the topmost properties table + local rawdata = tfmdata.shared.rawdata + local properties = rawdata.properties + if not properties.initialized then + local starttime = trace_preparing and os.clock() + local resources = rawdata.resources + resources.lookuphash = resources.lookuphash or { } + prepare_contextchains(tfmdata) + prepare_lookups(tfmdata) + properties.initialized = true + if trace_preparing then + report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,tfmdata.properties.fullname) + end + end + end +end + +registerotffeature { + name = "features", + description = "features", + default = true, + initializers = { + position = 1, + node = featuresinitializer, + }, + processors = { + node = featuresprocessor, + } +} + +-- This can be used for extra handlers, but should be used with care! + +otf.handlers = handlers diff --git a/src/fontloader/misc/fontloader-fonts-syn.lua b/src/fontloader/misc/fontloader-fonts-syn.lua new file mode 100644 index 0000000..f03d558 --- /dev/null +++ b/src/fontloader/misc/fontloader-fonts-syn.lua @@ -0,0 +1,106 @@ +if not modules then modules = { } end modules ['luatex-fonts-syn'] = { + version = 1.001, + comment = "companion to luatex-*.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end + +-- Generic font names support. +-- +-- Watch out, the version number is the same as the one used in +-- the mtx-fonts.lua function scripts.fonts.names as we use a +-- simplified font database in the plain solution and by using +-- a different number we're less dependent on context. 
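+--
+-- A usage sketch (for illustration only, using the made-up names from the sample further down):
+-- the resolver lowercases the requested name and strips everything that is not a letter or a
+-- digit before looking it up in the "mappings" table, so
+--
+--   fonts.names.resolve("Some TTC Font One")   -- "SomeFontA.ttc", "Some TTC Font One"
+--   fonts.names.resolve("someotffont")         -- "SomeFontC.otf", false
+--
+-- The second value is the font name and is only returned (instead of false) for entries that
+-- carry a subfont index.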
+-- +-- mtxrun --script font --reload --simple +-- +-- The format of the file is as follows: +-- +-- return { +-- ["version"] = 1.001, +-- ["cache_version"] = 1.001, +-- ["mappings"] = { +-- ["somettcfontone"] = { "Some TTC Font One", "SomeFontA.ttc", 1 }, +-- ["somettcfonttwo"] = { "Some TTC Font Two", "SomeFontA.ttc", 2 }, +-- ["somettffont"] = { "Some TTF Font", "SomeFontB.ttf" }, +-- ["someotffont"] = { "Some OTF Font", "SomeFontC.otf" }, +-- }, +-- } + +local fonts = fonts +fonts.names = fonts.names or { } + +fonts.names.version = 1.001 -- not the same as in context but matches mtx-fonts --simple +fonts.names.basename = "luatex-fonts-names" +fonts.names.new_to_old = { } +fonts.names.old_to_new = { } +fonts.names.cache = containers.define("fonts","data",fonts.names.version,true) + +local data, loaded = nil, false + +local fileformats = { "lua", "tex", "other text files" } + +function fonts.names.reportmissingbase() + texio.write("") + fonts.names.reportmissingbase = nil +end + +function fonts.names.reportmissingname() + texio.write("") + fonts.names.reportmissingname = nil +end + +function fonts.names.resolve(name,sub) + if not loaded then + local basename = fonts.names.basename + if basename and basename ~= "" then + data = containers.read(fonts.names.cache,basename) + if not data then + basename = file.addsuffix(basename,"lua") + for i=1,#fileformats do + local format = fileformats[i] + local foundname = resolvers.findfile(basename,format) or "" + if foundname ~= "" then + data = dofile(foundname) + texio.write("") + break + end + end + end + end + loaded = true + end + if type(data) == "table" and data.version == fonts.names.version then + local condensed = string.gsub(string.lower(name),"[^%a%d]","") + local found = data.mappings and data.mappings[condensed] + if found then + local fontname, filename, subfont = found[1], found[2], found[3] + if subfont then + return filename, fontname + else + return filename, false + end + elseif fonts.names.reportmissingname then + fonts.names.reportmissingname() + return name, false -- fallback to filename + end + elseif fonts.names.reportmissingbase then + fonts.names.reportmissingbase() + end +end + +fonts.names.resolvespec = fonts.names.resolve -- only supported in mkiv + +function fonts.names.getfilename(askedname,suffix) -- only supported in mkiv + return "" +end + +function fonts.names.ignoredfile(filename) -- only supported in mkiv + return false -- will be overloaded +end diff --git a/src/fontloader/misc/fontloader-fonts-tfm.lua b/src/fontloader/misc/fontloader-fonts-tfm.lua new file mode 100644 index 0000000..b9bb1bd --- /dev/null +++ b/src/fontloader/misc/fontloader-fonts-tfm.lua @@ -0,0 +1,38 @@ +if not modules then modules = { } end modules ['luatex-fonts-tfm'] = { + version = 1.001, + comment = "companion to luatex-*.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end + +local fonts = fonts +local tfm = { } +fonts.handlers.tfm = tfm +fonts.formats.tfm = "type1" -- we need to have at least a value here + +function fonts.readers.tfm(specification) + local fullname = specification.filename or "" + if fullname == "" then + local forced = specification.forced or "" + if forced ~= "" then + fullname = specification.name .. "." .. 
forced + else + fullname = specification.name + end + end + local foundname = resolvers.findbinfile(fullname, 'tfm') or "" + if foundname == "" then + foundname = resolvers.findbinfile(fullname, 'ofm') or "" + end + if foundname ~= "" then + specification.filename = foundname + specification.format = "ofm" + return font.read_tfm(specification.filename,specification.size) + end +end diff --git a/src/fontloader/misc/fontloader-fonts.lua b/src/fontloader/misc/fontloader-fonts.lua new file mode 100644 index 0000000..678a283 --- /dev/null +++ b/src/fontloader/misc/fontloader-fonts.lua @@ -0,0 +1,275 @@ +if not modules then modules = { } end modules ['luatex-fonts'] = { + version = 1.001, + comment = "companion to luatex-fonts.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- The following code isolates the generic context code from already defined or to be defined +-- namespaces. This is the reference loader for plain tex. This generic code is also used in +-- luaotfload which is a low level lualatex opentype font loader but somehow has gotten a bit +-- too generic name / prefix, originally set up and maintained by Khaled Hosny. Currently that +-- set of derived files is maintained by a larger team lead by Philipp Gesang so when there are +-- issues with this code in latex, you can best contact him. It might make sense then to first +-- check if context has the same issue. We do our best to keep the interface as clean as possible. +-- +-- The code base is rather stable now, especially if you stay away from the non generic code. All +-- relevant data is organized in tables within the main table of a font instance. There are a few +-- places where in context other code is plugged in, but this does not affect the core code. Users +-- can (given that their macro package provides this option) access the font data (characters, +-- descriptions, properties, parameters, etc) of this main table. The documentation is part of +-- context. There is also a manual for the helper libraries (maintained as part of the cld manuals). +-- +-- Future versions will probably have some more specific context code removed, like tracing and +-- obscure hooks, so that we have a more efficient version (and less files too). So, don't depend +-- too much on low level code that is meant for context as it can change without notice. We might +-- also add more helper code here, but that depends to what extend metatex (sidetrack of context) +-- evolves into a low level layer (depends on time, as usual). + +texio.write_nl("") +texio.write_nl("--------------------------------------------------------------------------------") +texio.write_nl("The font code has been brought in sync with the context version of 2014.12.01 so") +texio.write_nl("if things don't work out as expected the interfacing needs to be checked. When") +texio.write_nl("this works as expected a second upgrade will happen that gives a more complete") +texio.write_nl("support and another sync with the context code (that new code is currently being") +texio.write_nl("tested. 
The base pass is now integrated in the main pass.") +texio.write_nl("--------------------------------------------------------------------------------") +texio.write_nl("") + +utf = utf or unicode.utf8 + +-- We have some (global) hooks (for latex): + +if not non_generic_context then + non_generic_context = { } +end + +if not non_generic_context.luatex_fonts then + non_generic_context.luatex_fonts = { + -- load_before = nil, + -- load_after = nil, + -- skip_loading = nil, + } +end + +if not generic_context then + generic_context = { } +end + +if not generic_context.push_namespaces then + + function generic_context.push_namespaces() + texio.write(" ") + local normalglobal = { } + for k, v in next, _G do + normalglobal[k] = v + end + return normalglobal + end + + function generic_context.pop_namespaces(normalglobal,isolate) + if normalglobal then + texio.write(" ") + for k, v in next, _G do + if not normalglobal[k] then + generic_context[k] = v + if isolate then + _G[k] = nil + end + end + end + for k, v in next, normalglobal do + _G[k] = v + end + -- just to be sure: + setmetatable(generic_context,_G) + else + texio.write(" ") + os.exit() + end + end + +end + +local whatever = generic_context.push_namespaces() + +-- We keep track of load time by storing the current time. That way we cannot be accused +-- of slowing down loading too much. Anyhow, there is no reason for this library to perform +-- slower in any other package as it does in context. +-- +-- Please don't update to this version without proper testing. It might be that this version +-- lags behind stock context and the only formal release takes place around tex live code +-- freeze. + +local starttime = os.gettimeofday() + +-- As we don't use the context file searching, we need to initialize the kpse library. As the +-- progname can be anything we will temporary switch to the context namespace if needed. Just +-- adding the context paths to the path specification is somewhat faster. +-- +-- Now, with lua 5.2 being used we might create a special ENV for this. + +-- kpse.set_program_name("luatex") + +local ctxkpse = nil +local verbose = true + +local function loadmodule(name,continue) + local foundname = kpse.find_file(name,"tex") or "" + if not foundname then + if not ctxkpse then + ctxkpse = kpse.new("luatex","context") + end + foundname = ctxkpse:find_file(name,"tex") or "" + end + if foundname == "" then + if not continue then + texio.write_nl(string.format(" ",name)) + os.exit() + end + else + if verbose then + texio.write(string.format(" <%s>",foundname)) -- no file.basename yet + end + dofile(foundname) + end +end + +if non_generic_context.luatex_fonts.load_before then + loadmodule(non_generic_context.luatex_fonts.load_before,true) +end + +if non_generic_context.luatex_fonts.skip_loading ~= true then + + loadmodule('luatex-fonts-merged.lua',true) + + if fonts then + + if not fonts._merge_loaded_message_done_ then + texio.write_nl("log", "!") + texio.write_nl("log", "! I am using the merged version of 'luatex-fonts.lua' here. If") + texio.write_nl("log", "! you run into problems or experience unexpected behaviour, and") + texio.write_nl("log", "! if you have ConTeXt installed you can try to delete the file") + texio.write_nl("log", "! 'luatex-font-merged.lua' as I might then use the possibly") + texio.write_nl("log", "! updated libraries. The merged version is not supported as it") + texio.write_nl("log", "! is a frozen instance. Problems can be reported to the ConTeXt") + texio.write_nl("log", "! 
mailing list.") + texio.write_nl("log", "!") + end + + fonts._merge_loaded_message_done_ = true + + else + + -- The following helpers are a bit overkill but I don't want to mess up context code for the + -- sake of general generality. Around version 1.0 there will be an official api defined. + -- + -- So, I will strip these libraries and see what is really needed so that we don't have this + -- overhead in the generic modules. The next section is only there for the packager, so stick + -- to using luatex-fonts with luatex-fonts-merged.lua and forget about the rest. The following + -- list might change without prior notice (for instance because we shuffled code around). + + loadmodule("l-lua.lua") + loadmodule("l-lpeg.lua") + loadmodule("l-function.lua") + loadmodule("l-string.lua") + loadmodule("l-table.lua") + loadmodule("l-io.lua") + loadmodule("l-file.lua") + loadmodule("l-boolean.lua") + loadmodule("l-math.lua") + loadmodule("util-str.lua") + + -- The following modules contain code that is either not used at all outside context or will fail + -- when enabled due to lack of other modules. + + -- First we load a few helper modules. This is about the miminum needed to let the font modules do + -- their work. Don't depend on their functions as we might strip them in future versions of his + -- generic variant. + + loadmodule('luatex-basics-gen.lua') + loadmodule('data-con.lua') + + -- We do need some basic node support. The code in there is not for general use as it might change. + + loadmodule('luatex-basics-nod.lua') + + -- Now come the font modules that deal with traditional tex fonts as well as open type fonts. We only + -- support OpenType fonts here. + -- + -- The font database file (if used at all) must be put someplace visible for kpse and is not shared + -- with context. The mtx-fonts script can be used to genate this file (using the --names option). + + -- In 2013/14 I will merge/move some generic files into luatex-fonts-* files (copies) so that + -- intermediate updates of context don't interfere. We can then also use the general merger and + -- consider stripping debug code. + + loadmodule('font-ini.lua') + loadmodule('font-con.lua') + loadmodule('luatex-fonts-enc.lua') -- will load font-age on demand + loadmodule('font-cid.lua') + loadmodule('font-map.lua') -- for loading lum file (will be stripped) + loadmodule('luatex-fonts-syn.lua') -- deals with font names (synonyms) + -- begin of test + loadmodule('font-tfm.lua') -- optional + loadmodule('font-afm.lua') -- optional + loadmodule('font-afk.lua') -- optional + -- end of test + loadmodule('luatex-fonts-tfm.lua') + loadmodule('font-oti.lua') + loadmodule('font-otf.lua') + loadmodule('font-otb.lua') + loadmodule('luatex-fonts-inj.lua') + loadmodule('luatex-fonts-ota.lua') + loadmodule('luatex-fonts-otn.lua') + loadmodule('font-otp.lua') + loadmodule('luatex-fonts-lua.lua') + loadmodule('font-def.lua') -- this code (stripped) might end up in luatex-fonts-def.lua + loadmodule('luatex-fonts-def.lua') + loadmodule('luatex-fonts-ext.lua') -- some extensions + + -- We need to plug into a callback and the following module implements the handlers. Actual plugging + -- in happens later. + + loadmodule('luatex-fonts-cbk.lua') + + end + +end + +if non_generic_context.luatex_fonts.load_after then + loadmodule(non_generic_context.luatex_fonts.load_after,true) +end + +resolvers.loadmodule = loadmodule + +-- In order to deal with the fonts we need to initialize some callbacks. One can overload them later on if +-- needed. 
First a bit of abstraction. + +generic_context.callback_ligaturing = false +generic_context.callback_kerning = false +generic_context.callback_pre_linebreak_filter = nodes.simple_font_handler +generic_context.callback_hpack_filter = nodes.simple_font_handler +generic_context.callback_define_font = fonts.definers.read + +-- The next ones can be done at a different moment if needed. You can create a generic_context namespace +-- and set no_callbacks_yet to true, load this module, and enable the callbacks later. So, there is really +-- *no* need to create a alternative for luatex-fonts.lua and luatex-fonts-merged.lua: just load this one +-- and overload if needed. + +if not generic_context.no_callbacks_yet then + + -- callback.register('ligaturing', generic_context.callback_ligaturing) + -- callback.register('kerning', generic_context.callback_kerning) + callback.register('pre_linebreak_filter', generic_context.callback_pre_linebreak_filter) + callback.register('hpack_filter', generic_context.callback_hpack_filter) + callback.register('define_font' , generic_context.callback_define_font) + +end + +-- We're done. + +texio.write(string.format(" ", os.gettimeofday()-starttime)) + +generic_context.pop_namespaces(whatever) diff --git a/src/fontloader/misc/fontloader-fonts.tex b/src/fontloader/misc/fontloader-fonts.tex new file mode 100644 index 0000000..7b457e9 --- /dev/null +++ b/src/fontloader/misc/fontloader-fonts.tex @@ -0,0 +1,140 @@ +%D \module +%D [ file=luatex-fonts, +%D version=2009.12.01, +%D title=\LUATEX\ Support Macros, +%D subtitle=Generic \OPENTYPE\ Font Handler, +%D author=Hans Hagen, +%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}] + +%D \subject{Welcome} +%D +%D This file is one of a set of basic functionality enhancements +%D for \LUATEX\ derived from the \CONTEXT\ \MKIV\ code base. Please +%D don't polute the \type {luatex-*} namespace with code not coming +%D from the \CONTEXT\ development team as we may add more files. +%D +%D As this is an experimental setup, it might not always work out as +%D expected. Around \LUATEX\ version 0.50 we expect the code to be +%D more or less okay. +%D +%D This file implements a basic font system for a bare \LUATEX\ +%D system. By default \LUATEX\ only knows about the classic \TFM\ +%D fonts but it can read other font formats and pass them to \LUA. +%D With some glue code one can then construct a suitable \TFM\ +%D representation that \LUATEX\ can work with. For more advanced font +%D support a bit more code is needed that needs to be hooked +%D into the callback mechanism. +%D +%D This file is currently rather simple: it just loads the \LUA\ file +%D with the same name. An example of a \type {luatex.tex} file that is +%D just plain \TEX: +%D +%D \starttyping +%D \catcode`\{=1 % left brace is begin-group character +%D \catcode`\}=2 % right brace is end-group character +%D +%D \input plain +%D +%D \everyjob\expandafter{\the\everyjob\input luatex-fonts\relax} +%D +%D \dump +%D \stoptyping +%D +%D We could load the \LUA\ file in \type {\everyjob} but maybe some +%D day we need more here. +%D +%D When defining a font you can use two prefixes. A \type {file:} +%D prefix forced a file search, while a \type {name:} prefix will +%D result in consulting the names database. Such a database can be +%D generated with: +%D +%D \starttyping +%D mtxrun --usekpse --script fonts --names +%D \stoptyping +%D +%D This will generate a file \type {luatex-fonts-names.lua} that has +%D to be placed in a location where it can be found by \KPSE. 
Beware: +%D the \type {--kpseonly} flag is only used outside \CONTEXT\ and +%D provides very limited functionality, just enough for this task. +%D +%D The code loaded here does not come out of thin air, but is mostly +%D shared with \CONTEXT, however, in that macropackage we go beyond +%D what is provided here. When you use the code packaged here you +%D need to keep a few things in mind: +%D +%D \startitemize +%D +%D \item This subsystem will be extended, improved etc. in about the +%D same pace as \CONTEXT\ \MKIV. However, because \CONTEXT\ provides a +%D rather high level of integration not all features will be supported +%D in the same quality. Use \CONTEXT\ if you want more goodies. +%D +%D \item There is no official \API\ yet, which means that using +%D functions implemented here is at your own risk, in the sense that +%D names and namespaces might change. There will be a minimal \API\ +%D defined once \LUATEX\ version 1.0 is out. Instead of patching the +%D files it's better to overload functions if needed. +%D +%D \item The modules are not stripped too much, which makes it +%D possible to benefit from improvements in the code that take place +%D in the perspective of \CONTEXT\ development. They might be split a +%D bit more in due time so the baseline might become smaller. +%D +%D \item The code is maintained and tested by the \CONTEXT\ +%D development team. As such it might be better suited for this macro +%D package and integration in other systems might demand some +%D additional wrapping. Problems can be reported to the team but as we +%D use \CONTEXT\ \MKIV\ as baseline, you'd better check if the problem +%D is a general \CONTEXT\ problem too. +%D +%D \item The more high level support for features that is provided in +%D \CONTEXT\ is not part of the code loaded here as it makes no sense +%D elsewhere. Some experimental features are not part of this code +%D either but some might show up later. +%D +%D \item Math font support will be added but only in its basic form +%D once that the Latin Modern and \TEX\ Gyre math fonts are +%D available. +%D +%D \item At this moment the more nifty speed-ups are not enabled +%D because they work in tandem with the alternative file handling +%D that \CONTEXT\ uses. Maybe around \LUATEX\ 1.0 we will bring some +%D speedup into this code too (if it pays off at all). +%D +%D \item The code defines a few global tables. If this code is used +%D in a larger perspective then you can best make sure that no +%D conflicts occur. The \CONTEXT\ package expects users to work in +%D their own namespace (\type {userdata}, \type {thirddata}, \type +%D {moduledata} or \type {document}. The team takes all freedom to +%D use any table at the global level but will not use tables that are +%D named after macro packages. Later the \CONTEXT\ might operate in +%D a more controlled namespace but it has a low priority. +%D +%D \item There is some tracing code present but this is not enabled +%D and not supported outside \CONTEXT\ either as it integrates quite +%D tightly into \CONTEXT. In case of problems you can use \CONTEXT\ +%D for tracking down problems. +%D +%D \item Patching the code in distributions is dangerous as it might +%D fix your problem but introduce new ones for \CONTEXT. So, best keep +%D the original code as it is. +%D +%D \item Attributes are (automatically) taken from the range 127-255 so +%D you'd best not use these yourself. 
+%D +%D \stopitemize +%D +%D If this all sounds a bit tricky, keep in mind that it makes no sense +%D for us to maintain multiple code bases and we happen to use \CONTEXT. +%D +%D For more details about how the font subsystem works we refer to +%D publications in \TEX\ related journals, the \CONTEXT\ documentation, +%D and the \CONTEXT\ wiki. + +\directlua { + if not fonts then + dofile(kpse.find_file("luatex-fonts.lua","tex")) + end +} + +\endinput diff --git a/src/fontloader/misc/fontloader-l-boolean.lua b/src/fontloader/misc/fontloader-l-boolean.lua new file mode 100644 index 0000000..8f18d4c --- /dev/null +++ b/src/fontloader/misc/fontloader-l-boolean.lua @@ -0,0 +1,69 @@ +if not modules then modules = { } end modules ['l-boolean'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local type, tonumber = type, tonumber + +boolean = boolean or { } +local boolean = boolean + +function boolean.tonumber(b) + if b then return 1 else return 0 end -- test and return or return +end + +function toboolean(str,tolerant) -- global + if str == nil then + return false + elseif str == false then + return false + elseif str == true then + return true + elseif str == "true" then + return true + elseif str == "false" then + return false + elseif not tolerant then + return false + elseif str == 0 then + return false + elseif (tonumber(str) or 0) > 0 then + return true + else + return str == "yes" or str == "on" or str == "t" + end +end + +string.toboolean = toboolean + +function string.booleanstring(str) + if str == "0" then + return false + elseif str == "1" then + return true + elseif str == "" then + return false + elseif str == "false" then + return false + elseif str == "true" then + return true + elseif (tonumber(str) or 0) > 0 then + return true + else + return str == "yes" or str == "on" or str == "t" + end +end + +function string.is_boolean(str,default,strict) + if type(str) == "string" then + if str == "true" or str == "yes" or str == "on" or str == "t" or (not strict and str == "1") then + return true + elseif str == "false" or str == "no" or str == "off" or str == "f" or (not strict and str == "0") then + return false + end + end + return default +end diff --git a/src/fontloader/misc/fontloader-l-file.lua b/src/fontloader/misc/fontloader-l-file.lua new file mode 100644 index 0000000..2742e99 --- /dev/null +++ b/src/fontloader/misc/fontloader-l-file.lua @@ -0,0 +1,691 @@ +if not modules then modules = { } end modules ['l-file'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- needs a cleanup + +file = file or { } +local file = file + +if not lfs then + lfs = optionalrequire("lfs") +end + +if not lfs then + + lfs = { + getcurrentdir = function() + return "." 
+ end, + attributes = function() + return nil + end, + isfile = function(name) + local f = io.open(name,'rb') + if f then + f:close() + return true + end + end, + isdir = function(name) + print("you need to load lfs") + return false + end + } + +elseif not lfs.isfile then + + local attributes = lfs.attributes + + function lfs.isdir(name) + return attributes(name,"mode") == "directory" + end + + function lfs.isfile(name) + return attributes(name,"mode") == "file" + end + + -- function lfs.isdir(name) + -- local a = attributes(name) + -- return a and a.mode == "directory" + -- end + + -- function lfs.isfile(name) + -- local a = attributes(name) + -- return a and a.mode == "file" + -- end + +end + +local insert, concat = table.insert, table.concat +local match, find, gmatch = string.match, string.find, string.gmatch +local lpegmatch = lpeg.match +local getcurrentdir, attributes = lfs.currentdir, lfs.attributes +local checkedsplit = string.checkedsplit + +-- local patterns = file.patterns or { } +-- file.patterns = patterns + +local P, R, S, C, Cs, Cp, Cc, Ct = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cs, lpeg.Cp, lpeg.Cc, lpeg.Ct + +local colon = P(":") +local period = P(".") +local periods = P("..") +local fwslash = P("/") +local bwslash = P("\\") +local slashes = S("\\/") +local noperiod = 1-period +local noslashes = 1-slashes +local name = noperiod^1 +local suffix = period/"" * (1-period-slashes)^1 * -1 + +----- pattern = C((noslashes^0 * slashes^1)^1) +local pattern = C((1 - (slashes^1 * noslashes^1 * -1))^1) * P(1) -- there must be a more efficient way + +local function pathpart(name,default) + return name and lpegmatch(pattern,name) or default or "" +end + +local pattern = (noslashes^0 * slashes)^1 * C(noslashes^1) * -1 + +local function basename(name) + return name and lpegmatch(pattern,name) or name +end + +-- print(pathpart("file")) +-- print(pathpart("dir/file")) +-- print(pathpart("/dir/file")) +-- print(basename("file")) +-- print(basename("dir/file")) +-- print(basename("/dir/file")) + +local pattern = (noslashes^0 * slashes^1)^0 * Cs((1-suffix)^1) * suffix^0 + +local function nameonly(name) + return name and lpegmatch(pattern,name) or name +end + +local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * C(noperiod^1) * -1 + +local function suffixonly(name) + return name and lpegmatch(pattern,name) or "" +end + +local pattern = (noslashes^0 * slashes)^0 * noperiod^1 * ((period * C(noperiod^1))^1) * -1 + Cc("") + +local function suffixesonly(name) + if name then + return lpegmatch(pattern,name) + else + return "" + end +end + +file.pathpart = pathpart +file.basename = basename +file.nameonly = nameonly +file.suffixonly = suffixonly +file.suffix = suffixonly +file.suffixesonly = suffixesonly +file.suffixes = suffixesonly + +file.dirname = pathpart -- obsolete +file.extname = suffixonly -- obsolete + +-- actually these are schemes + +local drive = C(R("az","AZ")) * colon +local path = C((noslashes^0 * slashes)^0) +local suffix = period * C(P(1-period)^0 * P(-1)) +local base = C((1-suffix)^0) +local rest = C(P(1)^0) + +drive = drive + Cc("") +path = path + Cc("") +base = base + Cc("") +suffix = suffix + Cc("") + +local pattern_a = drive * path * base * suffix +local pattern_b = path * base * suffix +local pattern_c = C(drive * path) * C(base * suffix) -- trick: two extra captures +local pattern_d = path * rest + +function file.splitname(str,splitdrive) + if not str then + -- error + elseif splitdrive then + return lpegmatch(pattern_a,str) -- returns drive, path, base, suffix 
+ else + return lpegmatch(pattern_b,str) -- returns path, base, suffix + end +end + +function file.splitbase(str) + if str then + return lpegmatch(pattern_d,str) -- returns path, base+suffix (path has / appended, might change at some point) + else + return "", str -- assume no path + end +end + +---- stripslash = C((1 - P("/")^1*P(-1))^0) + +function file.nametotable(str,splitdrive) + if str then + local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str) + -- if path ~= "" then + -- path = lpegmatch(stripslash,path) -- unfortunate hack, maybe this becomes default + -- end + if splitdrive then + return { + path = path, + drive = drive, + subpath = subpath, + name = name, + base = base, + suffix = suffix, + } + else + return { + path = path, + name = name, + base = base, + suffix = suffix, + } + end + end +end + +-- print(file.splitname("file")) +-- print(file.splitname("dir/file")) +-- print(file.splitname("/dir/file")) +-- print(file.splitname("file")) +-- print(file.splitname("dir/file")) +-- print(file.splitname("/dir/file")) + +-- inspect(file.nametotable("file.ext")) +-- inspect(file.nametotable("dir/file.ext")) +-- inspect(file.nametotable("/dir/file.ext")) +-- inspect(file.nametotable("file.ext")) +-- inspect(file.nametotable("dir/file.ext")) +-- inspect(file.nametotable("/dir/file.ext")) + +----- pattern = Cs(((period * noperiod^1 * -1) / "" + 1)^1) +local pattern = Cs(((period * (1-period-slashes)^1 * -1) / "" + 1)^1) + +function file.removesuffix(name) + return name and lpegmatch(pattern,name) +end + +-- local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * Cp() * noperiod^1 * -1 +-- +-- function file.addsuffix(name, suffix) +-- local p = lpegmatch(pattern,name) +-- if p then +-- return name +-- else +-- return name .. "." .. suffix +-- end +-- end + +local suffix = period/"" * (1-period-slashes)^1 * -1 +local pattern = Cs((noslashes^0 * slashes^1)^0 * ((1-suffix)^1)) * Cs(suffix) + +function file.addsuffix(filename,suffix,criterium) + if not filename or not suffix or suffix == "" then + return filename + elseif criterium == true then + return filename .. "." .. suffix + elseif not criterium then + local n, s = lpegmatch(pattern,filename) + if not s or s == "" then + return filename .. "." .. suffix + else + return filename + end + else + local n, s = lpegmatch(pattern,filename) + if s and s ~= "" then + local t = type(criterium) + if t == "table" then + -- keep if in criterium + for i=1,#criterium do + if s == criterium[i] then + return filename + end + end + elseif t == "string" then + -- keep if criterium + if s == criterium then + return filename + end + end + end + return (n or filename) .. "." .. suffix + end +end + +-- print("1 " .. file.addsuffix("name","new") .. " -> name.new") +-- print("2 " .. file.addsuffix("name.old","new") .. " -> name.old") +-- print("3 " .. file.addsuffix("name.old","new",true) .. " -> name.old.new") +-- print("4 " .. file.addsuffix("name.old","new","new") .. " -> name.new") +-- print("5 " .. file.addsuffix("name.old","new","old") .. " -> name.old") +-- print("6 " .. file.addsuffix("name.old","new","foo") .. " -> name.new") +-- print("7 " .. file.addsuffix("name.old","new",{"foo","bar"}) .. " -> name.new") +-- print("8 " .. file.addsuffix("name.old","new",{"old","bar"}) .. " -> name.old") + +local suffix = period * (1-period-slashes)^1 * -1 +local pattern = Cs((1-suffix)^0) + +function file.replacesuffix(name,suffix) + if name and suffix and suffix ~= "" then + return lpegmatch(pattern,name) .. "." .. 
suffix + else + return name + end +end + +-- + +local reslasher = lpeg.replacer(P("\\"),"/") + +function file.reslash(str) + return str and lpegmatch(reslasher,str) +end + +-- We should be able to use: +-- +-- local writable = P(1) * P("w") * Cc(true) +-- +-- function file.is_writable(name) +-- local a = attributes(name) or attributes(pathpart(name,".")) +-- return a and lpegmatch(writable,a.permissions) or false +-- end +-- +-- But after some testing Taco and I came up with the more robust +-- variant: + +function file.is_writable(name) + if not name then + -- error + elseif lfs.isdir(name) then + name = name .. "/m_t_x_t_e_s_t.tmp" + local f = io.open(name,"wb") + if f then + f:close() + os.remove(name) + return true + end + elseif lfs.isfile(name) then + local f = io.open(name,"ab") + if f then + f:close() + return true + end + else + local f = io.open(name,"ab") + if f then + f:close() + os.remove(name) + return true + end + end + return false +end + +local readable = P("r") * Cc(true) + +function file.is_readable(name) + if name then + local a = attributes(name) + return a and lpegmatch(readable,a.permissions) or false + else + return false + end +end + +file.isreadable = file.is_readable -- depricated +file.iswritable = file.is_writable -- depricated + +function file.size(name) + if name then + local a = attributes(name) + return a and a.size or 0 + else + return 0 + end +end + +function file.splitpath(str,separator) -- string .. reslash is a bonus (we could do a direct split) + return str and checkedsplit(lpegmatch(reslasher,str),separator or io.pathseparator) +end + +function file.joinpath(tab,separator) -- table + return tab and concat(tab,separator or io.pathseparator) -- can have trailing // +end + +local someslash = S("\\/") +local stripper = Cs(P(fwslash)^0/"" * reslasher) +local isnetwork = someslash * someslash * (1-someslash) + + (1-fwslash-colon)^1 * colon +local isroot = fwslash^1 * -1 +local hasroot = fwslash^1 + +local reslasher = lpeg.replacer(S("\\/"),"/") +local deslasher = lpeg.replacer(S("\\/")^1,"/") + +-- If we have a network or prefix then there is a change that we end up with two +-- // in the middle ... we could prevent this if we (1) expand prefixes: and (2) +-- split and rebuild as url. Of course we could assume no network paths (which +-- makes sense) adn assume either mapped drives (windows) or mounts (unix) but +-- then we still have to deal with urls ... anyhow, multiple // are never a real +-- problem but just ugly. + +-- function file.join(...) +-- local lst = { ... } +-- local one = lst[1] +-- if lpegmatch(isnetwork,one) then +-- local one = lpegmatch(reslasher,one) +-- local two = lpegmatch(deslasher,concat(lst,"/",2)) +-- if lpegmatch(hasroot,two) then +-- return one .. two +-- else +-- return one .. "/" .. two +-- end +-- elseif lpegmatch(isroot,one) then +-- local two = lpegmatch(deslasher,concat(lst,"/",2)) +-- if lpegmatch(hasroot,two) then +-- return two +-- else +-- return "/" .. two +-- end +-- elseif one == "" then +-- return lpegmatch(stripper,concat(lst,"/",2)) +-- else +-- return lpegmatch(deslasher,concat(lst,"/")) +-- end +-- end + +function file.join(one, two, three, ...) + if not two then + return one == "" and one or lpegmatch(stripper,one) + end + if one == "" then + return lpegmatch(stripper,three and concat({ two, three, ... },"/") or two) + end + if lpegmatch(isnetwork,one) then + local one = lpegmatch(reslasher,one) + local two = lpegmatch(deslasher,three and concat({ two, three, ... 
},"/") or two) + if lpegmatch(hasroot,two) then + return one .. two + else + return one .. "/" .. two + end + elseif lpegmatch(isroot,one) then + local two = lpegmatch(deslasher,three and concat({ two, three, ... },"/") or two) + if lpegmatch(hasroot,two) then + return two + else + return "/" .. two + end + else + return lpegmatch(deslasher,concat({ one, two, three, ... },"/")) + end +end + +-- or we can use this: +-- +-- function file.join(...) +-- local n = select("#",...) +-- local one = select(1,...) +-- if n == 1 then +-- return one == "" and one or lpegmatch(stripper,one) +-- end +-- if one == "" then +-- return lpegmatch(stripper,n > 2 and concat({ ... },"/",2) or select(2,...)) +-- end +-- if lpegmatch(isnetwork,one) then +-- local one = lpegmatch(reslasher,one) +-- local two = lpegmatch(deslasher,n > 2 and concat({ ... },"/",2) or select(2,...)) +-- if lpegmatch(hasroot,two) then +-- return one .. two +-- else +-- return one .. "/" .. two +-- end +-- elseif lpegmatch(isroot,one) then +-- local two = lpegmatch(deslasher,n > 2 and concat({ ... },"/",2) or select(2,...)) +-- if lpegmatch(hasroot,two) then +-- return two +-- else +-- return "/" .. two +-- end +-- else +-- return lpegmatch(deslasher,concat({ ... },"/")) +-- end +-- end + +-- print(file.join("c:/whatever")) +-- print(file.join("c:/whatever","name")) +-- print(file.join("//","/y")) +-- print(file.join("/","/y")) +-- print(file.join("","/y")) +-- print(file.join("/x/","/y")) +-- print(file.join("x/","/y")) +-- print(file.join("http://","/y")) +-- print(file.join("http://a","/y")) +-- print(file.join("http:///a","/y")) +-- print(file.join("//nas-1","/y")) +-- print(file.join("//nas-1/a/b/c","/y")) +-- print(file.join("\\\\nas-1\\a\\b\\c","\\y")) + +-- The previous one fails on "a.b/c" so Taco came up with a split based +-- variant. After some skyping we got it sort of compatible with the old +-- one. After that the anchoring to currentdir was added in a better way. +-- Of course there are some optimizations too. Finally we had to deal with +-- windows drive prefixes and things like sys://. Eventually gsubs and +-- finds were replaced by lpegs. + +local drivespec = R("az","AZ")^1 * colon +local anchors = fwslash + + drivespec +local untouched = periods + + (1-period)^1 * P(-1) +local mswindrive = Cs(drivespec * (bwslash/"/" + fwslash)^0) +local mswinuncpath = (bwslash + fwslash) * (bwslash + fwslash) * Cc("//") +local splitstarter = (mswindrive + mswinuncpath + Cc(false)) + * Ct(lpeg.splitat(S("/\\")^1)) +local absolute = fwslash + +function file.collapsepath(str,anchor) -- anchor: false|nil, true, "." + if not str then + return + end + if anchor == true and not lpegmatch(anchors,str) then + str = getcurrentdir() .. "/" .. str + end + if str == "" or str =="." then + return "." + elseif lpegmatch(untouched,str) then + return lpegmatch(reslasher,str) + end + local starter, oldelements = lpegmatch(splitstarter,str) + local newelements = { } + local i = #oldelements + while i > 0 do + local element = oldelements[i] + if element == '.' then + -- do nothing + elseif element == '..' then + local n = i - 1 + while n > 0 do + local element = oldelements[n] + if element ~= '..' and element ~= '.' then + oldelements[n] = '.' + break + else + n = n - 1 + end + end + if n < 1 then + insert(newelements,1,'..') + end + elseif element ~= "" then + insert(newelements,1,element) + end + i = i - 1 + end + if #newelements == 0 then + return starter or "." + elseif starter then + return starter .. 
concat(newelements, '/') + elseif lpegmatch(absolute,str) then + return "/" .. concat(newelements,'/') + else + newelements = concat(newelements, '/') + if anchor == "." and find(str,"^%./") then + return "./" .. newelements + else + return newelements + end + end +end + +-- better this way: + +local tricky = S("/\\") * P(-1) +local attributes = lfs.attributes + +function lfs.isdir(name) + if lpegmatch(tricky,name) then + return attributes(name,"mode") == "directory" + else + return attributes(name.."/.","mode") == "directory" + end +end + +function lfs.isfile(name) + return attributes(name,"mode") == "file" +end + +-- local function test(str,...) +-- print(string.format("%-20s %-15s %-30s %-20s",str,file.collapsepath(str),file.collapsepath(str,true),file.collapsepath(str,"."))) +-- end +-- test("a/b.c/d") test("b.c/d") test("b.c/..") +-- test("/") test("c:/..") test("sys://..") +-- test("") test("./") test(".") test("..") test("./..") test("../..") +-- test("a") test("./a") test("/a") test("a/../..") +-- test("a/./b/..") test("a/aa/../b/bb") test("a/.././././b/..") test("a/./././b/..") +-- test("a/b/c/../..") test("./a/b/c/../..") test("a/b/c/../..") +-- test("./a") +-- test([[\\a.b.c\d\e]]) + +local validchars = R("az","09","AZ","--","..") +local pattern_a = lpeg.replacer(1-validchars) +local pattern_a = Cs((validchars + P(1)/"-")^1) +local whatever = P("-")^0 / "" +local pattern_b = Cs(whatever * (1 - whatever * -1)^1) + +function file.robustname(str,strict) + if str then + str = lpegmatch(pattern_a,str) or str + if strict then + return lpegmatch(pattern_b,str) or str -- two step is cleaner (less backtracking) + else + return str + end + end +end + +file.readdata = io.loaddata +file.savedata = io.savedata + +function file.copy(oldname,newname) + if oldname and newname then + local data = io.loaddata(oldname) + if data and data ~= "" then + file.savedata(newname,data) + end + end +end + +-- also rewrite previous + +local letter = R("az","AZ") + S("_-+") +local separator = P("://") + +local qualified = period^0 * fwslash + + letter * colon + + letter^1 * separator + + letter^1 * fwslash +local rootbased = fwslash + + letter * colon + +lpeg.patterns.qualified = qualified +lpeg.patterns.rootbased = rootbased + +-- ./name ../name /name c: :// name/name + +function file.is_qualified_path(filename) + return filename and lpegmatch(qualified,filename) ~= nil +end + +function file.is_rootbased_path(filename) + return filename and lpegmatch(rootbased,filename) ~= nil +end + +-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end +-- +-- test { "c:", "c:/aa", "c:/aa/bb", "c:/aa/bb/cc", "c:/aa/bb/cc.dd", "c:/aa/bb/cc.dd.ee" } +-- test { "c:", "c:aa", "c:aa/bb", "c:aa/bb/cc", "c:aa/bb/cc.dd", "c:aa/bb/cc.dd.ee" } +-- test { "/aa", "/aa/bb", "/aa/bb/cc", "/aa/bb/cc.dd", "/aa/bb/cc.dd.ee" } +-- test { "aa", "aa/bb", "aa/bb/cc", "aa/bb/cc.dd", "aa/bb/cc.dd.ee" } + +-- -- maybe: +-- +-- if os.type == "windows" then +-- local currentdir = getcurrentdir +-- function getcurrentdir() +-- return lpegmatch(reslasher,currentdir()) +-- end +-- end + +-- for myself: + +function file.strip(name,dir) + if name then + local b, a = match(name,"^(.-)" .. dir .. 
"(.*)$") + return a ~= "" and a or name + end +end + +-- local debuglist = { +-- "pathpart", "basename", "nameonly", "suffixonly", "suffix", "dirname", "extname", +-- "addsuffix", "removesuffix", "replacesuffix", "join", +-- "strip","collapsepath", "joinpath", "splitpath", +-- } + +-- for i=1,#debuglist do +-- local name = debuglist[i] +-- local f = file[name] +-- file[name] = function(...) +-- print(name,f(...)) +-- return f(...) +-- end +-- end + +-- a goodie: a dumb version of mkdirs (not used in context itself, only +-- in generic usage) + +function lfs.mkdirs(path) + local full = "" + for sub in gmatch(path,"(/*[^\\/]+)") do -- accepts leading c: and / + full = full .. sub + -- lfs.isdir("/foo") mistakenly returns true on windows so + -- so we don't test and just make as that one is not too picky + lfs.mkdir(full) + end +end diff --git a/src/fontloader/misc/fontloader-l-function.lua b/src/fontloader/misc/fontloader-l-function.lua new file mode 100644 index 0000000..7ded8ce --- /dev/null +++ b/src/fontloader/misc/fontloader-l-function.lua @@ -0,0 +1,11 @@ +if not modules then modules = { } end modules ['l-functions'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +functions = functions or { } + +function functions.dummy() end diff --git a/src/fontloader/misc/fontloader-l-io.lua b/src/fontloader/misc/fontloader-l-io.lua new file mode 100644 index 0000000..020e811 --- /dev/null +++ b/src/fontloader/misc/fontloader-l-io.lua @@ -0,0 +1,363 @@ +if not modules then modules = { } end modules ['l-io'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local io = io +local byte, find, gsub, format = string.byte, string.find, string.gsub, string.format +local concat = table.concat +local floor = math.floor +local type = type + +if string.find(os.getenv("PATH"),";",1,true) then + io.fileseparator, io.pathseparator = "\\", ";" +else + io.fileseparator, io.pathseparator = "/" , ":" +end + +local function readall(f) + return f:read("*all") +end + +-- The next one is upto 50% faster on large files and less memory consumption due +-- to less intermediate large allocations. This phenomena was discussed on the +-- luatex dev list. 
+ +local function readall(f) + local size = f:seek("end") + if size == 0 then + return "" + elseif size < 1024*1024 then + f:seek("set",0) + return f:read('*all') + else + local done = f:seek("set",0) + local step + if size < 1024*1024 then + step = 1024 * 1024 + elseif size > 16*1024*1024 then + step = 16*1024*1024 + else + step = floor(size/(1024*1024)) * 1024 * 1024 / 8 + end + local data = { } + while true do + local r = f:read(step) + if not r then + return concat(data) + else + data[#data+1] = r + end + end + end +end + +io.readall = readall + +function io.loaddata(filename,textmode) -- return nil if empty + local f = io.open(filename,(textmode and 'r') or 'rb') + if f then + -- local data = f:read('*all') + local data = readall(f) + f:close() + if #data > 0 then + return data + end + end +end + +function io.savedata(filename,data,joiner) + local f = io.open(filename,"wb") + if f then + if type(data) == "table" then + f:write(concat(data,joiner or "")) + elseif type(data) == "function" then + data(f) + else + f:write(data or "") + end + f:close() + io.flush() + return true + else + return false + end +end + +-- we can also chunk this one if needed: io.lines(filename,chunksize,"*l") + +function io.loadlines(filename,n) -- return nil if empty + local f = io.open(filename,'r') + if not f then + -- no file + elseif n then + local lines = { } + for i=1,n do + local line = f:read("*lines") + if line then + lines[#lines+1] = line + else + break + end + end + f:close() + lines = concat(lines,"\n") + if #lines > 0 then + return lines + end + else + local line = f:read("*line") or "" + f:close() + if #line > 0 then + return line + end + end +end + +function io.loadchunk(filename,n) + local f = io.open(filename,'rb') + if f then + local data = f:read(n or 1024) + f:close() + if #data > 0 then + return data + end + end +end + +function io.exists(filename) + local f = io.open(filename) + if f == nil then + return false + else + f:close() + return true + end +end + +function io.size(filename) + local f = io.open(filename) + if f == nil then + return 0 + else + local s = f:seek("end") + f:close() + return s + end +end + +function io.noflines(f) + if type(f) == "string" then + local f = io.open(filename) + if f then + local n = f and io.noflines(f) or 0 + f:close() + return n + else + return 0 + end + else + local n = 0 + for _ in f:lines() do + n = n + 1 + end + f:seek('set',0) + return n + end +end + +local nextchar = { + [ 4] = function(f) + return f:read(1,1,1,1) + end, + [ 2] = function(f) + return f:read(1,1) + end, + [ 1] = function(f) + return f:read(1) + end, + [-2] = function(f) + local a, b = f:read(1,1) + return b, a + end, + [-4] = function(f) + local a, b, c, d = f:read(1,1,1,1) + return d, c, b, a + end +} + +function io.characters(f,n) + if f then + return nextchar[n or 1], f + end +end + +local nextbyte = { + [4] = function(f) + local a, b, c, d = f:read(1,1,1,1) + if d then + return byte(a), byte(b), byte(c), byte(d) + end + end, + [3] = function(f) + local a, b, c = f:read(1,1,1) + if b then + return byte(a), byte(b), byte(c) + end + end, + [2] = function(f) + local a, b = f:read(1,1) + if b then + return byte(a), byte(b) + end + end, + [1] = function (f) + local a = f:read(1) + if a then + return byte(a) + end + end, + [-2] = function (f) + local a, b = f:read(1,1) + if b then + return byte(b), byte(a) + end + end, + [-3] = function(f) + local a, b, c = f:read(1,1,1) + if b then + return byte(c), byte(b), byte(a) + end + end, + [-4] = function(f) + local a, b, c, d = 
f:read(1,1,1,1) + if d then + return byte(d), byte(c), byte(b), byte(a) + end + end +} + +function io.bytes(f,n) + if f then + return nextbyte[n or 1], f + else + return nil, nil + end +end + +function io.ask(question,default,options) + while true do + io.write(question) + if options then + io.write(format(" [%s]",concat(options,"|"))) + end + if default then + io.write(format(" [%s]",default)) + end + io.write(format(" ")) + io.flush() + local answer = io.read() + answer = gsub(answer,"^%s*(.*)%s*$","%1") + if answer == "" and default then + return default + elseif not options then + return answer + else + for k=1,#options do + if options[k] == answer then + return answer + end + end + local pattern = "^" .. answer + for k=1,#options do + local v = options[k] + if find(v,pattern) then + return v + end + end + end + end +end + +local function readnumber(f,n,m) + if m then + f:seek("set",n) + n = m + end + if n == 1 then + return byte(f:read(1)) + elseif n == 2 then + local a, b = byte(f:read(2),1,2) + return 256 * a + b + elseif n == 3 then + local a, b, c = byte(f:read(3),1,3) + return 256*256 * a + 256 * b + c + elseif n == 4 then + local a, b, c, d = byte(f:read(4),1,4) + return 256*256*256 * a + 256*256 * b + 256 * c + d + elseif n == 8 then + local a, b = readnumber(f,4), readnumber(f,4) + return 256 * a + b + elseif n == 12 then + local a, b, c = readnumber(f,4), readnumber(f,4), readnumber(f,4) + return 256*256 * a + 256 * b + c + elseif n == -2 then + local b, a = byte(f:read(2),1,2) + return 256*a + b + elseif n == -3 then + local c, b, a = byte(f:read(3),1,3) + return 256*256 * a + 256 * b + c + elseif n == -4 then + local d, c, b, a = byte(f:read(4),1,4) + return 256*256*256 * a + 256*256 * b + 256*c + d + elseif n == -8 then + local h, g, f, e, d, c, b, a = byte(f:read(8),1,8) + return 256*256*256*256*256*256*256 * a + + 256*256*256*256*256*256 * b + + 256*256*256*256*256 * c + + 256*256*256*256 * d + + 256*256*256 * e + + 256*256 * f + + 256 * g + + h + else + return 0 + end +end + +io.readnumber = readnumber + +function io.readstring(f,n,m) + if m then + f:seek("set",n) + n = m + end + local str = gsub(f:read(n),"\000","") + return str +end + +-- + +if not io.i_limiter then function io.i_limiter() end end -- dummy so we can test safely +if not io.o_limiter then function io.o_limiter() end end -- dummy so we can test safely + +-- This works quite ok: +-- +-- function io.piped(command,writer) +-- local pipe = io.popen(command) +-- -- for line in pipe:lines() do +-- -- print(line) +-- -- end +-- while true do +-- local line = pipe:read(1) +-- if not line then +-- break +-- elseif line ~= "\n" then +-- writer(line) +-- end +-- end +-- return pipe:close() -- ok, status, (error)code +-- end diff --git a/src/fontloader/misc/fontloader-l-lpeg.lua b/src/fontloader/misc/fontloader-l-lpeg.lua new file mode 100644 index 0000000..192e32f --- /dev/null +++ b/src/fontloader/misc/fontloader-l-lpeg.lua @@ -0,0 +1,1107 @@ +if not modules then modules = { } end modules ['l-lpeg'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- lpeg 12 vs lpeg 10: slower compilation, similar parsing speed (i need to check +-- if i can use new features like capture / 2 and .B (at first sight the xml +-- parser is some 5% slower) + +-- a new lpeg fails on a #(1-P(":")) test and really needs a + P(-1) + +-- move utf -> l-unicode +-- move 
string -> l-string or keep it here + +lpeg = require("lpeg") + +-- The latest lpeg doesn't have print any more, and even the new ones are not +-- available by default (only when debug mode is enabled), which is a pitty as +-- as it helps nailign down bottlenecks. Performance seems comparable: some 10% +-- slower pattern compilation, same parsing speed, although, +-- +-- local p = lpeg.C(lpeg.P(1)^0 * lpeg.P(-1)) +-- local a = string.rep("123",100) +-- lpeg.match(p,a) +-- +-- seems slower and is also still suboptimal (i.e. a match that runs from begin +-- to end, one of the cases where string matchers win). + +if not lpeg.print then function lpeg.print(...) print(lpeg.pcode(...)) end end + +-- tracing (only used when we encounter a problem in integration of lpeg in luatex) + +-- some code will move to unicode and string + +-- local lpmatch = lpeg.match +-- local lpprint = lpeg.print +-- local lpp = lpeg.P +-- local lpr = lpeg.R +-- local lps = lpeg.S +-- local lpc = lpeg.C +-- local lpb = lpeg.B +-- local lpv = lpeg.V +-- local lpcf = lpeg.Cf +-- local lpcb = lpeg.Cb +-- local lpcg = lpeg.Cg +-- local lpct = lpeg.Ct +-- local lpcs = lpeg.Cs +-- local lpcc = lpeg.Cc +-- local lpcmt = lpeg.Cmt +-- local lpcarg = lpeg.Carg + +-- function lpeg.match(l,...) print("LPEG MATCH") lpprint(l) return lpmatch(l,...) end + +-- function lpeg.P (l) local p = lpp (l) print("LPEG P =") lpprint(l) return p end +-- function lpeg.R (l) local p = lpr (l) print("LPEG R =") lpprint(l) return p end +-- function lpeg.S (l) local p = lps (l) print("LPEG S =") lpprint(l) return p end +-- function lpeg.C (l) local p = lpc (l) print("LPEG C =") lpprint(l) return p end +-- function lpeg.B (l) local p = lpb (l) print("LPEG B =") lpprint(l) return p end +-- function lpeg.V (l) local p = lpv (l) print("LPEG V =") lpprint(l) return p end +-- function lpeg.Cf (l) local p = lpcf (l) print("LPEG Cf =") lpprint(l) return p end +-- function lpeg.Cb (l) local p = lpcb (l) print("LPEG Cb =") lpprint(l) return p end +-- function lpeg.Cg (l) local p = lpcg (l) print("LPEG Cg =") lpprint(l) return p end +-- function lpeg.Ct (l) local p = lpct (l) print("LPEG Ct =") lpprint(l) return p end +-- function lpeg.Cs (l) local p = lpcs (l) print("LPEG Cs =") lpprint(l) return p end +-- function lpeg.Cc (l) local p = lpcc (l) print("LPEG Cc =") lpprint(l) return p end +-- function lpeg.Cmt (l) local p = lpcmt (l) print("LPEG Cmt =") lpprint(l) return p end +-- function lpeg.Carg (l) local p = lpcarg(l) print("LPEG Carg =") lpprint(l) return p end + +local type, next, tostring = type, next, tostring +local byte, char, gmatch, format = string.byte, string.char, string.gmatch, string.format +----- mod, div = math.mod, math.div +local floor = math.floor + +local P, R, S, V, Ct, C, Cs, Cc, Cp, Cmt = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Cp, lpeg.Cmt +local lpegtype, lpegmatch, lpegprint = lpeg.type, lpeg.match, lpeg.print + +-- let's start with an inspector: + +if setinspector then + setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end) +end + +-- Beware, we predefine a bunch of patterns here and one reason for doing so +-- is that we get consistent behaviour in some of the visualizers. 
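+--
+-- A small usage sketch (illustration only; "somestring" and "filedata" stand for arbitrary
+-- values): the shared patterns defined below are meant to be used with lpeg.match, for instance
+--
+--   lpeg.match(lpeg.patterns.validutf8, somestring)   -- true when somestring is well formed utf-8
+--   lpeg.match(lpeg.patterns.stripper,  "  padded  ") -- "padded"
+--   lpeg.match(lpeg.patterns.utftype,   filedata)     -- "utf-16-le", ... (from the bom, defaulting to "utf-8")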
+ +lpeg.patterns = lpeg.patterns or { } -- so that we can share +local patterns = lpeg.patterns + +local anything = P(1) +local endofstring = P(-1) +local alwaysmatched = P(true) + +patterns.anything = anything +patterns.endofstring = endofstring +patterns.beginofstring = alwaysmatched +patterns.alwaysmatched = alwaysmatched + +local sign = S('+-') +local zero = P('0') +local digit = R('09') +local octdigit = R("07") +local lowercase = R("az") +local uppercase = R("AZ") +local underscore = P("_") +local hexdigit = digit + lowercase + uppercase +local cr, lf, crlf = P("\r"), P("\n"), P("\r\n") +----- newline = crlf + S("\r\n") -- cr + lf +local newline = P("\r") * (P("\n") + P(true)) + P("\n") +local escaped = P("\\") * anything +local squote = P("'") +local dquote = P('"') +local space = P(" ") +local period = P(".") +local comma = P(",") + +local utfbom_32_be = P('\000\000\254\255') -- 00 00 FE FF +local utfbom_32_le = P('\255\254\000\000') -- FF FE 00 00 +local utfbom_16_be = P('\254\255') -- FE FF +local utfbom_16_le = P('\255\254') -- FF FE +local utfbom_8 = P('\239\187\191') -- EF BB BF +local utfbom = utfbom_32_be + utfbom_32_le + + utfbom_16_be + utfbom_16_le + + utfbom_8 +local utftype = utfbom_32_be * Cc("utf-32-be") + utfbom_32_le * Cc("utf-32-le") + + utfbom_16_be * Cc("utf-16-be") + utfbom_16_le * Cc("utf-16-le") + + utfbom_8 * Cc("utf-8") + alwaysmatched * Cc("utf-8") -- assume utf8 +local utfstricttype = utfbom_32_be * Cc("utf-32-be") + utfbom_32_le * Cc("utf-32-le") + + utfbom_16_be * Cc("utf-16-be") + utfbom_16_le * Cc("utf-16-le") + + utfbom_8 * Cc("utf-8") +local utfoffset = utfbom_32_be * Cc(4) + utfbom_32_le * Cc(4) + + utfbom_16_be * Cc(2) + utfbom_16_le * Cc(2) + + utfbom_8 * Cc(3) + Cc(0) + +local utf8next = R("\128\191") + +patterns.utfbom_32_be = utfbom_32_be +patterns.utfbom_32_le = utfbom_32_le +patterns.utfbom_16_be = utfbom_16_be +patterns.utfbom_16_le = utfbom_16_le +patterns.utfbom_8 = utfbom_8 + +patterns.utf_16_be_nl = P("\000\r\000\n") + P("\000\r") + P("\000\n") -- P("\000\r") * (P("\000\n") + P(true)) + P("\000\n") +patterns.utf_16_le_nl = P("\r\000\n\000") + P("\r\000") + P("\n\000") -- P("\r\000") * (P("\n\000") + P(true)) + P("\n\000") + +patterns.utf_32_be_nl = P("\000\000\000\r\000\000\000\n") + P("\000\000\000\r") + P("\000\000\000\n") +patterns.utf_32_le_nl = P("\r\000\000\000\n\000\000\000") + P("\r\000\000\000") + P("\n\000\000\000") + +patterns.utf8one = R("\000\127") +patterns.utf8two = R("\194\223") * utf8next +patterns.utf8three = R("\224\239") * utf8next * utf8next +patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next +patterns.utfbom = utfbom +patterns.utftype = utftype +patterns.utfstricttype = utfstricttype +patterns.utfoffset = utfoffset + +local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four +local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false) + +local utf8character = P(1) * R("\128\191")^0 -- unchecked but fast + +patterns.utf8 = utf8char +patterns.utf8char = utf8char +patterns.utf8character = utf8character -- this one can be used in most cases so we might use that one +patterns.validutf8 = validutf8char +patterns.validutf8char = validutf8char + +local eol = S("\n\r") +local spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto) +local whitespace = eol + spacer +local nonspacer = 1 - spacer +local nonwhitespace = 1 - whitespace + +patterns.eol = eol +patterns.spacer = spacer +patterns.whitespace = whitespace +patterns.nonspacer = nonspacer 
+patterns.nonwhitespace = nonwhitespace + +local stripper = spacer ^0 * C((spacer ^0 * nonspacer ^1)^0) -- from example by roberto +local fullstripper = whitespace^0 * C((whitespace^0 * nonwhitespace^1)^0) + +----- collapser = Cs(spacer^0/"" * ((spacer^1 * endofstring / "") + (spacer^1/" ") + P(1))^0) +local collapser = Cs(spacer^0/"" * nonspacer^0 * ((spacer^0/" " * nonspacer^1)^0)) + +local b_collapser = Cs( whitespace^0 /"" * (nonwhitespace^1 + whitespace^1/" ")^0) +local e_collapser = Cs((whitespace^1 * P(-1)/"" + nonwhitespace^1 + whitespace^1/" ")^0) +local m_collapser = Cs( (nonwhitespace^1 + whitespace^1/" ")^0) + +local b_stripper = Cs( spacer^0 /"" * (nonspacer^1 + spacer^1/" ")^0) +local e_stripper = Cs((spacer^1 * P(-1)/"" + nonspacer^1 + spacer^1/" ")^0) +local m_stripper = Cs( (nonspacer^1 + spacer^1/" ")^0) + +patterns.stripper = stripper +patterns.fullstripper = fullstripper +patterns.collapser = collapser + +patterns.b_collapser = b_collapser +patterns.m_collapser = m_collapser +patterns.e_collapser = e_collapser + +patterns.b_stripper = b_stripper +patterns.m_stripper = m_stripper +patterns.e_stripper = e_stripper + +patterns.lowercase = lowercase +patterns.uppercase = uppercase +patterns.letter = patterns.lowercase + patterns.uppercase +patterns.space = space +patterns.tab = P("\t") +patterns.spaceortab = patterns.space + patterns.tab +patterns.newline = newline +patterns.emptyline = newline^1 +patterns.equal = P("=") +patterns.comma = comma +patterns.commaspacer = comma * spacer^0 +patterns.period = period +patterns.colon = P(":") +patterns.semicolon = P(";") +patterns.underscore = underscore +patterns.escaped = escaped +patterns.squote = squote +patterns.dquote = dquote +patterns.nosquote = (escaped + (1-squote))^0 +patterns.nodquote = (escaped + (1-dquote))^0 +patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"") -- will change to C in the middle +patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"") -- will change to C in the middle +patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble +patterns.unspacer = ((patterns.spacer^1)/"")^0 + +patterns.singlequoted = squote * patterns.nosquote * squote +patterns.doublequoted = dquote * patterns.nodquote * dquote +patterns.quoted = patterns.doublequoted + patterns.singlequoted + +patterns.digit = digit +patterns.octdigit = octdigit +patterns.hexdigit = hexdigit +patterns.sign = sign +patterns.cardinal = digit^1 +patterns.integer = sign^-1 * digit^1 +patterns.unsigned = digit^0 * period * digit^1 +patterns.float = sign^-1 * patterns.unsigned +patterns.cunsigned = digit^0 * comma * digit^1 +patterns.cpunsigned = digit^0 * (period + comma) * digit^1 +patterns.cfloat = sign^-1 * patterns.cunsigned +patterns.cpfloat = sign^-1 * patterns.cpunsigned +patterns.number = patterns.float + patterns.integer +patterns.cnumber = patterns.cfloat + patterns.integer +patterns.cpnumber = patterns.cpfloat + patterns.integer +patterns.oct = zero * octdigit^1 +patterns.octal = patterns.oct +patterns.HEX = zero * P("X") * (digit+uppercase)^1 +patterns.hex = zero * P("x") * (digit+lowercase)^1 +patterns.hexadecimal = zero * S("xX") * hexdigit^1 + +patterns.hexafloat = sign^-1 + * zero * S("xX") + * (hexdigit^0 * period * hexdigit^1 + hexdigit^1 * period * hexdigit^0 + hexdigit^1) + * (S("pP") * sign^-1 * hexdigit^1)^-1 +patterns.decafloat = sign^-1 + * (digit^0 * period * digit^1 + digit^1 * period * digit^0 + digit^1) + * S("eE") * sign^-1 * digit^1 + +patterns.propername = (uppercase + lowercase 
+ underscore) * (uppercase + lowercase + underscore + digit)^0 * endofstring + +patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1 +patterns.beginline = #(1-newline) + +patterns.longtostring = Cs(whitespace^0/"" * ((patterns.quoted + nonwhitespace^1 + whitespace^1/"" * (P(-1) + Cc(" ")))^0)) + +local function anywhere(pattern) --slightly adapted from website + return P { P(pattern) + 1 * V(1) } +end + +lpeg.anywhere = anywhere + +function lpeg.instringchecker(p) + p = anywhere(p) + return function(str) + return lpegmatch(p,str) and true or false + end +end + +function lpeg.splitter(pattern, action) + return (((1-P(pattern))^1)/action+1)^0 +end + +function lpeg.tsplitter(pattern, action) + return Ct((((1-P(pattern))^1)/action+1)^0) +end + +-- probleem: separator can be lpeg and that does not hash too well, but +-- it's quite okay as the key is then not garbage collected + +local splitters_s, splitters_m, splitters_t = { }, { }, { } + +local function splitat(separator,single) + local splitter = (single and splitters_s[separator]) or splitters_m[separator] + if not splitter then + separator = P(separator) + local other = C((1 - separator)^0) + if single then + local any = anything + splitter = other * (separator * C(any^0) + "") -- ? + splitters_s[separator] = splitter + else + splitter = other * (separator * other)^0 + splitters_m[separator] = splitter + end + end + return splitter +end + +local function tsplitat(separator) + local splitter = splitters_t[separator] + if not splitter then + splitter = Ct(splitat(separator)) + splitters_t[separator] = splitter + end + return splitter +end + +lpeg.splitat = splitat +lpeg.tsplitat = tsplitat + +function string.splitup(str,separator) + if not separator then + separator = "," + end + return lpegmatch(splitters_m[separator] or splitat(separator),str) +end + +-- local p = splitat("->",false) print(lpegmatch(p,"oeps->what->more")) -- oeps what more +-- local p = splitat("->",true) print(lpegmatch(p,"oeps->what->more")) -- oeps what->more +-- local p = splitat("->",false) print(lpegmatch(p,"oeps")) -- oeps +-- local p = splitat("->",true) print(lpegmatch(p,"oeps")) -- oeps + +local cache = { } + +function lpeg.split(separator,str) + local c = cache[separator] + if not c then + c = tsplitat(separator) + cache[separator] = c + end + return lpegmatch(c,str) +end + +function string.split(str,separator) + if separator then + local c = cache[separator] + if not c then + c = tsplitat(separator) + cache[separator] = c + end + return lpegmatch(c,str) + else + return { str } + end +end + +local spacing = patterns.spacer^0 * newline -- sort of strip +local empty = spacing * Cc("") +local nonempty = Cs((1-spacing)^1) * spacing^-1 +local content = (empty + nonempty)^1 + +patterns.textline = content + +local linesplitter = tsplitat(newline) + +patterns.linesplitter = linesplitter + +function string.splitlines(str) + return lpegmatch(linesplitter,str) +end + +-- lpeg.splitters = cache -- no longer public + +local cache = { } + +function lpeg.checkedsplit(separator,str) + local c = cache[separator] + if not c then + separator = P(separator) + local other = C((1 - separator)^1) + c = Ct(separator^0 * other * (separator^1 * other)^0) + cache[separator] = c + end + return lpegmatch(c,str) +end + +function string.checkedsplit(str,separator) + local c = cache[separator] + if not c then + separator = P(separator) + local other = C((1 - separator)^1) + c = Ct(separator^0 * other * (separator^1 * other)^0) + cache[separator] = c + end + 
return lpegmatch(c,str) +end + +-- from roberto's site: + +local function f2(s) local c1, c2 = byte(s,1,2) return c1 * 64 + c2 - 12416 end +local function f3(s) local c1, c2, c3 = byte(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end +local function f4(s) local c1, c2, c3, c4 = byte(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end + +local utf8byte = patterns.utf8one/byte + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4 + +patterns.utf8byte = utf8byte + +--~ local str = " a b c d " + +--~ local s = lpeg.stripper(lpeg.R("az")) print("["..lpegmatch(s,str).."]") +--~ local s = lpeg.keeper(lpeg.R("az")) print("["..lpegmatch(s,str).."]") +--~ local s = lpeg.stripper("ab") print("["..lpegmatch(s,str).."]") +--~ local s = lpeg.keeper("ab") print("["..lpegmatch(s,str).."]") + +local cache = { } + +function lpeg.stripper(str) + if type(str) == "string" then + local s = cache[str] + if not s then + s = Cs(((S(str)^1)/"" + 1)^0) + cache[str] = s + end + return s + else + return Cs(((str^1)/"" + 1)^0) + end +end + +local cache = { } + +function lpeg.keeper(str) + if type(str) == "string" then + local s = cache[str] + if not s then + s = Cs((((1-S(str))^1)/"" + 1)^0) + cache[str] = s + end + return s + else + return Cs((((1-str)^1)/"" + 1)^0) + end +end + +function lpeg.frontstripper(str) -- or pattern (yet undocumented) + return (P(str) + P(true)) * Cs(anything^0) +end + +function lpeg.endstripper(str) -- or pattern (yet undocumented) + return Cs((1 - P(str) * endofstring)^0) +end + +-- Just for fun I looked at the used bytecode and +-- p = (p and p + pp) or pp gets one more (testset). + +-- todo: cache when string + +function lpeg.replacer(one,two,makefunction,isutf) -- in principle we should sort the keys + local pattern + local u = isutf and utf8char or 1 + if type(one) == "table" then + local no = #one + local p = P(false) + if no == 0 then + for k, v in next, one do + p = p + P(k) / v + end + pattern = Cs((p + u)^0) + elseif no == 1 then + local o = one[1] + one, two = P(o[1]), o[2] + -- pattern = Cs(((1-one)^1 + one/two)^0) + pattern = Cs((one/two + u)^0) + else + for i=1,no do + local o = one[i] + p = p + P(o[1]) / o[2] + end + pattern = Cs((p + u)^0) + end + else + pattern = Cs((P(one)/(two or "") + u)^0) + end + if makefunction then + return function(str) + return lpegmatch(pattern,str) + end + else + return pattern + end +end + +-- local pattern1 = P(1-P(pattern))^0 * P(pattern) : test for not nil +-- local pattern2 = (P(pattern) * Cc(true) + P(1))^0 : test for true (could be faster, but not much) + +function lpeg.finder(lst,makefunction,isutf) -- beware: slower than find with 'patternless finds' + local pattern + if type(lst) == "table" then + pattern = P(false) + if #lst == 0 then + for k, v in next, lst do + pattern = pattern + P(k) -- ignore key, so we can use a replacer table + end + else + for i=1,#lst do + pattern = pattern + P(lst[i]) + end + end + else + pattern = P(lst) + end + if isutf then + pattern = ((utf8char or 1)-pattern)^0 * pattern + else + pattern = (1-pattern)^0 * pattern + end + if makefunction then + return function(str) + return lpegmatch(pattern,str) + end + else + return pattern + end +end + +-- print(lpeg.match(lpeg.replacer("e","a"),"test test")) +-- print(lpeg.match(lpeg.replacer{{"e","a"}},"test test")) +-- print(lpeg.match(lpeg.replacer({ e = "a", t = "x" }),"test test")) + +local splitters_f, splitters_s = { }, { } + +function lpeg.firstofsplit(separator) -- always return value + local splitter = splitters_f[separator] + 
if not splitter then + local pattern = P(separator) + splitter = C((1 - pattern)^0) + splitters_f[separator] = splitter + end + return splitter +end + +function lpeg.secondofsplit(separator) -- nil if not split + local splitter = splitters_s[separator] + if not splitter then + local pattern = P(separator) + splitter = (1 - pattern)^0 * pattern * C(anything^0) + splitters_s[separator] = splitter + end + return splitter +end + +local splitters_s, splitters_p = { }, { } + +function lpeg.beforesuffix(separator) -- nil if nothing but empty is ok + local splitter = splitters_s[separator] + if not splitter then + local pattern = P(separator) + splitter = C((1 - pattern)^0) * pattern * endofstring + splitters_s[separator] = splitter + end + return splitter +end + +function lpeg.afterprefix(separator) -- nil if nothing but empty is ok + local splitter = splitters_p[separator] + if not splitter then + local pattern = P(separator) + splitter = pattern * C(anything^0) + splitters_p[separator] = splitter + end + return splitter +end + +function lpeg.balancer(left,right) + left, right = P(left), P(right) + return P { left * ((1 - left - right) + V(1))^0 * right } +end + +-- print(1,lpegmatch(lpeg.firstofsplit(":"),"bc:de")) +-- print(2,lpegmatch(lpeg.firstofsplit(":"),":de")) -- empty +-- print(3,lpegmatch(lpeg.firstofsplit(":"),"bc")) +-- print(4,lpegmatch(lpeg.secondofsplit(":"),"bc:de")) +-- print(5,lpegmatch(lpeg.secondofsplit(":"),"bc:")) -- empty +-- print(6,lpegmatch(lpeg.secondofsplit(":",""),"bc")) +-- print(7,lpegmatch(lpeg.secondofsplit(":"),"bc")) +-- print(9,lpegmatch(lpeg.secondofsplit(":","123"),"bc")) + +-- -- slower: +-- +-- function lpeg.counter(pattern) +-- local n, pattern = 0, (lpeg.P(pattern)/function() n = n + 1 end + lpeg.anything)^0 +-- return function(str) n = 0 ; lpegmatch(pattern,str) ; return n end +-- end + +local nany = utf8char/"" + +function lpeg.counter(pattern) + pattern = Cs((P(pattern)/" " + nany)^0) + return function(str) + return #lpegmatch(pattern,str) + end +end + +-- utf extensies + +utf = utf or (unicode and unicode.utf8) or { } + +local utfcharacters = utf and utf.characters or string.utfcharacters +local utfgmatch = utf and utf.gmatch +local utfchar = utf and utf.char + +lpeg.UP = lpeg.P + +if utfcharacters then + + function lpeg.US(str) + local p = P(false) + for uc in utfcharacters(str) do + p = p + P(uc) + end + return p + end + + +elseif utfgmatch then + + function lpeg.US(str) + local p = P(false) + for uc in utfgmatch(str,".") do + p = p + P(uc) + end + return p + end + +else + + function lpeg.US(str) + local p = P(false) + local f = function(uc) + p = p + P(uc) + end + lpegmatch((utf8char/f)^0,str) + return p + end + +end + +local range = utf8byte * utf8byte + Cc(false) -- utf8byte is already a capture + +function lpeg.UR(str,more) + local first, last + if type(str) == "number" then + first = str + last = more or first + else + first, last = lpegmatch(range,str) + if not last then + return P(str) + end + end + if first == last then + return P(str) + elseif utfchar and (last - first < 8) then -- a somewhat arbitrary criterium + local p = P(false) + for i=first,last do + p = p + P(utfchar(i)) + end + return p -- nil when invalid range + else + local f = function(b) + return b >= first and b <= last + end + -- tricky, these nested captures + return utf8byte / f -- nil when invalid range + end +end + +-- print(lpeg.match(lpeg.Cs((C(lpeg.UR("αω"))/{ ["χ"] = "OEPS" })^0),"αωχαω")) + +-- lpeg.print(lpeg.R("ab","cd","gh")) +-- lpeg.print(lpeg.P("a","b","c")) 
+-- lpeg.print(lpeg.S("a","b","c")) + +-- print(lpeg.count("äáàa",lpeg.P("á") + lpeg.P("à"))) +-- print(lpeg.count("äáàa",lpeg.UP("áà"))) +-- print(lpeg.count("äáàa",lpeg.US("àá"))) +-- print(lpeg.count("äáàa",lpeg.UR("aá"))) +-- print(lpeg.count("äáàa",lpeg.UR("àá"))) +-- print(lpeg.count("äáàa",lpeg.UR(0x0000,0xFFFF))) + +function lpeg.is_lpeg(p) + return p and lpegtype(p) == "pattern" +end + +function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then") -- assume proper order + if type(list) ~= "table" then + list = { list, ... } + end + -- table.sort(list) -- longest match first + local p = P(list[1]) + for l=2,#list do + p = p + P(list[l]) + end + return p +end + +-- For the moment here, but it might move to utilities. Beware, we need to +-- have the longest keyword first, so 'aaa' comes beforte 'aa' which is why we +-- loop back from the end cq. prepend. + +local sort = table.sort + +local function copyindexed(old) + local new = { } + for i=1,#old do + new[i] = old + end + return new +end + +local function sortedkeys(tab) + local keys, s = { }, 0 + for key,_ in next, tab do + s = s + 1 + keys[s] = key + end + sort(keys) + return keys +end + +function lpeg.append(list,pp,delayed,checked) + local p = pp + if #list > 0 then + local keys = copyindexed(list) + sort(keys) + for i=#keys,1,-1 do + local k = keys[i] + if p then + p = P(k) + p + else + p = P(k) + end + end + elseif delayed then -- hm, it looks like the lpeg parser resolves anyway + local keys = sortedkeys(list) + if p then + for i=1,#keys,1 do + local k = keys[i] + local v = list[k] + p = P(k)/list + p + end + else + for i=1,#keys do + local k = keys[i] + local v = list[k] + if p then + p = P(k) + p + else + p = P(k) + end + end + if p then + p = p / list + end + end + elseif checked then + -- problem: substitution gives a capture + local keys = sortedkeys(list) + for i=1,#keys do + local k = keys[i] + local v = list[k] + if p then + if k == v then + p = P(k) + p + else + p = P(k)/v + p + end + else + if k == v then + p = P(k) + else + p = P(k)/v + end + end + end + else + local keys = sortedkeys(list) + for i=1,#keys do + local k = keys[i] + local v = list[k] + if p then + p = P(k)/v + p + else + p = P(k)/v + end + end + end + return p +end + +-- inspect(lpeg.append({ a = "1", aa = "1", aaa = "1" } ,nil,true)) +-- inspect(lpeg.append({ ["degree celsius"] = "1", celsius = "1", degree = "1" } ,nil,true)) + +-- function lpeg.exact_match(words,case_insensitive) +-- local pattern = concat(words) +-- if case_insensitive then +-- local pattern = S(upper(characters)) + S(lower(characters)) +-- local list = { } +-- for i=1,#words do +-- list[lower(words[i])] = true +-- end +-- return Cmt(pattern^1, function(_,i,s) +-- return list[lower(s)] and i +-- end) +-- else +-- local pattern = S(concat(words)) +-- local list = { } +-- for i=1,#words do +-- list[words[i]] = true +-- end +-- return Cmt(pattern^1, function(_,i,s) +-- return list[s] and i +-- end) +-- end +-- end + +-- experiment: + +-- local function make(t) +-- local p +-- local keys = sortedkeys(t) +-- for i=1,#keys do +-- local k = keys[i] +-- local v = t[k] +-- if not p then +-- if next(v) then +-- p = P(k) * make(v) +-- else +-- p = P(k) +-- end +-- else +-- if next(v) then +-- p = p + P(k) * make(v) +-- else +-- p = p + P(k) +-- end +-- end +-- end +-- return p +-- end + +-- local function make(t) +-- local p = P(false) +-- local keys = sortedkeys(t) +-- for i=1,#keys do +-- local k = keys[i] +-- local v = t[k] +-- if next(v) then +-- p = p + P(k) * make(v) +-- 
else +-- p = p + P(k) +-- end +-- end +-- return p +-- end + +-- function lpeg.utfchartabletopattern(list) -- goes to util-lpg +-- local tree = { } +-- for i=1,#list do +-- local t = tree +-- for c in gmatch(list[i],".") do +-- local tc = t[c] +-- if not tc then +-- tc = { } +-- t[c] = tc +-- end +-- t = tc +-- end +-- end +-- return make(tree) +-- end + +local function make(t,hash) + local p = P(false) + local keys = sortedkeys(t) + for i=1,#keys do + local k = keys[i] + local v = t[k] + local h = hash[v] + if h then + if next(v) then + p = p + P(k) * (make(v,hash) + P(true)) + else + p = p + P(k) * P(true) + end + else + if next(v) then + p = p + P(k) * make(v,hash) + else + p = p + P(k) + end + end + end + return p +end + +function lpeg.utfchartabletopattern(list) -- goes to util-lpg + local tree = { } + local hash = { } + local n = #list + if n == 0 then + -- we could always use this branch + for s in next, list do + local t = tree + for c in gmatch(s,".") do + local tc = t[c] + if not tc then + tc = { } + t[c] = tc + end + t = tc + end + hash[t] = s + end + else + for i=1,n do + local t = tree + local s = list[i] + for c in gmatch(s,".") do + local tc = t[c] + if not tc then + tc = { } + t[c] = tc + end + t = tc + end + hash[t] = s + end + end + return make(tree,hash) +end + +-- inspect ( lpeg.utfchartabletopattern { +-- utfchar(0x00A0), -- nbsp +-- utfchar(0x2000), -- enquad +-- utfchar(0x2001), -- emquad +-- utfchar(0x2002), -- enspace +-- utfchar(0x2003), -- emspace +-- utfchar(0x2004), -- threeperemspace +-- utfchar(0x2005), -- fourperemspace +-- utfchar(0x2006), -- sixperemspace +-- utfchar(0x2007), -- figurespace +-- utfchar(0x2008), -- punctuationspace +-- utfchar(0x2009), -- breakablethinspace +-- utfchar(0x200A), -- hairspace +-- utfchar(0x200B), -- zerowidthspace +-- utfchar(0x202F), -- narrownobreakspace +-- utfchar(0x205F), -- math thinspace +-- } ) + +-- a few handy ones: +-- +-- faster than find(str,"[\n\r]") when match and # > 7 and always faster when # > 3 + +patterns.containseol = lpeg.finder(eol) -- (1-eol)^0 * eol + +-- The next pattern^n variant is based on an approach suggested +-- by Roberto: constructing a big repetition in chunks. +-- +-- Being sparse is not needed, and only complicate matters and +-- the number of redundant entries is not that large. + +local function nextstep(n,step,result) + local m = n % step -- mod(n,step) + local d = floor(n/step) -- div(n,step) + if d > 0 then + local v = V(tostring(step)) + local s = result.start + for i=1,d do + if s then + s = v * s + else + s = v + end + end + result.start = s + end + if step > 1 and result.start then + local v = V(tostring(step/2)) + result[tostring(step)] = v * v + end + if step > 0 then + return nextstep(m,step/2,result) + else + return result + end +end + +function lpeg.times(pattern,n) + return P(nextstep(n,2^16,{ "start", ["1"] = pattern })) +end + +-- local p = lpeg.Cs((1 - lpeg.times(lpeg.P("AB"),25))^1) +-- local s = "12" .. string.rep("AB",20) .. "34" .. string.rep("AB",30) .. 
"56" +-- inspect(p) +-- print(lpeg.match(p,s)) + +-- moved here (before util-str) + +----- digit = R("09") +----- period = P(".") +----- zero = P("0") +local trailingzeros = zero^0 * -digit -- suggested by Roberto R +local case_1 = period * trailingzeros / "" +local case_2 = period * (digit - trailingzeros)^1 * (trailingzeros / "") +local number = digit^1 * (case_1 + case_2) +local stripper = Cs((number + 1)^0) + +lpeg.patterns.stripzeros = stripper + +-- local sample = "bla 11.00 bla 11 bla 0.1100 bla 1.00100 bla 0.00 bla 0.001 bla 1.1100 bla 0.100100100 bla 0.00100100100" +-- collectgarbage("collect") +-- str = string.rep(sample,10000) +-- local ts = os.clock() +-- lpegmatch(stripper,str) +-- print(#str, os.clock()-ts, lpegmatch(stripper,sample)) + +-- for practical reasone we keep this here: + +local byte_to_HEX = { } +local byte_to_hex = { } +local byte_to_dec = { } -- for md5 +local hex_to_byte = { } + +for i=0,255 do + local H = format("%02X",i) + local h = format("%02x",i) + local d = format("%03i",i) + local c = char(i) + byte_to_HEX[c] = H + byte_to_hex[c] = h + byte_to_dec[c] = d + hex_to_byte[h] = c + hex_to_byte[H] = c +end + +local hextobyte = P(2)/hex_to_byte +local bytetoHEX = P(1)/byte_to_HEX +local bytetohex = P(1)/byte_to_hex +local bytetodec = P(1)/byte_to_dec +local hextobytes = Cs(hextobyte^0) +local bytestoHEX = Cs(bytetoHEX^0) +local bytestohex = Cs(bytetohex^0) +local bytestodec = Cs(bytetodec^0) + +patterns.hextobyte = hextobyte +patterns.bytetoHEX = bytetoHEX +patterns.bytetohex = bytetohex +patterns.bytetodec = bytetodec +patterns.hextobytes = hextobytes +patterns.bytestoHEX = bytestoHEX +patterns.bytestohex = bytestohex +patterns.bytestodec = bytestodec + +function string.toHEX(s) + if not s or s == "" then + return s + else + return lpegmatch(bytestoHEX,s) + end +end + +function string.tohex(s) + if not s or s == "" then + return s + else + return lpegmatch(bytestohex,s) + end +end + +function string.todec(s) + if not s or s == "" then + return s + else + return lpegmatch(bytestodec,s) + end +end + +function string.tobytes(s) + if not s or s == "" then + return s + else + return lpegmatch(hextobytes,s) + end +end + +-- local h = "ADFE0345" +-- local b = lpegmatch(patterns.hextobytes,h) +-- print(h,b,string.tohex(b),string.toHEX(b)) diff --git a/src/fontloader/misc/fontloader-l-lua.lua b/src/fontloader/misc/fontloader-l-lua.lua new file mode 100644 index 0000000..9565f48 --- /dev/null +++ b/src/fontloader/misc/fontloader-l-lua.lua @@ -0,0 +1,167 @@ +if not modules then modules = { } end modules ['l-lua'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- potential issues with 5.3: + +-- i'm not sure yet if the int/float change is good for luatex + +-- math.min +-- math.max +-- tostring +-- tonumber +-- utf.* +-- bit32 + +-- compatibility hacksand helpers + +local major, minor = string.match(_VERSION,"^[^%d]+(%d+)%.(%d+).*$") + +_MAJORVERSION = tonumber(major) or 5 +_MINORVERSION = tonumber(minor) or 1 +_LUAVERSION = _MAJORVERSION + _MINORVERSION/10 + +-- lpeg + +if not lpeg then + lpeg = require("lpeg") +end + +-- basics: + +if loadstring then + + local loadnormal = load + + function load(first,...) + if type(first) == "string" then + return loadstring(first,...) + else + return loadnormal(first,...) 
+ end + end + +else + + loadstring = load + +end + +-- table: + +-- At some point it was announced that i[pairs would be dropped, which makes +-- sense. As we already used the for loop and # in most places the impact on +-- ConTeXt was not that large; the remaining ipairs already have been replaced. +-- Hm, actually ipairs was retained, but we no longer use it anyway (nor +-- pairs). +-- +-- Just in case, we provide the fallbacks as discussed in Programming +-- in Lua (http://www.lua.org/pil/7.3.html): + +if not ipairs then + + -- for k, v in ipairs(t) do ... end + -- for k=1,#t do local v = t[k] ... end + + local function iterate(a,i) + i = i + 1 + local v = a[i] + if v ~= nil then + return i, v --, nil + end + end + + function ipairs(a) + return iterate, a, 0 + end + +end + +if not pairs then + + -- for k, v in pairs(t) do ... end + -- for k, v in next, t do ... end + + function pairs(t) + return next, t -- , nil + end + +end + +-- The unpack function has been moved to the table table, and for compatiility +-- reasons we provide both now. + +if not table.unpack then + + table.unpack = _G.unpack + +elseif not unpack then + + _G.unpack = table.unpack + +end + +-- package: + +-- if not package.seachers then +-- +-- package.searchers = package.loaders -- 5.2 +-- +-- elseif not package.loaders then +-- +-- package.loaders = package.searchers +-- +-- end + +if not package.loaders then -- brr, searchers is a special "loadlib function" userdata type + + package.loaders = package.searchers + +end + +-- moved from util-deb to here: + +local print, select, tostring = print, select, tostring + +local inspectors = { } + +function setinspector(inspector) -- global function + inspectors[#inspectors+1] = inspector +end + +function inspect(...) -- global function + for s=1,select("#",...) do + local value = select(s,...) + local done = false + for i=1,#inspectors do + done = inspectors[i](value) + if done then + break + end + end + if not done then + print(tostring(value)) + end + end +end + +-- + +local dummy = function() end + +function optionalrequire(...) + local ok, result = xpcall(require,dummy,...) 
+ if ok then + return result + end +end + +-- nice for non ascii scripts (this might move): + +if lua then + lua.mask = load([[τεχ = 1]]) and "utf" or "ascii" +end diff --git a/src/fontloader/misc/fontloader-l-math.lua b/src/fontloader/misc/fontloader-l-math.lua new file mode 100644 index 0000000..43f60b5 --- /dev/null +++ b/src/fontloader/misc/fontloader-l-math.lua @@ -0,0 +1,34 @@ +if not modules then modules = { } end modules ['l-math'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local floor, sin, cos, tan = math.floor, math.sin, math.cos, math.tan + +if not math.round then + function math.round(x) return floor(x + 0.5) end +end + +if not math.div then + function math.div(n,m) return floor(n/m) end +end + +if not math.mod then + function math.mod(n,m) return n % m end +end + +local pipi = 2*math.pi/360 + +if not math.sind then + function math.sind(d) return sin(d*pipi) end + function math.cosd(d) return cos(d*pipi) end + function math.tand(d) return tan(d*pipi) end +end + +if not math.odd then + function math.odd (n) return n % 2 ~= 0 end + function math.even(n) return n % 2 == 0 end +end diff --git a/src/fontloader/misc/fontloader-l-string.lua b/src/fontloader/misc/fontloader-l-string.lua new file mode 100644 index 0000000..3b1a000 --- /dev/null +++ b/src/fontloader/misc/fontloader-l-string.lua @@ -0,0 +1,212 @@ +if not modules then modules = { } end modules ['l-string'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local string = string +local sub, gmatch, format, char, byte, rep, lower = string.sub, string.gmatch, string.format, string.char, string.byte, string.rep, string.lower +local lpegmatch, patterns = lpeg.match, lpeg.patterns +local P, S, C, Ct, Cc, Cs = lpeg.P, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cc, lpeg.Cs + +-- Some functions are already defined in l-lpeg and maybe some from here will +-- move there (unless we also expose caches). + +-- if not string.split then +-- +-- function string.split(str,pattern) +-- local t = { } +-- if #str > 0 then +-- local n = 1 +-- for s in gmatch(str..pattern,"(.-)"..pattern) do +-- t[n] = s +-- n = n + 1 +-- end +-- end +-- return t +-- end +-- +-- end + +-- function string.unquoted(str) +-- return (gsub(str,"^([\"\'])(.*)%1$","%2")) -- interesting pattern +-- end + +local unquoted = patterns.squote * C(patterns.nosquote) * patterns.squote + + patterns.dquote * C(patterns.nodquote) * patterns.dquote + +function string.unquoted(str) + return lpegmatch(unquoted,str) or str +end + +-- print(string.unquoted("test")) +-- print(string.unquoted([["t\"est"]])) +-- print(string.unquoted([["t\"est"x]])) +-- print(string.unquoted("\'test\'")) +-- print(string.unquoted('"test"')) +-- print(string.unquoted('"test"')) + +function string.quoted(str) + return format("%q",str) -- always double quote +end + +function string.count(str,pattern) -- variant 3 + local n = 0 + for _ in gmatch(str,pattern) do -- not for utf + n = n + 1 + end + return n +end + +function string.limit(str,n,sentinel) -- not utf proof + if #str > n then + sentinel = sentinel or "..." + return sub(str,1,(n-#sentinel)) .. 
sentinel + else + return str + end +end + +local stripper = patterns.stripper +local fullstripper = patterns.fullstripper +local collapser = patterns.collapser +local longtostring = patterns.longtostring + +function string.strip(str) + return lpegmatch(stripper,str) or "" +end + +function string.fullstrip(str) + return lpegmatch(fullstripper,str) or "" +end + +function string.collapsespaces(str) + return lpegmatch(collapser,str) or "" +end + +function string.longtostring(str) + return lpegmatch(longtostring,str) or "" +end + +-- function string.is_empty(str) +-- return not find(str,"%S") +-- end + +local pattern = P(" ")^0 * P(-1) + +-- patterns.onlyspaces = pattern + +function string.is_empty(str) + if str == "" then + return true + else + return lpegmatch(pattern,str) and true or false + end +end + +-- if not string.escapedpattern then +-- +-- local patterns_escapes = { +-- ["%"] = "%%", +-- ["."] = "%.", +-- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*", +-- ["["] = "%[", ["]"] = "%]", +-- ["("] = "%(", [")"] = "%)", +-- -- ["{"] = "%{", ["}"] = "%}" +-- -- ["^"] = "%^", ["$"] = "%$", +-- } +-- +-- local simple_escapes = { +-- ["-"] = "%-", +-- ["."] = "%.", +-- ["?"] = ".", +-- ["*"] = ".*", +-- } +-- +-- function string.escapedpattern(str,simple) +-- return (gsub(str,".",simple and simple_escapes or patterns_escapes)) +-- end +-- +-- function string.topattern(str,lowercase,strict) +-- if str == "" then +-- return ".*" +-- else +-- str = gsub(str,".",simple_escapes) +-- if lowercase then +-- str = lower(str) +-- end +-- if strict then +-- return "^" .. str .. "$" +-- else +-- return str +-- end +-- end +-- end +-- +-- end + +--- needs checking + +local anything = patterns.anything +local allescapes = Cc("%") * S(".-+%?()[]*") -- also {} and ^$ ? +local someescapes = Cc("%") * S(".-+%()[]") -- also {} and ^$ ? +local matchescapes = Cc(".") * S("*?") -- wildcard and single match + +local pattern_a = Cs ( ( allescapes + anything )^0 ) +local pattern_b = Cs ( ( someescapes + matchescapes + anything )^0 ) +local pattern_c = Cs ( Cc("^") * ( someescapes + matchescapes + anything )^0 * Cc("$") ) + +function string.escapedpattern(str,simple) + return lpegmatch(simple and pattern_b or pattern_a,str) +end + +function string.topattern(str,lowercase,strict) + if str=="" or type(str) ~= "string" then + return ".*" + elseif strict then + str = lpegmatch(pattern_c,str) + else + str = lpegmatch(pattern_b,str) + end + if lowercase then + return lower(str) + else + return str + end +end + +-- print(string.escapedpattern("12+34*.tex",false)) +-- print(string.escapedpattern("12+34*.tex",true)) +-- print(string.topattern ("12+34*.tex",false,false)) +-- print(string.topattern ("12+34*.tex",false,true)) + +function string.valid(str,default) + return (type(str) == "string" and str ~= "" and str) or default or nil +end + +-- handy fallback + +string.itself = function(s) return s end + +-- also handy (see utf variant) + +local pattern = Ct(C(1)^0) -- string and not utf ! + +function string.totable(str) + return lpegmatch(pattern,str) +end + +-- handy from within tex: + +local replacer = lpeg.replacer("@","%%") -- Watch the escaped % in lpeg! + +function string.tformat(fmt,...) + return format(lpegmatch(replacer,fmt),...) 
+end + +-- obsolete names: + +string.quote = string.quoted +string.unquote = string.unquoted diff --git a/src/fontloader/misc/fontloader-l-table.lua b/src/fontloader/misc/fontloader-l-table.lua new file mode 100644 index 0000000..3eb8b85 --- /dev/null +++ b/src/fontloader/misc/fontloader-l-table.lua @@ -0,0 +1,1173 @@ +if not modules then modules = { } end modules ['l-table'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local type, next, tostring, tonumber, ipairs, select = type, next, tostring, tonumber, ipairs, select +local table, string = table, string +local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove +local format, lower, dump = string.format, string.lower, string.dump +local getmetatable, setmetatable = getmetatable, setmetatable +local getinfo = debug.getinfo +local lpegmatch, patterns = lpeg.match, lpeg.patterns +local floor = math.floor + +-- extra functions, some might go (when not used) +-- +-- we could serialize using %a but that won't work well is in the code we mostly use +-- floats and as such we get unequality e.g. in version comparisons + +local stripper = patterns.stripper + +function table.strip(tab) + local lst, l = { }, 0 + for i=1,#tab do + local s = lpegmatch(stripper,tab[i]) or "" + if s == "" then + -- skip this one + else + l = l + 1 + lst[l] = s + end + end + return lst +end + +function table.keys(t) + if t then + local keys, k = { }, 0 + for key, _ in next, t do + k = k + 1 + keys[k] = key + end + return keys + else + return { } + end +end + +local function compare(a,b) + local ta, tb = type(a), type(b) -- needed, else 11 < 2 + if ta == tb then + return a < b + else + return tostring(a) < tostring(b) -- not that efficient + end +end + +local function sortedkeys(tab) + if tab then + local srt, category, s = { }, 0, 0 -- 0=unknown 1=string, 2=number 3=mixed + for key,_ in next, tab do + s = s + 1 + srt[s] = key + if category == 3 then + -- no further check + else + local tkey = type(key) + if tkey == "string" then + category = (category == 2 and 3) or 1 + elseif tkey == "number" then + category = (category == 1 and 3) or 2 + else + category = 3 + end + end + end + if category == 0 or category == 3 then + sort(srt,compare) + else + sort(srt) + end + return srt + else + return { } + end +end + +local function sortedhashonly(tab) + if tab then + local srt, s = { }, 0 + for key,_ in next, tab do + if type(key) == "string" then + s = s + 1 + srt[s] = key + end + end + sort(srt) + return srt + else + return { } + end +end + +local function sortedindexonly(tab) + if tab then + local srt, s = { }, 0 + for key,_ in next, tab do + if type(key) == "number" then + s = s + 1 + srt[s] = key + end + end + sort(srt) + return srt + else + return { } + end +end + +local function sortedhashkeys(tab,cmp) -- fast one + if tab then + local srt, s = { }, 0 + for key,_ in next, tab do + if key then + s= s + 1 + srt[s] = key + end + end + sort(srt,cmp) + return srt + else + return { } + end +end + +function table.allkeys(t) + local keys = { } + for k, v in next, t do + for k, v in next, v do + keys[k] = true + end + end + return sortedkeys(keys) +end + +table.sortedkeys = sortedkeys +table.sortedhashonly = sortedhashonly +table.sortedindexonly = sortedindexonly +table.sortedhashkeys = sortedhashkeys + +local function nothing() end + +local function sortedhash(t,cmp) + if t 
then + local s + if cmp then + -- it would be nice if the sort function would accept a third argument (or nicer, an optional first) + s = sortedhashkeys(t,function(a,b) return cmp(t,a,b) end) + else + s = sortedkeys(t) -- the robust one + end + local n = 0 + local m = #s + local function kv() -- (s) + if n < m then + n = n + 1 + local k = s[n] + return k, t[k] + end + end + return kv -- , s + else + return nothing + end +end + +table.sortedhash = sortedhash +table.sortedpairs = sortedhash -- obsolete + +function table.append(t,list) + local n = #t + for i=1,#list do + n = n + 1 + t[n] = list[i] + end + return t +end + +function table.prepend(t, list) + local nl = #list + local nt = nl + #t + for i=#t,1,-1 do + t[nt] = t[i] + nt = nt - 1 + end + for i=1,#list do + t[i] = list[i] + end + return t +end + +-- function table.merge(t, ...) -- first one is target +-- t = t or { } +-- local lst = { ... } +-- for i=1,#lst do +-- for k, v in next, lst[i] do +-- t[k] = v +-- end +-- end +-- return t +-- end + +function table.merge(t, ...) -- first one is target + t = t or { } + for i=1,select("#",...) do + for k, v in next, (select(i,...)) do + t[k] = v + end + end + return t +end + +-- function table.merged(...) +-- local tmp, lst = { }, { ... } +-- for i=1,#lst do +-- for k, v in next, lst[i] do +-- tmp[k] = v +-- end +-- end +-- return tmp +-- end + +function table.merged(...) + local t = { } + for i=1,select("#",...) do + for k, v in next, (select(i,...)) do + t[k] = v + end + end + return t +end + +-- function table.imerge(t, ...) +-- local lst, nt = { ... }, #t +-- for i=1,#lst do +-- local nst = lst[i] +-- for j=1,#nst do +-- nt = nt + 1 +-- t[nt] = nst[j] +-- end +-- end +-- return t +-- end + +function table.imerge(t, ...) + local nt = #t + for i=1,select("#",...) do + local nst = select(i,...) + for j=1,#nst do + nt = nt + 1 + t[nt] = nst[j] + end + end + return t +end + +-- function table.imerged(...) +-- local tmp, ntmp, lst = { }, 0, {...} +-- for i=1,#lst do +-- local nst = lst[i] +-- for j=1,#nst do +-- ntmp = ntmp + 1 +-- tmp[ntmp] = nst[j] +-- end +-- end +-- return tmp +-- end + +function table.imerged(...) + local tmp, ntmp = { }, 0 + for i=1,select("#",...) do + local nst = select(i,...) 
+ for j=1,#nst do + ntmp = ntmp + 1 + tmp[ntmp] = nst[j] + end + end + return tmp +end + +local function fastcopy(old,metatabletoo) -- fast one + if old then + local new = { } + for k, v in next, old do + if type(v) == "table" then + new[k] = fastcopy(v,metatabletoo) -- was just table.copy + else + new[k] = v + end + end + if metatabletoo then + -- optional second arg + local mt = getmetatable(old) + if mt then + setmetatable(new,mt) + end + end + return new + else + return { } + end +end + +-- todo : copy without metatable + +local function copy(t, tables) -- taken from lua wiki, slightly adapted + tables = tables or { } + local tcopy = {} + if not tables[t] then + tables[t] = tcopy + end + for i,v in next, t do -- brrr, what happens with sparse indexed + if type(i) == "table" then + if tables[i] then + i = tables[i] + else + i = copy(i, tables) + end + end + if type(v) ~= "table" then + tcopy[i] = v + elseif tables[v] then + tcopy[i] = tables[v] + else + tcopy[i] = copy(v, tables) + end + end + local mt = getmetatable(t) + if mt then + setmetatable(tcopy,mt) + end + return tcopy +end + +table.fastcopy = fastcopy +table.copy = copy + +function table.derive(parent) -- for the moment not public + local child = { } + if parent then + setmetatable(child,{ __index = parent }) + end + return child +end + +function table.tohash(t,value) + local h = { } + if t then + if value == nil then value = true end + for _, v in next, t do -- no ipairs here + h[v] = value + end + end + return h +end + +function table.fromhash(t) + local hsh, h = { }, 0 + for k, v in next, t do -- no ipairs here + if v then + h = h + 1 + hsh[h] = k + end + end + return hsh +end + +local noquotes, hexify, handle, reduce, compact, inline, functions + +local reserved = table.tohash { -- intercept a language inconvenience: no reserved words as key + 'and', 'break', 'do', 'else', 'elseif', 'end', 'false', 'for', 'function', 'if', + 'in', 'local', 'nil', 'not', 'or', 'repeat', 'return', 'then', 'true', 'until', 'while', + 'NaN', 'goto', +} + +local function simple_table(t) + if #t > 0 then + local n = 0 + for _,v in next, t do + n = n + 1 + end + if n == #t then + local tt, nt = { }, 0 + for i=1,#t do + local v = t[i] + local tv = type(v) + if tv == "number" then + nt = nt + 1 + if hexify then + tt[nt] = format("0x%X",v) + else + tt[nt] = tostring(v) -- tostring not needed + end + elseif tv == "string" then + nt = nt + 1 + tt[nt] = format("%q",v) + elseif tv == "boolean" then + nt = nt + 1 + tt[nt] = v and "true" or "false" + else + tt = nil + break + end + end + return tt + end + end + return nil +end + +-- Because this is a core function of mkiv I moved some function calls +-- inline. +-- +-- twice as fast in a test: +-- +-- local propername = lpeg.P(lpeg.R("AZ","az","__") * lpeg.R("09","AZ","az", "__")^0 * lpeg.P(-1) ) + +-- problem: there no good number_to_string converter with the best resolution + +-- probably using .. is faster than format +-- maybe split in a few cases (yes/no hexify) + +-- todo: %g faster on numbers than %s + +-- we can speed this up with repeaters and formatters but we haven't defined them +-- yet + +local propername = patterns.propername -- was find(name,"^%a[%w%_]*$") + +local function dummy() end + +local function do_serialize(root,name,depth,level,indexed) + if level > 0 then + depth = depth .. 
" " + if indexed then + handle(format("%s{",depth)) + else + local tn = type(name) + if tn == "number" then + if hexify then + handle(format("%s[0x%X]={",depth,name)) + else + handle(format("%s[%s]={",depth,name)) + end + elseif tn == "string" then + if noquotes and not reserved[name] and lpegmatch(propername,name) then + handle(format("%s%s={",depth,name)) + else + handle(format("%s[%q]={",depth,name)) + end + elseif tn == "boolean" then + handle(format("%s[%s]={",depth,name and "true" or "false")) + else + handle(format("%s{",depth)) + end + end + end + -- we could check for k (index) being number (cardinal) + if root and next(root) then + -- local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone) + -- if compact then + -- -- NOT: for k=1,#root do (we need to quit at nil) + -- for k,v in ipairs(root) do -- can we use next? + -- if not first then first = k end + -- last = last + 1 + -- end + -- end + local first, last = nil, 0 + if compact then + last = #root + for k=1,last do + if root[k] == nil then + last = k - 1 + break + end + end + if last > 0 then + first = 1 + end + end + local sk = sortedkeys(root) + for i=1,#sk do + local k = sk[i] + local v = root[k] + --~ if v == root then + -- circular + --~ else + local tv, tk = type(v), type(k) + if compact and first and tk == "number" and k >= first and k <= last then + if tv == "number" then + if hexify then + handle(format("%s 0x%X,",depth,v)) + else + handle(format("%s %s,",depth,v)) -- %.99g + end + elseif tv == "string" then + if reduce and tonumber(v) then + handle(format("%s %s,",depth,v)) + else + handle(format("%s %q,",depth,v)) + end + elseif tv == "table" then + if not next(v) then + handle(format("%s {},",depth)) + elseif inline then -- and #t > 0 + local st = simple_table(v) + if st then + handle(format("%s { %s },",depth,concat(st,", "))) + else + do_serialize(v,k,depth,level+1,true) + end + else + do_serialize(v,k,depth,level+1,true) + end + elseif tv == "boolean" then + handle(format("%s %s,",depth,v and "true" or "false")) + elseif tv == "function" then + if functions then + handle(format('%s load(%q),',depth,dump(v))) -- maybe strip + else + handle(format('%s "function",',depth)) + end + else + handle(format("%s %q,",depth,tostring(v))) + end + elseif k == "__p__" then -- parent + if false then + handle(format("%s __p__=nil,",depth)) + end + elseif tv == "number" then + if tk == "number" then + if hexify then + handle(format("%s [0x%X]=0x%X,",depth,k,v)) + else + handle(format("%s [%s]=%s,",depth,k,v)) -- %.99g + end + elseif tk == "boolean" then + if hexify then + handle(format("%s [%s]=0x%X,",depth,k and "true" or "false",v)) + else + handle(format("%s [%s]=%s,",depth,k and "true" or "false",v)) -- %.99g + end + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + if hexify then + handle(format("%s %s=0x%X,",depth,k,v)) + else + handle(format("%s %s=%s,",depth,k,v)) -- %.99g + end + else + if hexify then + handle(format("%s [%q]=0x%X,",depth,k,v)) + else + handle(format("%s [%q]=%s,",depth,k,v)) -- %.99g + end + end + elseif tv == "string" then + if reduce and tonumber(v) then + if tk == "number" then + if hexify then + handle(format("%s [0x%X]=%s,",depth,k,v)) + else + handle(format("%s [%s]=%s,",depth,k,v)) + end + elseif tk == "boolean" then + handle(format("%s [%s]=%s,",depth,k and "true" or "false",v)) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s=%s,",depth,k,v)) + else + handle(format("%s 
[%q]=%s,",depth,k,v)) + end + else + if tk == "number" then + if hexify then + handle(format("%s [0x%X]=%q,",depth,k,v)) + else + handle(format("%s [%s]=%q,",depth,k,v)) + end + elseif tk == "boolean" then + handle(format("%s [%s]=%q,",depth,k and "true" or "false",v)) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s=%q,",depth,k,v)) + else + handle(format("%s [%q]=%q,",depth,k,v)) + end + end + elseif tv == "table" then + if not next(v) then + if tk == "number" then + if hexify then + handle(format("%s [0x%X]={},",depth,k)) + else + handle(format("%s [%s]={},",depth,k)) + end + elseif tk == "boolean" then + handle(format("%s [%s]={},",depth,k and "true" or "false")) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s={},",depth,k)) + else + handle(format("%s [%q]={},",depth,k)) + end + elseif inline then + local st = simple_table(v) + if st then + if tk == "number" then + if hexify then + handle(format("%s [0x%X]={ %s },",depth,k,concat(st,", "))) + else + handle(format("%s [%s]={ %s },",depth,k,concat(st,", "))) + end + elseif tk == "boolean" then + handle(format("%s [%s]={ %s },",depth,k and "true" or "false",concat(st,", "))) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s={ %s },",depth,k,concat(st,", "))) + else + handle(format("%s [%q]={ %s },",depth,k,concat(st,", "))) + end + else + do_serialize(v,k,depth,level+1) + end + else + do_serialize(v,k,depth,level+1) + end + elseif tv == "boolean" then + if tk == "number" then + if hexify then + handle(format("%s [0x%X]=%s,",depth,k,v and "true" or "false")) + else + handle(format("%s [%s]=%s,",depth,k,v and "true" or "false")) + end + elseif tk == "boolean" then + handle(format("%s [%s]=%s,",depth,tostring(k),v and "true" or "false")) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s=%s,",depth,k,v and "true" or "false")) + else + handle(format("%s [%q]=%s,",depth,k,v and "true" or "false")) + end + elseif tv == "function" then + if functions then + local f = getinfo(v).what == "C" and dump(dummy) or dump(v) -- maybe strip + -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v) -- maybe strip + if tk == "number" then + if hexify then + handle(format("%s [0x%X]=load(%q),",depth,k,f)) + else + handle(format("%s [%s]=load(%q),",depth,k,f)) + end + elseif tk == "boolean" then + handle(format("%s [%s]=load(%q),",depth,k and "true" or "false",f)) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s=load(%q),",depth,k,f)) + else + handle(format("%s [%q]=load(%q),",depth,k,f)) + end + end + else + if tk == "number" then + if hexify then + handle(format("%s [0x%X]=%q,",depth,k,tostring(v))) + else + handle(format("%s [%s]=%q,",depth,k,tostring(v))) + end + elseif tk == "boolean" then + handle(format("%s [%s]=%q,",depth,k and "true" or "false",tostring(v))) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s=%q,",depth,k,tostring(v))) + else + handle(format("%s [%q]=%q,",depth,k,tostring(v))) + end + end + --~ end + end + end + if level > 0 then + handle(format("%s},",depth)) + end +end + +-- replacing handle by a direct t[#t+1] = ... 
(plus test) is not much +-- faster (0.03 on 1.00 for zapfino.tma) + +local function serialize(_handle,root,name,specification) -- handle wins + local tname = type(name) + if type(specification) == "table" then + noquotes = specification.noquotes + hexify = specification.hexify + handle = _handle or specification.handle or print + reduce = specification.reduce or false + functions = specification.functions + compact = specification.compact + inline = specification.inline and compact + if functions == nil then + functions = true + end + if compact == nil then + compact = true + end + if inline == nil then + inline = compact + end + else + noquotes = false + hexify = false + handle = _handle or print + reduce = false + compact = true + inline = true + functions = true + end + if tname == "string" then + if name == "return" then + handle("return {") + else + handle(name .. "={") + end + elseif tname == "number" then + if hexify then + handle(format("[0x%X]={",name)) + else + handle("[" .. name .. "]={") + end + elseif tname == "boolean" then + if name then + handle("return {") + else + handle("{") + end + else + handle("t={") + end + if root then + -- The dummy access will initialize a table that has a delayed initialization + -- using a metatable. (maybe explicitly test for metatable) + if getmetatable(root) then -- todo: make this an option, maybe even per subtable + local dummy = root._w_h_a_t_e_v_e_r_ + root._w_h_a_t_e_v_e_r_ = nil + end + -- Let's forget about empty tables. + if next(root) then + do_serialize(root,name,"",0) + end + end + handle("}") +end + +-- A version with formatters is some 20% faster than using format (because formatters are +-- much faster) but of course, inlining the format using .. is then again faster .. anyway, +-- as we do some pretty printing as well there is not that much to gain unless we make a +-- 'fast' ugly variant as well. But, we would have to move the formatter to l-string then. 
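+
+-- A minimal usage sketch (illustration only; the sample table is made up): the
+-- local serializer above just feeds formatted lines to whatever handle it is
+-- given, here plain print; the table.serialize wrapper below collects the same
+-- lines into a string instead.
+--
+-- serialize(print, { a = 1, b = { "x", "y" } }, "return")
+--
+-- prints:
+--
+-- return {
+--  ["a"]=1,
+--  ["b"]={ "x", "y" },
+-- }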
+ +-- name: +-- +-- true : return { } +-- false : { } +-- nil : t = { } +-- string : string = { } +-- "return" : return { } +-- number : [number] = { } + +function table.serialize(root,name,specification) + local t, n = { }, 0 + local function flush(s) + n = n + 1 + t[n] = s + end + serialize(flush,root,name,specification) + return concat(t,"\n") +end + +-- local a = { e = { 1,2,3,4,5,6}, a = 1, b = 2, c = "ccc", d = { a = 1, b = 2, c = "ccc", d = { a = 1, b = 2, c = "ccc" } } } +-- local t = os.clock() +-- for i=1,10000 do +-- table.serialize(a) +-- end +-- print(os.clock()-t,table.serialize(a)) + +table.tohandle = serialize + +-- sometimes tables are real use (zapfino extra pro is some 85M) in which +-- case a stepwise serialization is nice; actually, we could consider: +-- +-- for line in table.serializer(root,name,reduce,noquotes) do +-- ...(line) +-- end +-- +-- so this is on the todo list + +local maxtab = 2*1024 + +function table.tofile(filename,root,name,specification) + local f = io.open(filename,'w') + if f then + if maxtab > 1 then + local t, n = { }, 0 + local function flush(s) + n = n + 1 + t[n] = s + if n > maxtab then + f:write(concat(t,"\n"),"\n") -- hm, write(sometable) should be nice + t, n = { }, 0 -- we could recycle t if needed + end + end + serialize(flush,root,name,specification) + f:write(concat(t,"\n"),"\n") + else + local function flush(s) + f:write(s,"\n") + end + serialize(flush,root,name,specification) + end + f:close() + io.flush() + end +end + +local function flattened(t,f,depth) -- also handles { nil, 1, nil, 2 } + if f == nil then + f = { } + depth = 0xFFFF + elseif tonumber(f) then + -- assume that only two arguments are given + depth = f + f = { } + elseif not depth then + depth = 0xFFFF + end + for k, v in next, t do + if type(k) ~= "number" then + if depth > 0 and type(v) == "table" then + flattened(v,f,depth-1) + else + f[#f+1] = v + end + end + end + for k=1,#t do + local v = t[k] + if depth > 0 and type(v) == "table" then + flattened(v,f,depth-1) + else + f[#f+1] = v + end + end + return f +end + +table.flattened = flattened + +local function unnest(t,f) -- only used in mk, for old times sake + if not f then -- and only relevant for token lists + f = { } -- this one can become obsolete + end + for i=1,#t do + local v = t[i] + if type(v) == "table" then + if type(v[1]) == "table" then + unnest(v,f) + else + f[#f+1] = v + end + else + f[#f+1] = v + end + end + return f +end + +function table.unnest(t) -- bad name + return unnest(t) +end + +local function are_equal(a,b,n,m) -- indexed + if a and b and #a == #b then + n = n or 1 + m = m or #a + for i=n,m do + local ai, bi = a[i], b[i] + if ai==bi then + -- same + elseif type(ai) == "table" and type(bi) == "table" then + if not are_equal(ai,bi) then + return false + end + else + return false + end + end + return true + else + return false + end +end + +local function identical(a,b) -- assumes same structure + for ka, va in next, a do + local vb = b[ka] + if va == vb then + -- same + elseif type(va) == "table" and type(vb) == "table" then + if not identical(va,vb) then + return false + end + else + return false + end + end + return true +end + +table.identical = identical +table.are_equal = are_equal + +local function sparse(old,nest,keeptables) + local new = { } + for k, v in next, old do + if not (v == "" or v == false) then + if nest and type(v) == "table" then + v = sparse(v,nest) + if keeptables or next(v) then + new[k] = v + end + else + new[k] = v + end + end + end + return new +end + +table.sparse 
= sparse + +function table.compact(t) + return sparse(t,true,true) +end + +function table.contains(t, v) + if t then + for i=1, #t do + if t[i] == v then + return i + end + end + end + return false +end + +function table.count(t) + local n = 0 + for k, v in next, t do + n = n + 1 + end + return n +end + +function table.swapped(t,s) -- hash + local n = { } + if s then + for k, v in next, s do + n[k] = v + end + end + for k, v in next, t do + n[v] = k + end + return n +end + +function table.mirrored(t) -- hash + local n = { } + for k, v in next, t do + n[v] = k + n[k] = v + end + return n +end + +function table.reversed(t) + if t then + local tt, tn = { }, #t + if tn > 0 then + local ttn = 0 + for i=tn,1,-1 do + ttn = ttn + 1 + tt[ttn] = t[i] + end + end + return tt + end +end + +function table.reverse(t) + if t then + local n = #t + for i=1,floor(n/2) do + local j = n - i + 1 + t[i], t[j] = t[j], t[i] + end + return t + end +end + +function table.sequenced(t,sep,simple) -- hash only + if not t then + return "" + end + local n = #t + local s = { } + if n > 0 then + -- indexed + for i=1,n do + s[i] = tostring(t[i]) + end + else + -- hashed + n = 0 + for k, v in sortedhash(t) do + if simple then + if v == true then + n = n + 1 + s[n] = k + elseif v and v~= "" then + n = n + 1 + s[n] = k .. "=" .. tostring(v) + end + else + n = n + 1 + s[n] = k .. "=" .. tostring(v) + end + end + end + return concat(s,sep or " | ") +end + +function table.print(t,...) + if type(t) ~= "table" then + print(tostring(t)) + else + serialize(print,t,...) + end +end + +if setinspector then + setinspector(function(v) if type(v) == "table" then serialize(print,v,"table") return true end end) +end + +-- -- -- obsolete but we keep them for a while and might comment them later -- -- -- + +-- roughly: copy-loop : unpack : sub == 0.9 : 0.4 : 0.45 (so in critical apps, use unpack) + +function table.sub(t,i,j) + return { unpack(t,i,j) } +end + +-- slower than #t on indexed tables (#t only returns the size of the numerically indexed slice) + +function table.is_empty(t) + return not t or not next(t) +end + +function table.has_one_entry(t) + return t and not next(t,next(t)) +end + +-- new + +function table.loweredkeys(t) -- maybe utf + local l = { } + for k, v in next, t do + l[lower(k)] = v + end + return l +end + +-- new, might move (maybe duplicate) + +function table.unique(old) + local hash = { } + local new = { } + local n = 0 + for i=1,#old do + local oi = old[i] + if not hash[oi] then + n = n + 1 + new[n] = oi + hash[oi] = true + end + end + return new +end + +function table.sorted(t,...) + sort(t,...) + return t -- still sorts in-place +end + +-- + +function table.values(t,s) -- optional sort flag + if t then + local values, keys, v = { }, { }, 0 + for key, value in next, t do + if not keys[value] then + v = v + 1 + values[v] = value + keys[k] = key + end + end + if s then + sort(values) + end + return values + else + return { } + end +end + +-- maybe this will move to util-tab.lua + +-- for k, v in table.filtered(t,pattern) do ... end +-- for k, v in table.filtered(t,pattern,true) do ... end +-- for k, v in table.filtered(t,pattern,true,cmp) do ... 
end + +function table.filtered(t,pattern,sort,cmp) + if t and type(pattern) == "string" then + if sort then + local s + if cmp then + -- it would be nice if the sort function would accept a third argument (or nicer, an optional first) + s = sortedhashkeys(t,function(a,b) return cmp(t,a,b) end) + else + s = sortedkeys(t) -- the robust one + end + local n = 0 + local m = #s + local function kv(s) + while n < m do + n = n + 1 + local k = s[n] + if find(k,pattern) then + return k, t[k] + end + end + end + return kv, s + else + local n = next(t) + local function iterator() + while n do + local k = n + n = next(t,k) + if find(k,pattern) then + return k, t[k] + end + end + end + return iterator, t + end + else + return nothing + end +end diff --git a/src/fontloader/misc/fontloader-languages.lua b/src/fontloader/misc/fontloader-languages.lua new file mode 100644 index 0000000..1ea8c1f --- /dev/null +++ b/src/fontloader/misc/fontloader-languages.lua @@ -0,0 +1,45 @@ +if not modules then modules = { } end modules ['luatex-languages'] = { + version = 1.001, + comment = "companion to luatex-languages.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- We borrow from ConTeXt. + +languages = languages or { } + +local loaded = { } + +function languages.loadpatterns(tag) + if not loaded[tag] then + loaded[tag] = 0 + local filename = kpse.find_file("lang-" .. tag .. ".lua") + if filename and filename == "" then + print("") + else + local whatever = loadfile(filename) + if type(whatever) == "function" then + whatever = whatever() + if type(whatever) == "table" then + local characters = whatever.patterns.characters or "" + local patterns = whatever.patterns.data or "" + local exceptions = whatever.exceptions.data or "" + local language = lang.new() + for b in string.utfvalues(characters) do + tex.setlccode(b,b) + end + lang.patterns(language, patterns) + lang.hyphenation(language, exceptions) + loaded[tag] = lang.id(language) + else + print("") + end + else + print("") + end + end + end + return loaded[tag] +end diff --git a/src/fontloader/misc/fontloader-languages.tex b/src/fontloader/misc/fontloader-languages.tex new file mode 100644 index 0000000..9778da3 --- /dev/null +++ b/src/fontloader/misc/fontloader-languages.tex @@ -0,0 +1,17 @@ +%D \module +%D [ file=luatex-fonts, +%D version=2009.12.01, +%D title=\LUATEX\ Support Macros, +%D subtitle=Generic \OPENTYPE\ Font Handler, +%D author=Hans Hagen, +%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}] + +%D Cf. 
discussion on \CONTEXT\ list: + +\directlua { + dofile(kpse.find_file("luatex-languages.lua","tex")) +} + +\def\loadpatterns#1{\directlua{tex.language = languages.loadpatterns("#1")}} + +\endinput diff --git a/src/fontloader/misc/fontloader-math.lua b/src/fontloader/misc/fontloader-math.lua new file mode 100644 index 0000000..c316182 --- /dev/null +++ b/src/fontloader/misc/fontloader-math.lua @@ -0,0 +1,53 @@ +if not modules then modules = { } end modules ['luatex-math'] = { + version = 1.001, + comment = "companion to luatex-math.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local gaps = { + [0x1D455] = 0x0210E, + [0x1D49D] = 0x0212C, + [0x1D4A0] = 0x02130, + [0x1D4A1] = 0x02131, + [0x1D4A3] = 0x0210B, + [0x1D4A4] = 0x02110, + [0x1D4A7] = 0x02112, + [0x1D4A8] = 0x02133, + [0x1D4AD] = 0x0211B, + [0x1D4BA] = 0x0212F, + [0x1D4BC] = 0x0210A, + [0x1D4C4] = 0x02134, + [0x1D506] = 0x0212D, + [0x1D50B] = 0x0210C, + [0x1D50C] = 0x02111, + [0x1D515] = 0x0211C, + [0x1D51D] = 0x02128, + [0x1D53A] = 0x02102, + [0x1D53F] = 0x0210D, + [0x1D545] = 0x02115, + [0x1D547] = 0x02119, + [0x1D548] = 0x0211A, + [0x1D549] = 0x0211D, + [0x1D551] = 0x02124, +} + + +local function fixmath(tfmdata,key,value) + if value then + local characters = tfmdata.characters + for gap, mess in pairs(gaps) do + characters[gap] = characters[mess] + end + end +end + +fonts.handlers.otf.features.register { + name = "fixmath", + description = "math font fixing", + manipulators = { + base = fixmath, + node = fixmath, + } +} diff --git a/src/fontloader/misc/fontloader-math.tex b/src/fontloader/misc/fontloader-math.tex new file mode 100644 index 0000000..604b4a1 --- /dev/null +++ b/src/fontloader/misc/fontloader-math.tex @@ -0,0 +1,1874 @@ +%D \module +%D [ file=luatex-math, +%D version=2013.04.29, +%D title=\LUATEX\ Support Macros, +%D subtitle=An exmaple of math, +%D author=Hans Hagen, +%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}] + +%D This module is in no way a complete plain math implementation. I made this file +%D because I needed it for a tutorial for (mostly) plain \TEX\ users. There are +%D several ways to support math in \LUATEX, and this is just one of them. It was the +%D quickest hack I could come up with and it stays somewhat close to the traditional +%D approach (and thereby far from the \CONTEXT\ way). This file is mainly meant for +%D Boguslaw Jackowski. 
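
The companion luatex-math.lua added above registers the "fixmath" font feature: it fills the reserved holes in the Unicode Mathematical Alphanumeric Symbols block by aliasing each gap to its Letterlike Symbols counterpart (for instance U+1D455, mathematical italic small h, becomes U+210E, the Planck constant), and the font definitions below request it with fixmath=yes. A minimal sketch of how the remapping could be checked from the Lua side, for instance from a small helper file loaded with dofile in the same way luatex-math.lua is; font.current and font.getfont are standard LuaTeX calls, while the exact layout of the returned font table is an assumption about the generic loader:

    -- illustrative sketch only: check that fixmath populated a gap slot
    local id    = font.current()            -- id of the font currently in use
    local data  = font.getfont(id)          -- font table as handed back to TeX
    local chars = data and data.characters or { }
    -- U+1D455 is a reserved hole; with fixmath=yes it is aliased to U+210E
    print("fixmath gap filled:", chars[0x1D455] ~= nil)
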
+ +% we provide a remap feature + +\directlua{dofile(kpse.find_file('luatex-math.lua'))} + +% a bunch of fonts: + +\let \teni = \relax +\let \seveni = \relax +\let \fivei = \relax +\let \tensy = \relax +\let \sevensy = \relax +\let \fivesy = \relax +\let \tenex = \relax +\let \sevenbf = \relax +\let \fivebf = \relax + +\def\latinmodern + {\font\tenrm = file:lmroman10-regular.otf:+liga;+kern;+tlig;+trep at 10pt + \font\sevenrm = file:lmroman7-regular.otf:+liga;+kern;+tlig;+trep at 7pt + \font\fiverm = file:lmroman5-regular.otf:+liga;+kern;+tlig;+trep at 5pt + % + \font\tentt = file:lmmono10-regular.otf at 10pt + \font\tensl = file:lmromanslant10-regular.otf:+liga;+kern;+tlig;+trep at 10pt + \font\tenit = file:lmroman10-italic.otf:+liga;+kern;+tlig;+trep at 10pt + \font\tenbf = file:lmroman10-bold.otf:+liga;+kern;+tlig;+trep at 10pt + \font\tenbi = file:lmroman10-bolditalic.otf:+liga;+kern;+tlig;+trep at 10pt + % + \font\mathfonttextupright = file:latinmodern-math.otf:ssty=0;fixmath=yes at 10pt + \font\mathfontscriptupright = file:latinmodern-math.otf:ssty=1;fixmath=yes at 7pt + \font\mathfontscriptscriptupright = file:latinmodern-math.otf:ssty=2;fixmath=yes at 5pt + % + \textfont 0 = \mathfonttextupright + \scriptfont 0 = \mathfontscriptupright + \scriptscriptfont 0 = \mathfontscriptscriptupright + % + \tenrm} + +\def\lucidabright + {\font\tenrm = file:lucidabrightot.otf:+liga;+kern;+tlig;+trep at 10pt + \font\sevenrm = file:lucidabrightot.otf:+liga;+kern;+tlig;+trep at 7pt + \font\fiverm = file:lucidabrightot.otf:+liga;+kern;+tlig;+trep at 5pt + % + \font\tentt = file:lucidabrightot.otf at 10pt + \font\tenit = file:lucidabrightot.otf:+liga;+kern;+tlig;+trep at 10pt + \font\tenit = file:lucidabrightot-italic.otf:+liga;+kern;+tlig;+trep at 10pt + \font\tenbf = file:lucidabrightot-demi.otf:+liga;+kern;+tlig;+trep at 10pt + \font\tenbi = file:lucidabrightot-demiitalic.otf:+liga;+kern;+tlig;+trep at 10pt + % + \font\mathfonttextupright = file:lucidabrightmathot.otf:ssty=0;fixmath=yes at 10pt + \font\mathfontscriptupright = file:lucidabrightmathot.otf:ssty=1;fixmath=yes at 7pt + \font\mathfontscriptscriptupright = file:lucidabrightmathot.otf:ssty=2;fixmath=yes at 5pt + % + \textfont 0 = \mathfonttextupright + \scriptfont 0 = \mathfontscriptupright + \scriptscriptfont 0 = \mathfontscriptscriptupright + % + \tenrm} + +\directlua { + if arguments["mtx:lucidabright"] then + tex.print("\string\\lucidabright") + else + tex.print("\string\\latinmodern") + end +} + +\newtoks\everymathrm +\newtoks\everymathmit +\newtoks\everymathcal +\newtoks\everymathit +\newtoks\everymathsl +\newtoks\everymathbf +\newtoks\everymathbi +\newtoks\everymathtt + +\def\rm{\fam0\relax\the\everymathrm\relax\tenrm\relax} +\def\it{\fam0\relax\the\everymathit\relax\tenit\relax} +\def\sl{\fam0\relax\the\everymathsl\relax\tensl\relax} +\def\bf{\fam0\relax\the\everymathbf\relax\tenbf\relax} +\def\bi{\fam0\relax\the\everymathbi\relax\tenbi\relax} +\def\tt{\fam0\relax\the\everymathtt\relax\tentt\relax} + +\let\mit \relax % use names or \Uchar or define a vector +\let\cal \relax % idem, i'm not in the mood for this now +\let\oldstyle\relax % no longer misuse of math mode + +% tex is fast enough for this kind of assignments: + +\everymathrm {% + \Umathcode"0041="0"0"0041% + \Umathcode"0042="0"0"0042% + \Umathcode"0043="0"0"0043% + \Umathcode"0044="0"0"0044% + \Umathcode"0045="0"0"0045% + \Umathcode"0046="0"0"0046% + \Umathcode"0047="0"0"0047% + \Umathcode"0048="0"0"0048% + \Umathcode"0049="0"0"0049% + \Umathcode"004A="0"0"004A% + 
\Umathcode"004B="0"0"004B% + \Umathcode"004C="0"0"004C% + \Umathcode"004D="0"0"004D% + \Umathcode"004E="0"0"004E% + \Umathcode"004F="0"0"004F% + \Umathcode"0050="0"0"0050% + \Umathcode"0051="0"0"0051% + \Umathcode"0052="0"0"0052% + \Umathcode"0053="0"0"0053% + \Umathcode"0054="0"0"0054% + \Umathcode"0055="0"0"0055% + \Umathcode"0056="0"0"0056% + \Umathcode"0057="0"0"0057% + \Umathcode"0058="0"0"0058% + \Umathcode"0059="0"0"0059% + \Umathcode"005A="0"0"005A% + \Umathcode"0061="0"0"0061% + \Umathcode"0062="0"0"0062% + \Umathcode"0063="0"0"0063% + \Umathcode"0064="0"0"0064% + \Umathcode"0065="0"0"0065% + \Umathcode"0066="0"0"0066% + \Umathcode"0067="0"0"0067% + \Umathcode"0068="0"0"0068% + \Umathcode"0069="0"0"0069% + \Umathcode"006A="0"0"006A% + \Umathcode"006B="0"0"006B% + \Umathcode"006C="0"0"006C% + \Umathcode"006D="0"0"006D% + \Umathcode"006E="0"0"006E% + \Umathcode"006F="0"0"006F% + \Umathcode"0070="0"0"0070% + \Umathcode"0071="0"0"0071% + \Umathcode"0072="0"0"0072% + \Umathcode"0073="0"0"0073% + \Umathcode"0074="0"0"0074% + \Umathcode"0075="0"0"0075% + \Umathcode"0076="0"0"0076% + \Umathcode"0077="0"0"0077% + \Umathcode"0078="0"0"0078% + \Umathcode"0079="0"0"0079% + \Umathcode"007A="0"0"007A% + \Umathcode"0391="0"0"0391% + \Umathcode"0392="0"0"0392% + \Umathcode"0393="0"0"0393% + \Umathcode"0394="0"0"0394% + \Umathcode"0395="0"0"0395% + \Umathcode"0396="0"0"0396% + \Umathcode"0397="0"0"0397% + \Umathcode"0398="0"0"0398% + \Umathcode"0399="0"0"0399% + \Umathcode"039A="0"0"039A% + \Umathcode"039B="0"0"039B% + \Umathcode"039C="0"0"039C% + \Umathcode"039D="0"0"039D% + \Umathcode"039E="0"0"039E% + \Umathcode"039F="0"0"039F% + \Umathcode"03A0="0"0"03A0% + \Umathcode"03A1="0"0"03A1% + \Umathcode"03A3="0"0"03A3% + \Umathcode"03A4="0"0"03A4% + \Umathcode"03A5="0"0"03A5% + \Umathcode"03A6="0"0"03A6% + \Umathcode"03A7="0"0"03A7% + \Umathcode"03A8="0"0"03A8% + \Umathcode"03A9="0"0"03A9% + \Umathcode"03B1="0"0"03B1% + \Umathcode"03B2="0"0"03B2% + \Umathcode"03B3="0"0"03B3% + \Umathcode"03B4="0"0"03B4% + \Umathcode"03B5="0"0"03B5% + \Umathcode"03B6="0"0"03B6% + \Umathcode"03B7="0"0"03B7% + \Umathcode"03B8="0"0"03B8% + \Umathcode"03B9="0"0"03B9% + \Umathcode"03BA="0"0"03BA% + \Umathcode"03BB="0"0"03BB% + \Umathcode"03BC="0"0"03BC% + \Umathcode"03BD="0"0"03BD% + \Umathcode"03BE="0"0"03BE% + \Umathcode"03BF="0"0"03BF% + \Umathcode"03C0="0"0"03C0% + \Umathcode"03C1="0"0"03C1% + \Umathcode"03C2="0"0"03C2% + \Umathcode"03C3="0"0"03C3% + \Umathcode"03C4="0"0"03C4% + \Umathcode"03C5="0"0"03C5% + \Umathcode"03C6="0"0"03C6% + \Umathcode"03C7="0"0"03C7% + \Umathcode"03C8="0"0"03C8% + \Umathcode"03C9="0"0"03C9% + \Umathcode"03D1="0"0"03D1% + \Umathcode"03D5="0"0"03D5% + \Umathcode"03D6="0"0"03D6% + \Umathcode"03F0="0"0"03F0% + \Umathcode"03F1="0"0"03F1% + \Umathcode"03F4="0"0"03F4% + \Umathcode"03F5="0"0"03F5% + \Umathcode"2202="0"0"2202% + \Umathcode"2207="0"0"2207% + \relax +} + +\everymathmit {% + % not done +} + +\everymathcal {% + % not done +} + +\everymathit {% + \Umathcode"0041="0"0"1D434% + \Umathcode"0042="0"0"1D435% + \Umathcode"0043="0"0"1D436% + \Umathcode"0044="0"0"1D437% + \Umathcode"0045="0"0"1D438% + \Umathcode"0046="0"0"1D439% + \Umathcode"0047="0"0"1D43A% + \Umathcode"0048="0"0"1D43B% + \Umathcode"0049="0"0"1D43C% + \Umathcode"004A="0"0"1D43D% + \Umathcode"004B="0"0"1D43E% + \Umathcode"004C="0"0"1D43F% + \Umathcode"004D="0"0"1D440% + \Umathcode"004E="0"0"1D441% + \Umathcode"004F="0"0"1D442% + \Umathcode"0050="0"0"1D443% + \Umathcode"0051="0"0"1D444% + \Umathcode"0052="0"0"1D445% + 
\Umathcode"0053="0"0"1D446% + \Umathcode"0054="0"0"1D447% + \Umathcode"0055="0"0"1D448% + \Umathcode"0056="0"0"1D449% + \Umathcode"0057="0"0"1D44A% + \Umathcode"0058="0"0"1D44B% + \Umathcode"0059="0"0"1D44C% + \Umathcode"005A="0"0"1D44D% + \Umathcode"0061="0"0"1D44E% + \Umathcode"0062="0"0"1D44F% + \Umathcode"0063="0"0"1D450% + \Umathcode"0064="0"0"1D451% + \Umathcode"0065="0"0"1D452% + \Umathcode"0066="0"0"1D453% + \Umathcode"0067="0"0"1D454% + \Umathcode"0068="0"0"0210E% + \Umathcode"0069="0"0"1D456% + \Umathcode"006A="0"0"1D457% + \Umathcode"006B="0"0"1D458% + \Umathcode"006C="0"0"1D459% + \Umathcode"006D="0"0"1D45A% + \Umathcode"006E="0"0"1D45B% + \Umathcode"006F="0"0"1D45C% + \Umathcode"0070="0"0"1D45D% + \Umathcode"0071="0"0"1D45E% + \Umathcode"0072="0"0"1D45F% + \Umathcode"0073="0"0"1D460% + \Umathcode"0074="0"0"1D461% + \Umathcode"0075="0"0"1D462% + \Umathcode"0076="0"0"1D463% + \Umathcode"0077="0"0"1D464% + \Umathcode"0078="0"0"1D465% + \Umathcode"0079="0"0"1D466% + \Umathcode"007A="0"0"1D467% + \Umathcode"0391="0"0"1D6E2% + \Umathcode"0392="0"0"1D6E3% + \Umathcode"0393="0"0"1D6E4% + \Umathcode"0394="0"0"1D6E5% + \Umathcode"0395="0"0"1D6E6% + \Umathcode"0396="0"0"1D6E7% + \Umathcode"0397="0"0"1D6E8% + \Umathcode"0398="0"0"1D6E9% + \Umathcode"0399="0"0"1D6EA% + \Umathcode"039A="0"0"1D6EB% + \Umathcode"039B="0"0"1D6EC% + \Umathcode"039C="0"0"1D6ED% + \Umathcode"039D="0"0"1D6EE% + \Umathcode"039E="0"0"1D6EF% + \Umathcode"039F="0"0"1D6F0% + \Umathcode"03A0="0"0"1D6F1% + \Umathcode"03A1="0"0"1D6F2% + \Umathcode"03A3="0"0"1D6F4% + \Umathcode"03A4="0"0"1D6F5% + \Umathcode"03A5="0"0"1D6F6% + \Umathcode"03A6="0"0"1D6F7% + \Umathcode"03A7="0"0"1D6F8% + \Umathcode"03A8="0"0"1D6F9% + \Umathcode"03A9="0"0"1D6FA% + \Umathcode"03B1="0"0"1D6FC% + \Umathcode"03B2="0"0"1D6FD% + \Umathcode"03B3="0"0"1D6FE% + \Umathcode"03B4="0"0"1D6FF% + \Umathcode"03B5="0"0"1D700% + \Umathcode"03B6="0"0"1D701% + \Umathcode"03B7="0"0"1D702% + \Umathcode"03B8="0"0"1D703% + \Umathcode"03B9="0"0"1D704% + \Umathcode"03BA="0"0"1D705% + \Umathcode"03BB="0"0"1D706% + \Umathcode"03BC="0"0"1D707% + \Umathcode"03BD="0"0"1D708% + \Umathcode"03BE="0"0"1D709% + \Umathcode"03BF="0"0"1D70A% + \Umathcode"03C0="0"0"1D70B% + \Umathcode"03C1="0"0"1D70C% + \Umathcode"03C2="0"0"1D70D% + \Umathcode"03C3="0"0"1D70E% + \Umathcode"03C4="0"0"1D70F% + \Umathcode"03C5="0"0"1D710% + \Umathcode"03C6="0"0"1D711% + \Umathcode"03C7="0"0"1D712% + \Umathcode"03C8="0"0"1D713% + \Umathcode"03C9="0"0"1D714% + \Umathcode"03D1="0"0"1D717% + \Umathcode"03D5="0"0"1D719% + \Umathcode"03D6="0"0"1D71B% + \Umathcode"03F0="0"0"1D718% + \Umathcode"03F1="0"0"1D71A% + \Umathcode"03F4="0"0"1D6F3% + \Umathcode"03F5="0"0"1D716% + \Umathcode"2202="0"0"1D715% + \Umathcode"2207="0"0"1D6FB% + \relax +} + +\everymathsl {% + \the\everymathit +} + +\everymathbf {% + \Umathcode"0030="0"0"1D7CE% + \Umathcode"0031="0"0"1D7CF% + \Umathcode"0032="0"0"1D7D0% + \Umathcode"0033="0"0"1D7D1% + \Umathcode"0034="0"0"1D7D2% + \Umathcode"0035="0"0"1D7D3% + \Umathcode"0036="0"0"1D7D4% + \Umathcode"0037="0"0"1D7D5% + \Umathcode"0038="0"0"1D7D6% + \Umathcode"0039="0"0"1D7D7% + \Umathcode"0041="0"0"1D400% + \Umathcode"0042="0"0"1D401% + \Umathcode"0043="0"0"1D402% + \Umathcode"0044="0"0"1D403% + \Umathcode"0045="0"0"1D404% + \Umathcode"0046="0"0"1D405% + \Umathcode"0047="0"0"1D406% + \Umathcode"0048="0"0"1D407% + \Umathcode"0049="0"0"1D408% + \Umathcode"004A="0"0"1D409% + \Umathcode"004B="0"0"1D40A% + \Umathcode"004C="0"0"1D40B% + \Umathcode"004D="0"0"1D40C% + \Umathcode"004E="0"0"1D40D% + 
\Umathcode"004F="0"0"1D40E% + \Umathcode"0050="0"0"1D40F% + \Umathcode"0051="0"0"1D410% + \Umathcode"0052="0"0"1D411% + \Umathcode"0053="0"0"1D412% + \Umathcode"0054="0"0"1D413% + \Umathcode"0055="0"0"1D414% + \Umathcode"0056="0"0"1D415% + \Umathcode"0057="0"0"1D416% + \Umathcode"0058="0"0"1D417% + \Umathcode"0059="0"0"1D418% + \Umathcode"005A="0"0"1D419% + \Umathcode"0061="0"0"1D41A% + \Umathcode"0062="0"0"1D41B% + \Umathcode"0063="0"0"1D41C% + \Umathcode"0064="0"0"1D41D% + \Umathcode"0065="0"0"1D41E% + \Umathcode"0066="0"0"1D41F% + \Umathcode"0067="0"0"1D420% + \Umathcode"0068="0"0"1D421% + \Umathcode"0069="0"0"1D422% + \Umathcode"006A="0"0"1D423% + \Umathcode"006B="0"0"1D424% + \Umathcode"006C="0"0"1D425% + \Umathcode"006D="0"0"1D426% + \Umathcode"006E="0"0"1D427% + \Umathcode"006F="0"0"1D428% + \Umathcode"0070="0"0"1D429% + \Umathcode"0071="0"0"1D42A% + \Umathcode"0072="0"0"1D42B% + \Umathcode"0073="0"0"1D42C% + \Umathcode"0074="0"0"1D42D% + \Umathcode"0075="0"0"1D42E% + \Umathcode"0076="0"0"1D42F% + \Umathcode"0077="0"0"1D430% + \Umathcode"0078="0"0"1D431% + \Umathcode"0079="0"0"1D432% + \Umathcode"007A="0"0"1D433% + \Umathcode"0391="0"0"1D6A8% + \Umathcode"0392="0"0"1D6A9% + \Umathcode"0393="0"0"1D6AA% + \Umathcode"0394="0"0"1D6AB% + \Umathcode"0395="0"0"1D6AC% + \Umathcode"0396="0"0"1D6AD% + \Umathcode"0397="0"0"1D6AE% + \Umathcode"0398="0"0"1D6AF% + \Umathcode"0399="0"0"1D6B0% + \Umathcode"039A="0"0"1D6B1% + \Umathcode"039B="0"0"1D6B2% + \Umathcode"039C="0"0"1D6B3% + \Umathcode"039D="0"0"1D6B4% + \Umathcode"039E="0"0"1D6B5% + \Umathcode"039F="0"0"1D6B6% + \Umathcode"03A0="0"0"1D6B7% + \Umathcode"03A1="0"0"1D6B8% + \Umathcode"03A3="0"0"1D6BA% + \Umathcode"03A4="0"0"1D6BB% + \Umathcode"03A5="0"0"1D6BC% + \Umathcode"03A6="0"0"1D6BD% + \Umathcode"03A7="0"0"1D6BE% + \Umathcode"03A8="0"0"1D6BF% + \Umathcode"03A9="0"0"1D6C0% + \Umathcode"03B1="0"0"1D6C2% + \Umathcode"03B2="0"0"1D6C3% + \Umathcode"03B3="0"0"1D6C4% + \Umathcode"03B4="0"0"1D6C5% + \Umathcode"03B5="0"0"1D6C6% + \Umathcode"03B6="0"0"1D6C7% + \Umathcode"03B7="0"0"1D6C8% + \Umathcode"03B8="0"0"1D6C9% + \Umathcode"03B9="0"0"1D6CA% + \Umathcode"03BA="0"0"1D6CB% + \Umathcode"03BB="0"0"1D6CC% + \Umathcode"03BC="0"0"1D6CD% + \Umathcode"03BD="0"0"1D6CE% + \Umathcode"03BE="0"0"1D6CF% + \Umathcode"03BF="0"0"1D6D0% + \Umathcode"03C0="0"0"1D6D1% + \Umathcode"03C1="0"0"1D6D2% + \Umathcode"03C2="0"0"1D6D3% + \Umathcode"03C3="0"0"1D6D4% + \Umathcode"03C4="0"0"1D6D5% + \Umathcode"03C5="0"0"1D6D6% + \Umathcode"03C6="0"0"1D6D7% + \Umathcode"03C7="0"0"1D6D8% + \Umathcode"03C8="0"0"1D6D9% + \Umathcode"03C9="0"0"1D6DA% + \Umathcode"03D1="0"0"1D6DD% + \Umathcode"03D5="0"0"1D6DF% + \Umathcode"03D6="0"0"1D6E1% + \Umathcode"03F0="0"0"1D6DE% + \Umathcode"03F1="0"0"1D6E0% + \Umathcode"03F4="0"0"1D6B9% + \Umathcode"03F5="0"0"1D6DC% + \Umathcode"2202="0"0"1D6DB% + \Umathcode"2207="0"0"1D6C1% + \relax +} + +\everymathbi {% + \Umathcode"0030="0"0"1D7CE% + \Umathcode"0031="0"0"1D7CF% + \Umathcode"0032="0"0"1D7D0% + \Umathcode"0033="0"0"1D7D1% + \Umathcode"0034="0"0"1D7D2% + \Umathcode"0035="0"0"1D7D3% + \Umathcode"0036="0"0"1D7D4% + \Umathcode"0037="0"0"1D7D5% + \Umathcode"0038="0"0"1D7D6% + \Umathcode"0039="0"0"1D7D7% + \Umathcode"0041="0"0"1D468% + \Umathcode"0042="0"0"1D469% + \Umathcode"0043="0"0"1D46A% + \Umathcode"0044="0"0"1D46B% + \Umathcode"0045="0"0"1D46C% + \Umathcode"0046="0"0"1D46D% + \Umathcode"0047="0"0"1D46E% + \Umathcode"0048="0"0"1D46F% + \Umathcode"0049="0"0"1D470% + \Umathcode"004A="0"0"1D471% + \Umathcode"004B="0"0"1D472% + 
\Umathcode"004C="0"0"1D473% + \Umathcode"004D="0"0"1D474% + \Umathcode"004E="0"0"1D475% + \Umathcode"004F="0"0"1D476% + \Umathcode"0050="0"0"1D477% + \Umathcode"0051="0"0"1D478% + \Umathcode"0052="0"0"1D479% + \Umathcode"0053="0"0"1D47A% + \Umathcode"0054="0"0"1D47B% + \Umathcode"0055="0"0"1D47C% + \Umathcode"0056="0"0"1D47D% + \Umathcode"0057="0"0"1D47E% + \Umathcode"0058="0"0"1D47F% + \Umathcode"0059="0"0"1D480% + \Umathcode"005A="0"0"1D481% + \Umathcode"0061="0"0"1D482% + \Umathcode"0062="0"0"1D483% + \Umathcode"0063="0"0"1D484% + \Umathcode"0064="0"0"1D485% + \Umathcode"0065="0"0"1D486% + \Umathcode"0066="0"0"1D487% + \Umathcode"0067="0"0"1D488% + \Umathcode"0068="0"0"1D489% + \Umathcode"0069="0"0"1D48A% + \Umathcode"006A="0"0"1D48B% + \Umathcode"006B="0"0"1D48C% + \Umathcode"006C="0"0"1D48D% + \Umathcode"006D="0"0"1D48E% + \Umathcode"006E="0"0"1D48F% + \Umathcode"006F="0"0"1D490% + \Umathcode"0070="0"0"1D491% + \Umathcode"0071="0"0"1D492% + \Umathcode"0072="0"0"1D493% + \Umathcode"0073="0"0"1D494% + \Umathcode"0074="0"0"1D495% + \Umathcode"0075="0"0"1D496% + \Umathcode"0076="0"0"1D497% + \Umathcode"0077="0"0"1D498% + \Umathcode"0078="0"0"1D499% + \Umathcode"0079="0"0"1D49A% + \Umathcode"007A="0"0"1D49B% + \Umathcode"0391="0"0"1D71C% + \Umathcode"0392="0"0"1D71D% + \Umathcode"0393="0"0"1D71E% + \Umathcode"0394="0"0"1D71F% + \Umathcode"0395="0"0"1D720% + \Umathcode"0396="0"0"1D721% + \Umathcode"0397="0"0"1D722% + \Umathcode"0398="0"0"1D723% + \Umathcode"0399="0"0"1D724% + \Umathcode"039A="0"0"1D725% + \Umathcode"039B="0"0"1D726% + \Umathcode"039C="0"0"1D727% + \Umathcode"039D="0"0"1D728% + \Umathcode"039E="0"0"1D729% + \Umathcode"039F="0"0"1D72A% + \Umathcode"03A0="0"0"1D72B% + \Umathcode"03A1="0"0"1D72C% + \Umathcode"03A3="0"0"1D72E% + \Umathcode"03A4="0"0"1D72F% + \Umathcode"03A5="0"0"1D730% + \Umathcode"03A6="0"0"1D731% + \Umathcode"03A7="0"0"1D732% + \Umathcode"03A8="0"0"1D733% + \Umathcode"03A9="0"0"1D734% + \Umathcode"03B1="0"0"1D736% + \Umathcode"03B2="0"0"1D737% + \Umathcode"03B3="0"0"1D738% + \Umathcode"03B4="0"0"1D739% + \Umathcode"03B5="0"0"1D73A% + \Umathcode"03B6="0"0"1D73B% + \Umathcode"03B7="0"0"1D73C% + \Umathcode"03B8="0"0"1D73D% + \Umathcode"03B9="0"0"1D73E% + \Umathcode"03BA="0"0"1D73F% + \Umathcode"03BB="0"0"1D740% + \Umathcode"03BC="0"0"1D741% + \Umathcode"03BD="0"0"1D742% + \Umathcode"03BE="0"0"1D743% + \Umathcode"03BF="0"0"1D744% + \Umathcode"03C0="0"0"1D745% + \Umathcode"03C1="0"0"1D746% + \Umathcode"03C2="0"0"1D747% + \Umathcode"03C3="0"0"1D748% + \Umathcode"03C4="0"0"1D749% + \Umathcode"03C5="0"0"1D74A% + \Umathcode"03C6="0"0"1D74B% + \Umathcode"03C7="0"0"1D74C% + \Umathcode"03C8="0"0"1D74D% + \Umathcode"03C9="0"0"1D74E% + \Umathcode"03D1="0"0"1D751% + \Umathcode"03D5="0"0"1D753% + \Umathcode"03D6="0"0"1D755% + \Umathcode"03F0="0"0"1D752% + \Umathcode"03F1="0"0"1D754% + \Umathcode"03F4="0"0"1D72D% + \Umathcode"03F5="0"0"1D750% + \Umathcode"2202="0"0"1D74F% + \Umathcode"2207="0"0"1D735% + \relax +} + +\everymathtt {% + % not done +} + +\Udelcode "00021 = "0 "00021 +\Udelcode "00028 = "0 "00028 +\Udelcode "00028 = "0 "00028 +\Udelcode "00029 = "0 "00029 +\Udelcode "00029 = "0 "00029 +\Udelcode "0002F = "0 "0002F +\Udelcode "0002F = "0 "0002F +\Udelcode "0002F = "0 "02044 +\Udelcode "0003F = "0 "0003F +\Udelcode "0005B = "0 "0005B +\Udelcode "0005B = "0 "0005B +\Udelcode "0005D = "0 "0005D +\Udelcode "0005D = "0 "0005D +\Udelcode "0007B = "0 "0007B +\Udelcode "0007B = "0 "0007B +\Udelcode "0007C = "0 "0007C +\Udelcode "0007C = "0 "0007C +\Udelcode "0007C = "0 
"0007C +\Udelcode "0007C = "0 "0007C +\Udelcode "0007C = "0 "0007C +\Udelcode "0007D = "0 "0007D +\Udelcode "0007D = "0 "0007D +\Udelcode "02016 = "0 "02016 +\Udelcode "02016 = "0 "02016 +\Udelcode "02016 = "0 "02016 +\Udelcode "02016 = "0 "02016 +\Udelcode "02016 = "0 "02016 +\Udelcode "02044 = "0 "02044 +\Udelcode "02044 = "0 "02044 +\Udelcode "02308 = "0 "02308 +\Udelcode "02308 = "0 "02308 +\Udelcode "02308 = "0 "02308 +\Udelcode "02308 = "0 "02308 +\Udelcode "02308 = "0 "02308 +\Udelcode "02309 = "0 "02309 +\Udelcode "02309 = "0 "02309 +\Udelcode "02309 = "0 "02309 +\Udelcode "02309 = "0 "02309 +\Udelcode "02309 = "0 "02309 +\Udelcode "0230A = "0 "0230A +\Udelcode "0230A = "0 "0230A +\Udelcode "0230B = "0 "0230B +\Udelcode "0230B = "0 "0230B +\Udelcode "0231C = "0 "0231C +\Udelcode "0231C = "0 "0231C +\Udelcode "0231D = "0 "0231D +\Udelcode "0231D = "0 "0231D +\Udelcode "0231E = "0 "0231E +\Udelcode "0231E = "0 "0231E +\Udelcode "0231F = "0 "0231F +\Udelcode "0231F = "0 "0231F +\Udelcode "023B0 = "0 "023B0 +\Udelcode "023B0 = "0 "023B0 +\Udelcode "023B1 = "0 "023B1 +\Udelcode "023B1 = "0 "023B1 +\Udelcode "027E6 = "0 "027E6 +\Udelcode "027E6 = "0 "027E6 +\Udelcode "027E7 = "0 "027E7 +\Udelcode "027E7 = "0 "027E7 +\Udelcode "027E8 = "0 "027E8 +\Udelcode "027E8 = "0 "027E8 +\Udelcode "027E9 = "0 "027E9 +\Udelcode "027E9 = "0 "027E9 +\Udelcode "027EA = "0 "027EA +\Udelcode "027EA = "0 "027EA +\Udelcode "027EB = "0 "027EB +\Udelcode "027EB = "0 "027EB +\Udelcode "027EE = "0 "027EE +\Udelcode "027EE = "0 "027EE +\Udelcode "027EF = "0 "027EF +\Udelcode "027EF = "0 "027EF + +\Umathcode "00021 = "5 "0 "00021 +\Umathcode "00022 = "0 "0 "00022 +\Umathcode "00027 = "0 "0 "00027 +\Umathcode "00028 = "4 "0 "00028 +\Umathcode "00029 = "5 "0 "00029 +\Umathcode "0002A = "2 "0 "02217 +\Umathcode "0002B = "2 "0 "0002B +\Umathcode "0002C = "6 "0 "0002C +\Umathcode "0002D = "2 "0 "02212 +\Umathcode "0002E = "6 "0 "0002E +\Umathcode "0002F = "4 "0 "02044 +\Umathcode "0003A = "3 "0 "0003A +\Umathcode "0003B = "6 "0 "0003B +\Umathcode "0003C = "3 "0 "0003C +\Umathcode "0003D = "3 "0 "0003D +\Umathcode "0003E = "3 "0 "0003E +\Umathcode "0003F = "5 "0 "0003F +\Umathcode "0005B = "4 "0 "0005B +\Umathcode "0005C = "0 "0 "0005C +\Umathcode "0005D = "5 "0 "0005D +\Umathcode "0007B = "4 "0 "0007B +\Umathcode "0007C = "0 "0 "0007C +\Umathcode "0007D = "5 "0 "0007D +\Umathcode "000A5 = "0 "0 "000A5 +\Umathcode "000A7 = "0 "0 "000A7 +\Umathcode "000AC = "0 "0 "000AC +\Umathcode "000B1 = "2 "0 "000B1 +\Umathcode "000B6 = "0 "0 "000B6 +\Umathcode "000B7 = "2 "0 "000B7 +\Umathcode "000D7 = "2 "0 "000D7 +\Umathcode "000F0 = "0 "0 "000F0 +\Umathcode "000F7 = "2 "0 "000F7 +\Umathcode "00338 = "3 "0 "00338 +\Umathcode "003F0 = "0 "0 "003F0 +\Umathcode "02016 = "0 "0 "02016 +\Umathcode "02020 = "2 "0 "02020 +\Umathcode "02021 = "2 "0 "02021 +\Umathcode "02022 = "2 "0 "02022 +\Umathcode "02026 = "0 "0 "02026 +\Umathcode "02032 = "0 "0 "02032 +\Umathcode "02033 = "0 "0 "02033 +\Umathcode "02034 = "0 "0 "02034 +\Umathcode "02044 = "0 "0 "02044 +\Umathcode "0207A = "2 "0 "0207A +\Umathcode "0207B = "2 "0 "0207B +\Umathcode "020DD = "0 "0 "020DD +\Umathcode "020DE = "0 "0 "020DE +\Umathcode "020DF = "0 "0 "020DF +\Umathcode "02111 = "0 "0 "02111 +\Umathcode "02113 = "0 "0 "02113 +\Umathcode "02118 = "0 "0 "02118 +\Umathcode "0211C = "0 "0 "0211C +\Umathcode "02132 = "0 "0 "02132 +\Umathcode "02135 = "0 "0 "02135 +\Umathcode "02136 = "0 "0 "02136 +\Umathcode "02137 = "0 "0 "02137 +\Umathcode "02138 = "0 "0 "02138 +\Umathcode 
"02141 = "0 "0 "02141 +\Umathcode "02142 = "0 "0 "02142 +\Umathcode "02143 = "0 "0 "02143 +\Umathcode "02144 = "0 "0 "02144 +\Umathcode "02145 = "0 "0 "02145 +\Umathcode "02146 = "0 "0 "02146 +\Umathcode "02147 = "0 "0 "02147 +\Umathcode "02148 = "0 "0 "02148 +\Umathcode "02149 = "0 "0 "02149 +\Umathcode "0214A = "0 "0 "0214A +\Umathcode "0214B = "2 "0 "0214B +\Umathcode "02190 = "3 "0 "02190 +\Umathcode "02191 = "3 "0 "02191 +\Umathcode "02192 = "3 "0 "02192 +\Umathcode "02193 = "3 "0 "02193 +\Umathcode "02194 = "3 "0 "02194 +\Umathcode "02195 = "3 "0 "02195 +\Umathcode "02196 = "3 "0 "02196 +\Umathcode "02197 = "3 "0 "02197 +\Umathcode "02198 = "3 "0 "02198 +\Umathcode "02199 = "3 "0 "02199 +\Umathcode "0219A = "3 "0 "0219A +\Umathcode "0219B = "3 "0 "0219B +\Umathcode "0219C = "3 "0 "0219C +\Umathcode "0219D = "3 "0 "0219D +\Umathcode "0219E = "3 "0 "0219E +\Umathcode "0219F = "3 "0 "0219F +\Umathcode "021A0 = "3 "0 "021A0 +\Umathcode "021A1 = "3 "0 "021A1 +\Umathcode "021A2 = "3 "0 "021A2 +\Umathcode "021A3 = "3 "0 "021A3 +\Umathcode "021A4 = "3 "0 "021A4 +\Umathcode "021A5 = "3 "0 "021A5 +\Umathcode "021A6 = "3 "0 "021A6 +\Umathcode "021A7 = "3 "0 "021A7 +\Umathcode "021A8 = "0 "0 "021A8 +\Umathcode "021A9 = "3 "0 "021A9 +\Umathcode "021AA = "3 "0 "021AA +\Umathcode "021AB = "3 "0 "021AB +\Umathcode "021AC = "3 "0 "021AC +\Umathcode "021AD = "3 "0 "021AD +\Umathcode "021AE = "3 "0 "021AE +\Umathcode "021AF = "3 "0 "021AF +\Umathcode "021B0 = "3 "0 "021B0 +\Umathcode "021B1 = "3 "0 "021B1 +\Umathcode "021B2 = "3 "0 "021B2 +\Umathcode "021B3 = "3 "0 "021B3 +\Umathcode "021B4 = "0 "0 "021B4 +\Umathcode "021B5 = "0 "0 "021B5 +\Umathcode "021B6 = "3 "0 "021B6 +\Umathcode "021B7 = "3 "0 "021B7 +\Umathcode "021B8 = "3 "0 "021B8 +\Umathcode "021B9 = "3 "0 "021B9 +\Umathcode "021BA = "3 "0 "021BA +\Umathcode "021BB = "3 "0 "021BB +\Umathcode "021BC = "3 "0 "021BC +\Umathcode "021BD = "3 "0 "021BD +\Umathcode "021BE = "3 "0 "021BE +\Umathcode "021BF = "3 "0 "021BF +\Umathcode "021C0 = "3 "0 "021C0 +\Umathcode "021C1 = "3 "0 "021C1 +\Umathcode "021C2 = "3 "0 "021C2 +\Umathcode "021C3 = "3 "0 "021C3 +\Umathcode "021C4 = "3 "0 "021C4 +\Umathcode "021C5 = "3 "0 "021C5 +\Umathcode "021C6 = "3 "0 "021C6 +\Umathcode "021C7 = "3 "0 "021C7 +\Umathcode "021C8 = "3 "0 "021C8 +\Umathcode "021C9 = "3 "0 "021C9 +\Umathcode "021CA = "3 "0 "021CA +\Umathcode "021CB = "3 "0 "021CB +\Umathcode "021CC = "3 "0 "021CC +\Umathcode "021CD = "3 "0 "021CD +\Umathcode "021CE = "3 "0 "021CE +\Umathcode "021CF = "3 "0 "021CF +\Umathcode "021D0 = "3 "0 "021D0 +\Umathcode "021D1 = "3 "0 "021D1 +\Umathcode "021D2 = "3 "0 "021D2 +\Umathcode "021D3 = "3 "0 "021D3 +\Umathcode "021D4 = "3 "0 "021D4 +\Umathcode "021D5 = "3 "0 "021D5 +\Umathcode "021D6 = "3 "0 "021D6 +\Umathcode "021D7 = "3 "0 "021D7 +\Umathcode "021D8 = "3 "0 "021D8 +\Umathcode "021D9 = "3 "0 "021D9 +\Umathcode "021DA = "3 "0 "021DA +\Umathcode "021DB = "3 "0 "021DB +\Umathcode "021DC = "3 "0 "021DC +\Umathcode "021DD = "3 "0 "021DD +\Umathcode "021DE = "3 "0 "021DE +\Umathcode "021DF = "3 "0 "021DF +\Umathcode "021E0 = "3 "0 "021E0 +\Umathcode "021E1 = "3 "0 "021E1 +\Umathcode "021E2 = "3 "0 "021E2 +\Umathcode "021E3 = "3 "0 "021E3 +\Umathcode "021E4 = "3 "0 "021E4 +\Umathcode "021E5 = "3 "0 "021E5 +\Umathcode "021E6 = "0 "0 "021E6 +\Umathcode "021E7 = "0 "0 "021E7 +\Umathcode "021E8 = "0 "0 "021E8 +\Umathcode "021E9 = "0 "0 "021E9 +\Umathcode "021EB = "0 "0 "021EB +\Umathcode "021F4 = "3 "0 "021F4 +\Umathcode "021F5 = "3 "0 "021F5 +\Umathcode "021F6 = "3 "0 
"021F6 +\Umathcode "021F7 = "3 "0 "021F7 +\Umathcode "021F8 = "3 "0 "021F8 +\Umathcode "021F9 = "3 "0 "021F9 +\Umathcode "021FA = "3 "0 "021FA +\Umathcode "021FB = "3 "0 "021FB +\Umathcode "021FC = "3 "0 "021FC +\Umathcode "021FD = "3 "0 "021FD +\Umathcode "021FE = "3 "0 "021FE +\Umathcode "021FF = "3 "0 "021FF +\Umathcode "02200 = "0 "0 "02200 +\Umathcode "02201 = "0 "0 "02201 +\Umathcode "02202 = "0 "0 "02202 +\Umathcode "02203 = "0 "0 "02203 +\Umathcode "02204 = "0 "0 "02204 +\Umathcode "02205 = "0 "0 "02205 +\Umathcode "02208 = "3 "0 "02208 +\Umathcode "02209 = "3 "0 "02209 +\Umathcode "0220B = "3 "0 "0220B +\Umathcode "0220C = "3 "0 "0220C +\Umathcode "0220F = "1 "0 "0220F +\Umathcode "02210 = "1 "0 "02210 +\Umathcode "02211 = "1 "0 "02211 +\Umathcode "02212 = "2 "0 "02212 +\Umathcode "02213 = "2 "0 "02213 +\Umathcode "02214 = "2 "0 "02214 +\Umathcode "02216 = "2 "0 "02216 +\Umathcode "02217 = "2 "0 "02217 +\Umathcode "02218 = "2 "0 "02218 +\Umathcode "02219 = "2 "0 "02219 +\Umathcode "0221D = "3 "0 "0221D +\Umathcode "0221E = "0 "0 "0221E +\Umathcode "0221F = "0 "0 "0221F +\Umathcode "02220 = "0 "0 "02220 +\Umathcode "02221 = "0 "0 "02221 +\Umathcode "02222 = "0 "0 "02222 +\Umathcode "02223 = "2 "0 "02223 +\Umathcode "02224 = "2 "0 "02224 +\Umathcode "02225 = "3 "0 "02225 +\Umathcode "02226 = "3 "0 "02226 +\Umathcode "02227 = "2 "0 "02227 +\Umathcode "02228 = "2 "0 "02228 +\Umathcode "02229 = "2 "0 "02229 +\Umathcode "0222A = "2 "0 "0222A +\Umathcode "0222B = "1 "0 "0222B +\Umathcode "0222C = "1 "0 "0222C +\Umathcode "0222D = "1 "0 "0222D +\Umathcode "0222E = "1 "0 "0222E +\Umathcode "0222F = "1 "0 "0222F +\Umathcode "02230 = "1 "0 "02230 +\Umathcode "02231 = "1 "0 "02231 +\Umathcode "02232 = "1 "0 "02232 +\Umathcode "02233 = "1 "0 "02233 +\Umathcode "02234 = "3 "0 "02234 +\Umathcode "02235 = "3 "0 "02235 +\Umathcode "02236 = "6 "0 "02236 +\Umathcode "02237 = "3 "0 "02237 +\Umathcode "02238 = "2 "0 "02238 +\Umathcode "02239 = "3 "0 "02239 +\Umathcode "0223C = "3 "0 "0223C +\Umathcode "0223D = "3 "0 "0223D +\Umathcode "02240 = "2 "0 "02240 +\Umathcode "02241 = "3 "0 "02241 +\Umathcode "02242 = "3 "0 "02242 +\Umathcode "02243 = "3 "0 "02243 +\Umathcode "02244 = "3 "0 "02244 +\Umathcode "02245 = "3 "0 "02245 +\Umathcode "02246 = "3 "0 "02246 +\Umathcode "02247 = "3 "0 "02247 +\Umathcode "02248 = "3 "0 "02248 +\Umathcode "02249 = "3 "0 "02249 +\Umathcode "0224A = "3 "0 "0224A +\Umathcode "0224C = "3 "0 "0224C +\Umathcode "0224D = "3 "0 "0224D +\Umathcode "0224E = "3 "0 "0224E +\Umathcode "02250 = "3 "0 "02250 +\Umathcode "02251 = "3 "0 "02251 +\Umathcode "02252 = "3 "0 "02252 +\Umathcode "02253 = "3 "0 "02253 +\Umathcode "02254 = "3 "0 "02254 +\Umathcode "02255 = "3 "0 "02255 +\Umathcode "02256 = "3 "0 "02256 +\Umathcode "02257 = "3 "0 "02257 +\Umathcode "02259 = "3 "0 "02259 +\Umathcode "0225A = "3 "0 "0225A +\Umathcode "0225B = "3 "0 "0225B +\Umathcode "0225C = "3 "0 "0225C +\Umathcode "0225D = "3 "0 "0225D +\Umathcode "0225E = "3 "0 "0225E +\Umathcode "0225F = "3 "0 "0225F +\Umathcode "02260 = "3 "0 "02260 +\Umathcode "02261 = "3 "0 "02261 +\Umathcode "02262 = "3 "0 "02262 +\Umathcode "02263 = "3 "0 "02263 +\Umathcode "02264 = "3 "0 "02264 +\Umathcode "02265 = "3 "0 "02265 +\Umathcode "02266 = "3 "0 "02266 +\Umathcode "02267 = "3 "0 "02267 +\Umathcode "02268 = "3 "0 "02268 +\Umathcode "02269 = "3 "0 "02269 +\Umathcode "0226A = "3 "0 "0226A +\Umathcode "0226B = "3 "0 "0226B +\Umathcode "0226C = "3 "0 "0226C +\Umathcode "0226D = "3 "0 "0226D +\Umathcode "0226E = "3 "0 "0226E +\Umathcode 
"0226F = "3 "0 "0226F +\Umathcode "02270 = "3 "0 "02270 +\Umathcode "02271 = "3 "0 "02271 +\Umathcode "02272 = "3 "0 "02272 +\Umathcode "02273 = "3 "0 "02273 +\Umathcode "02274 = "3 "0 "02274 +\Umathcode "02275 = "3 "0 "02275 +\Umathcode "02276 = "3 "0 "02276 +\Umathcode "02277 = "3 "0 "02277 +\Umathcode "02278 = "3 "0 "02278 +\Umathcode "02279 = "3 "0 "02279 +\Umathcode "0227A = "3 "0 "0227A +\Umathcode "0227B = "3 "0 "0227B +\Umathcode "0227C = "3 "0 "0227C +\Umathcode "0227D = "3 "0 "0227D +\Umathcode "0227E = "3 "0 "0227E +\Umathcode "0227F = "3 "0 "0227F +\Umathcode "02280 = "3 "0 "02280 +\Umathcode "02281 = "3 "0 "02281 +\Umathcode "02282 = "3 "0 "02282 +\Umathcode "02283 = "3 "0 "02283 +\Umathcode "02284 = "3 "0 "02284 +\Umathcode "02285 = "3 "0 "02285 +\Umathcode "02286 = "3 "0 "02286 +\Umathcode "02287 = "3 "0 "02287 +\Umathcode "02288 = "3 "0 "02288 +\Umathcode "02289 = "3 "0 "02289 +\Umathcode "0228A = "3 "0 "0228A +\Umathcode "0228B = "3 "0 "0228B +\Umathcode "0228E = "2 "0 "0228E +\Umathcode "0228F = "3 "0 "0228F +\Umathcode "02290 = "3 "0 "02290 +\Umathcode "02291 = "2 "0 "02291 +\Umathcode "02292 = "2 "0 "02292 +\Umathcode "02293 = "2 "0 "02293 +\Umathcode "02294 = "2 "0 "02294 +\Umathcode "02295 = "2 "0 "02295 +\Umathcode "02296 = "2 "0 "02296 +\Umathcode "02297 = "2 "0 "02297 +\Umathcode "02298 = "2 "0 "02298 +\Umathcode "02299 = "2 "0 "02299 +\Umathcode "0229A = "2 "0 "0229A +\Umathcode "0229B = "2 "0 "0229B +\Umathcode "0229C = "2 "0 "0229C +\Umathcode "0229D = "2 "0 "0229D +\Umathcode "0229E = "2 "0 "0229E +\Umathcode "0229F = "2 "0 "0229F +\Umathcode "022A0 = "2 "0 "022A0 +\Umathcode "022A1 = "2 "0 "022A1 +\Umathcode "022A2 = "3 "0 "022A2 +\Umathcode "022A3 = "3 "0 "022A3 +\Umathcode "022A4 = "0 "0 "022A4 +\Umathcode "022A5 = "0 "0 "022A5 +\Umathcode "022A7 = "3 "0 "022A7 +\Umathcode "022A8 = "3 "0 "022A8 +\Umathcode "022A9 = "3 "0 "022A9 +\Umathcode "022AA = "3 "0 "022AA +\Umathcode "022AB = "3 "0 "022AB +\Umathcode "022AC = "3 "0 "022AC +\Umathcode "022AD = "3 "0 "022AD +\Umathcode "022AE = "3 "0 "022AE +\Umathcode "022AF = "3 "0 "022AF +\Umathcode "022B2 = "2 "0 "022B2 +\Umathcode "022B3 = "2 "0 "022B3 +\Umathcode "022B8 = "3 "0 "022B8 +\Umathcode "022BA = "2 "0 "022BA +\Umathcode "022BB = "2 "0 "022BB +\Umathcode "022BC = "2 "0 "022BC +\Umathcode "022C0 = "1 "0 "022C0 +\Umathcode "022C1 = "1 "0 "022C1 +\Umathcode "022C2 = "1 "0 "022C2 +\Umathcode "022C3 = "1 "0 "022C3 +\Umathcode "022C4 = "2 "0 "022C4 +\Umathcode "022C5 = "2 "0 "022C5 +\Umathcode "022C6 = "2 "0 "022C6 +\Umathcode "022C7 = "2 "0 "022C7 +\Umathcode "022C8 = "3 "0 "022C8 +\Umathcode "022C9 = "2 "0 "022C9 +\Umathcode "022CA = "2 "0 "022CA +\Umathcode "022CB = "2 "0 "022CB +\Umathcode "022CC = "2 "0 "022CC +\Umathcode "022CE = "2 "0 "022CE +\Umathcode "022CF = "2 "0 "022CF +\Umathcode "022D0 = "3 "0 "022D0 +\Umathcode "022D1 = "3 "0 "022D1 +\Umathcode "022D2 = "2 "0 "022D2 +\Umathcode "022D3 = "2 "0 "022D3 +\Umathcode "022D4 = "3 "0 "022D4 +\Umathcode "022D6 = "2 "0 "022D6 +\Umathcode "022D7 = "2 "0 "022D7 +\Umathcode "022D8 = "3 "0 "022D8 +\Umathcode "022D9 = "3 "0 "022D9 +\Umathcode "022DA = "3 "0 "022DA +\Umathcode "022DB = "3 "0 "022DB +\Umathcode "022DC = "3 "0 "022DC +\Umathcode "022DD = "3 "0 "022DD +\Umathcode "022DE = "3 "0 "022DE +\Umathcode "022DF = "3 "0 "022DF +\Umathcode "022E0 = "3 "0 "022E0 +\Umathcode "022E1 = "3 "0 "022E1 +\Umathcode "022E2 = "3 "0 "022E2 +\Umathcode "022E3 = "3 "0 "022E3 +\Umathcode "022E4 = "3 "0 "022E4 +\Umathcode "022E5 = "3 "0 "022E5 +\Umathcode "022E6 = "3 "0 
"022E6 +\Umathcode "022E7 = "3 "0 "022E7 +\Umathcode "022E8 = "3 "0 "022E8 +\Umathcode "022E9 = "3 "0 "022E9 +\Umathcode "022EA = "3 "0 "022EA +\Umathcode "022EB = "3 "0 "022EB +\Umathcode "022EC = "3 "0 "022EC +\Umathcode "022ED = "3 "0 "022ED +\Umathcode "022EE = "0 "0 "022EE +\Umathcode "022EF = "0 "0 "022EF +\Umathcode "022F0 = "0 "0 "022F0 +\Umathcode "022F1 = "0 "0 "022F1 +\Umathcode "02300 = "0 "0 "02300 +\Umathcode "02308 = "4 "0 "02308 +\Umathcode "02309 = "5 "0 "02309 +\Umathcode "0230A = "4 "0 "0230A +\Umathcode "0230B = "5 "0 "0230B +\Umathcode "0231C = "4 "0 "0231C +\Umathcode "0231D = "5 "0 "0231D +\Umathcode "0231E = "4 "0 "0231E +\Umathcode "0231F = "5 "0 "0231F +\Umathcode "02322 = "3 "0 "02322 +\Umathcode "02323 = "3 "0 "02323 +\Umathcode "023B0 = "4 "0 "023B0 +\Umathcode "023B1 = "5 "0 "023B1 +\Umathcode "024C7 = "0 "0 "024C7 +\Umathcode "024C8 = "0 "0 "024C8 +\Umathcode "025A0 = "0 "0 "025A0 +\Umathcode "025A1 = "0 "0 "025A1 +\Umathcode "025A2 = "0 "0 "025A2 +\Umathcode "025B2 = "2 "0 "025B2 +\Umathcode "025B3 = "0 "0 "025B3 +\Umathcode "025B6 = "2 "0 "025B6 +\Umathcode "025B7 = "2 "0 "025B7 +\Umathcode "025BC = "2 "0 "025BC +\Umathcode "025BD = "2 "0 "025BD +\Umathcode "025C0 = "2 "0 "025C0 +\Umathcode "025C1 = "2 "0 "025C1 +\Umathcode "025CA = "0 "0 "025CA +\Umathcode "025EF = "2 "0 "025EF +\Umathcode "02605 = "0 "0 "02605 +\Umathcode "02660 = "0 "0 "02660 +\Umathcode "02661 = "0 "0 "02661 +\Umathcode "02662 = "0 "0 "02662 +\Umathcode "02663 = "0 "0 "02663 +\Umathcode "02666 = "0 "0 "02666 +\Umathcode "0266D = "0 "0 "0266D +\Umathcode "0266E = "0 "0 "0266E +\Umathcode "0266F = "0 "0 "0266F +\Umathcode "02713 = "0 "0 "02713 +\Umathcode "02720 = "0 "0 "02720 +\Umathcode "027E6 = "4 "0 "027E6 +\Umathcode "027E7 = "5 "0 "027E7 +\Umathcode "027E8 = "4 "0 "027E8 +\Umathcode "027E9 = "5 "0 "027E9 +\Umathcode "027EA = "4 "0 "027EA +\Umathcode "027EB = "5 "0 "027EB +\Umathcode "027EE = "4 "0 "027EE +\Umathcode "027EF = "5 "0 "027EF +\Umathcode "027F5 = "3 "0 "027F5 +\Umathcode "027F6 = "3 "0 "027F6 +\Umathcode "027F7 = "3 "0 "027F7 +\Umathcode "027F8 = "3 "0 "027F8 +\Umathcode "027F9 = "3 "0 "027F9 +\Umathcode "027FA = "3 "0 "027FA +\Umathcode "027FB = "3 "0 "027FB +\Umathcode "027FC = "3 "0 "027FC +\Umathcode "027FD = "3 "0 "027FD +\Umathcode "027FE = "3 "0 "027FE +\Umathcode "027FF = "3 "0 "027FF +\Umathcode "02906 = "3 "0 "02906 +\Umathcode "02907 = "3 "0 "02907 +\Umathcode "0290A = "3 "0 "0290A +\Umathcode "0290B = "3 "0 "0290B +\Umathcode "0290C = "3 "0 "0290C +\Umathcode "0290D = "3 "0 "0290D +\Umathcode "02911 = "3 "0 "02911 +\Umathcode "02916 = "3 "0 "02916 +\Umathcode "02917 = "3 "0 "02917 +\Umathcode "02921 = "3 "0 "02921 +\Umathcode "02922 = "3 "0 "02922 +\Umathcode "02923 = "3 "0 "02923 +\Umathcode "02924 = "3 "0 "02924 +\Umathcode "02925 = "3 "0 "02925 +\Umathcode "02926 = "3 "0 "02926 +\Umathcode "02A00 = "1 "0 "02A00 +\Umathcode "02A01 = "1 "0 "02A01 +\Umathcode "02A02 = "1 "0 "02A02 +\Umathcode "02A03 = "1 "0 "02A03 +\Umathcode "02A04 = "1 "0 "02A04 +\Umathcode "02A05 = "1 "0 "02A05 +\Umathcode "02A06 = "1 "0 "02A06 +\Umathcode "02A09 = "1 "0 "02A09 +\Umathcode "02A3F = "2 "0 "02A3F +\Umathcode "02A7D = "3 "0 "02A7D +\Umathcode "02A7E = "3 "0 "02A7E +\Umathcode "02A85 = "3 "0 "02A85 +\Umathcode "02A86 = "3 "0 "02A86 +\Umathcode "02A87 = "3 "0 "02A87 +\Umathcode "02A88 = "3 "0 "02A88 +\Umathcode "02A89 = "3 "0 "02A89 +\Umathcode "02A8A = "3 "0 "02A8A +\Umathcode "02A8B = "3 "0 "02A8B +\Umathcode "02A8C = "3 "0 "02A8C +\Umathcode "02A95 = "3 "0 "02A95 +\Umathcode 
"02A96 = "3 "0 "02A96 +\Umathcode "02AAF = "3 "0 "02AAF +\Umathcode "02AB0 = "3 "0 "02AB0 +\Umathcode "02AB1 = "3 "0 "02AB1 +\Umathcode "02AB2 = "3 "0 "02AB2 +\Umathcode "02AB3 = "3 "0 "02AB3 +\Umathcode "02AB4 = "3 "0 "02AB4 +\Umathcode "02AB5 = "3 "0 "02AB5 +\Umathcode "02AB6 = "3 "0 "02AB6 +\Umathcode "02AB7 = "3 "0 "02AB7 +\Umathcode "02AB8 = "3 "0 "02AB8 +\Umathcode "02AB9 = "3 "0 "02AB9 +\Umathcode "02ABA = "3 "0 "02ABA +\Umathcode "02AC5 = "3 "0 "02AC5 +\Umathcode "02AC6 = "3 "0 "02AC6 +\Umathcode "02ACB = "3 "0 "02ACB +\Umathcode "02ACC = "3 "0 "02ACC +\Umathcode "12035 = "0 "0 "12035 +\Umathcode "1D6A4 = "0 "0 "1D6A4 +\Umathcode "1D6A5 = "0 "0 "1D6A5 +\Umathcode "1D6FB = "0 "0 "1D6FB +\Umathcode "1D717 = "0 "0 "1D717 +\Umathcode "1D718 = "0 "0 "1D718 + +% gaps .. done in lua (as example) + +% \Umathcode "1D455 = "0 "0 "0210E +% \Umathcode "1D49D = "0 "0 "0212C +% \Umathcode "1D4A0 = "0 "0 "02130 +% \Umathcode "1D4A1 = "0 "0 "02131 +% \Umathcode "1D4A3 = "0 "0 "0210B +% \Umathcode "1D4A4 = "0 "0 "02110 +% \Umathcode "1D4A7 = "0 "0 "02112 +% \Umathcode "1D4A8 = "0 "0 "02133 +% \Umathcode "1D4AD = "0 "0 "0211B +% \Umathcode "1D4BA = "0 "0 "0212F +% \Umathcode "1D4BC = "0 "0 "0210A +% \Umathcode "1D4C4 = "0 "0 "02134 +% \Umathcode "1D506 = "0 "0 "0212D +% \Umathcode "1D50B = "0 "0 "0210C +% \Umathcode "1D50C = "0 "0 "02111 +% \Umathcode "1D515 = "0 "0 "0211C +% \Umathcode "1D51D = "0 "0 "02128 +% \Umathcode "1D53A = "0 "0 "02102 +% \Umathcode "1D53F = "0 "0 "0210D +% \Umathcode "1D545 = "0 "0 "02115 +% \Umathcode "1D547 = "0 "0 "02119 +% \Umathcode "1D548 = "0 "0 "0211A +% \Umathcode "1D549 = "0 "0 "0211D +% \Umathcode "1D551 = "0 "0 "02124 + +% initialization + +\the\everymathit + +% a couple of definitions (we could also use \mathchardef): + +\def\acute {\Umathaccent"0"0"0000B4 } +\def\acwopencirclearrow {\Umathchar "3"0"0021BA } +\def\aleph {\Umathchar "0"0"002135 } +\def\Alpha {\Umathchar "0"0"000391 } +\def\alpha {\Umathchar "0"0"0003B1 } +\def\amalg {\Umathchar "2"0"002A3F } +\def\angle {\Umathchar "0"0"002220 } +\def\Angstrom {\Umathchar "0"0"00212B } +\def\approx {\Umathchar "3"0"002248 } +\def\approxEq {\Umathchar "3"0"002245 } +\def\approxeq {\Umathchar "3"0"00224A } +\def\approxnEq {\Umathchar "3"0"002247 } +\def\arrowvert {\Umathchar "0"0"00007C } +\def\Arrowvert {\Umathchar "0"0"002016 } +\def\ast {\Umathchar "2"0"002217 } +\def\ast {\Umathchar "2"0"002217 } +\def\asymp {\Umathchar "3"0"00224D } +\def\backepsilon {\Umathchar "0"0"0003F6 } +\def\backprime {\Umathchar "0"0"012035 } +\def\backsim {\Umathchar "3"0"00223D } +\def\backslash {\Umathchar "0"0"00005C } +\def\bar {\Umathaccent"0"0"0000AF } +\def\barleftarrow {\Umathchar "3"0"0021E4 } +\def\barleftarrowrightarrowbar {\Umathchar "3"0"0021B9 } +\def\barovernorthwestarrow {\Umathchar "3"0"0021B8 } +\def\barwedge {\Umathchar "2"0"0022BC } +\def\because {\Umathchar "3"0"002235 } +\def\Beta {\Umathchar "0"0"000392 } +\def\beta {\Umathchar "0"0"0003B2 } +\def\beth {\Umathchar "0"0"002136 } +\def\between {\Umathchar "3"0"00226C } +\def\bigcap {\Umathchar "1"0"0022C2 } +\def\bigcirc {\Umathchar "2"0"0025EF } +\def\bigcircle {\Umathchar "2"0"0020DD } +\def\bigcircle {\Umathchar "2"0"0020DD } +\def\bigcup {\Umathchar "1"0"0022C3 } +\def\bigdiamond {\Umathchar "0"0"0020DF } +\def\bigodot {\Umathchar "1"0"002A00 } +\def\bigoplus {\Umathchar "1"0"002A01 } +\def\bigotimes {\Umathchar "1"0"002A02 } +\def\bigsqcap {\Umathchar "1"0"002A05 } +\def\bigsqcup {\Umathchar "1"0"002A06 } +\def\bigsquare {\Umathchar "0"0"0020DE } 
+\def\bigstar {\Umathchar "0"0"002605 } +\def\bigtimes {\Umathchar "1"0"002A09 } +\def\bigtriangledown {\Umathchar "2"0"0025BD } +\def\bigtriangleup {\Umathchar "2"0"0025B3 } +\def\bigudot {\Umathchar "1"0"002A03 } +\def\biguplus {\Umathchar "1"0"002A04 } +\def\bigvee {\Umathchar "1"0"0022C1 } +\def\bigwedge {\Umathchar "1"0"0022C0 } +\def\blacklozenge {\Umathchar "0"0"002666 } +\def\blacksquare {\Umathchar "0"0"0025A0 } +\def\blacktriangle {\Umathchar "2"0"0025B2 } +\def\blacktriangledown {\Umathchar "2"0"0025BC } +\def\blacktriangleleft {\Umathchar "2"0"0025C0 } +\def\blacktriangleright {\Umathchar "2"0"0025B6 } +\def\bot {\Umathchar "0"0"0022A5 } +\def\bowtie {\Umathchar "3"0"0022C8 } +\def\Box {\Umathchar "0"0"0025A1 } +\def\boxdot {\Umathchar "2"0"0022A1 } +\def\boxminus {\Umathchar "2"0"00229F } +\def\boxplus {\Umathchar "2"0"00229E } +\def\boxtimes {\Umathchar "2"0"0022A0 } +%def\braceld {\Umathchar "0"0"000000 } +%def\bracerd {\Umathchar "0"0"000000 } +%def\bracelu {\Umathchar "0"0"000000 } +%def\braceru {\Umathchar "0"0"000000 } +\def\breve {\Umathaccent"0"0"0002D8 } +\def\bullet {\Umathchar "2"0"002022 } +\def\bullet {\Umathchar "2"0"002022 } +\def\Bumpeq {\Umathchar "3"0"00224E } +\def\cap {\Umathchar "2"0"002229 } +\def\Cap {\Umathchar "2"0"0022D2 } +\def\carriagereturn {\Umathchar "0"0"0021B5 } +\def\cdot {\Umathchar "2"0"0022C5 } +\def\cdotp {\Umathchar "6"0"0022C5 } +\def\cdots {\Umathchar "0"0"0022EF } +\def\centerdot {\Umathchar "2"0"0000B7 } +\def\check {\Umathaccent"0"0"0002C7 } +\def\checkmark {\Umathchar "0"0"002713 } +\def\Chi {\Umathchar "0"0"0003A7 } +\def\chi {\Umathchar "0"0"0003C7 } +\def\circ {\Umathchar "2"0"002218 } +\def\circeq {\Umathchar "3"0"002257 } +\def\circlearrowleft {\Umathchar "3"0"0021BB } +\def\circlearrowright {\Umathchar "3"0"0021BA } +\def\circledast {\Umathchar "2"0"00229B } +\def\circledcirc {\Umathchar "2"0"00229A } +\def\circleddash {\Umathchar "2"0"00229D } +\def\circledequals {\Umathchar "2"0"00229C } +\def\circledR {\Umathchar "0"0"0024C7 } +\def\circledS {\Umathchar "0"0"0024C8 } +\def\circleonrightarrow {\Umathchar "3"0"0021F4 } +\def\clubsuit {\Umathchar "0"0"002663 } +\def\colon {\Umathchar "6"0"002236 } +\def\colonequals {\Umathchar "3"0"002254 } +\def\complement {\Umathchar "0"0"002201 } +\def\complexes {\Umathchar "0"0"002102 } +\def\cong {\Umathchar "3"0"002245 } +\def\coprod {\Umathchar "1"0"002210 } +\def\cup {\Umathchar "2"0"00222A } +\def\Cup {\Umathchar "2"0"0022D3 } +\def\curlyeqprec {\Umathchar "3"0"0022DE } +\def\curlyeqsucc {\Umathchar "3"0"0022DF } +\def\curlyvee {\Umathchar "2"0"0022CE } +\def\curlywedge {\Umathchar "2"0"0022CF } +\def\curvearrowleft {\Umathchar "3"0"0021B6 } +\def\curvearrowright {\Umathchar "3"0"0021B7 } +\def\cwopencirclearrow {\Umathchar "3"0"0021BB } +\def\dag {\Umathchar "0"0"002020 } +\def\dagger {\Umathchar "2"0"002020 } +\def\daleth {\Umathchar "0"0"002138 } +\def\dasharrow {\Umathchar "3"0"0021E2 } +\def\dashedleftarrow {\Umathchar "3"0"00290C } +\def\dashedrightarrow {\Umathchar "3"0"00290D } +\def\dashv {\Umathchar "3"0"0022A3 } +\def\ddag {\Umathchar "0"0"002021 } +\def\ddagger {\Umathchar "2"0"002021 } +\def\dddot {\Umathaccent"0"0"0020DB } +\def\ddot {\Umathaccent"0"0"0000A8 } +\def\ddots {\Umathchar "0"0"0022F1 } +\def\Ddownarrow {\Umathchar "3"0"00290B } +\def\definedeq {\Umathchar "3"0"00225D } +\def\Delta {\Umathchar "0"0"000394 } +\def\delta {\Umathchar "0"0"0003B4 } +\def\diamond {\Umathchar "2"0"0022C4 } +\def\diamondsuit {\Umathchar "0"0"002662 } +\def\differentialD {\Umathchar 
"0"0"002145 } +\def\differentiald {\Umathchar "0"0"002146 } +\def\digamma {\Umathchar "0"0"0003DC } +\def\div {\Umathchar "2"0"0000F7 } +\def\divideontimes {\Umathchar "2"0"0022C7 } +\def\divides {\Umathchar "2"0"002223 } +\def\dot {\Umathaccent"0"0"0002D9 } +\def\doteq {\Umathchar "3"0"002250 } +\def\Doteq {\Umathchar "3"0"002251 } +\def\doteqdot {\Umathchar "3"0"002251 } +\def\dotminus {\Umathchar "2"0"002238 } +\def\dotplus {\Umathchar "2"0"002214 } +\def\dots {\Umathchar "0"0"002026 } +\def\dottedrightarrow {\Umathchar "3"0"002911 } +\def\doublecap {\Umathchar "2"0"0022D2 } +\def\doublecup {\Umathchar "2"0"0022D3 } +\def\doubleprime {\Umathchar "0"0"002033 } +\def\downarrow {\Umathchar "3"0"002193 } +\def\Downarrow {\Umathchar "3"0"0021D3 } +\def\downdasharrow {\Umathchar "3"0"0021E3 } +\def\downdownarrows {\Umathchar "3"0"0021CA } +\def\downharpoonleft {\Umathchar "3"0"0021C3 } +\def\downharpoonright {\Umathchar "3"0"0021C2 } +\def\downuparrows {\Umathchar "3"0"0021F5 } +\def\downwhitearrow {\Umathchar "0"0"0021E9 } +\def\downzigzagarrow {\Umathchar "3"0"0021AF } +\def\ell {\Umathchar "0"0"002113 } +\def\emptyset {\Umathchar "0"0"002205 } +\def\Epsilon {\Umathchar "0"0"000395 } +\def\epsilon {\Umathchar "0"0"0003F5 } +\def\eq {\Umathchar "3"0"00003D } +\def\eqcirc {\Umathchar "3"0"002256 } +\def\eqgtr {\Umathchar "3"0"0022DD } +\def\eqless {\Umathchar "3"0"0022DC } +\def\eqsim {\Umathchar "3"0"002242 } +\def\eqslantgtr {\Umathchar "3"0"002A96 } +\def\eqslantless {\Umathchar "3"0"002A95 } +\def\equalscolon {\Umathchar "3"0"002255 } +\def\equiv {\Umathchar "3"0"002261 } +\def\Eta {\Umathchar "0"0"000397 } +\def\eta {\Umathchar "0"0"0003B7 } +\def\eth {\Umathchar "0"0"0000F0 } +\def\Eulerconst {\Umathchar "0"0"002107 } +\def\exists {\Umathchar "0"0"002203 } +\def\exponentiale {\Umathchar "0"0"002147 } +\def\fallingdotseq {\Umathchar "3"0"002252 } +\def\Finv {\Umathchar "0"0"002132 } +\def\flat {\Umathchar "0"0"00266D } +\def\forall {\Umathchar "0"0"002200 } +\def\frown {\Umathchar "3"0"002322 } +\def\Game {\Umathchar "0"0"002141 } +\def\Gamma {\Umathchar "0"0"000393 } +\def\gamma {\Umathchar "0"0"0003B3 } +\def\ge {\Umathchar "3"0"002265 } +\def\geq {\Umathchar "3"0"002265 } +\def\geqq {\Umathchar "3"0"002267 } +\def\geqslant {\Umathchar "3"0"002A7E } +\def\gets {\Umathchar "3"0"002190 } +\def\gg {\Umathchar "3"0"00226B } +\def\ggg {\Umathchar "3"0"0022D9 } +\def\gggtr {\Umathchar "3"0"0022D9 } +\def\gimel {\Umathchar "0"0"002137 } +\def\gnapprox {\Umathchar "3"0"002A8A } +\def\gneqq {\Umathchar "3"0"002269 } +\def\gnsim {\Umathchar "3"0"0022E7 } +\def\grave {\Umathaccent"0"0"000060 } +\def\gt {\Umathchar "3"0"00003E } +\def\gtrapprox {\Umathchar "3"0"002A86 } +\def\gtrdot {\Umathchar "2"0"0022D7 } +\def\gtreqless {\Umathchar "3"0"0022DB } +\def\gtreqqless {\Umathchar "3"0"002A8C } +\def\gtrless {\Umathchar "3"0"002277 } +\def\gtrsim {\Umathchar "3"0"002273 } +\def\hat {\Umathaccent"0"0"0002C6 } +\def\hbar {\Umathchar "0"0"00210F } +\def\heartsuit {\Umathchar "0"0"002661 } +\def\hookleftarrow {\Umathchar "3"0"0021A9 } +\def\hookrightarrow {\Umathchar "3"0"0021AA } +\def\hslash {\Umathchar "0"0"00210F } +\def\iiint {\Umathchar "1"0"00222D } +\def\iiintop {\Umathchar "0"0"00222D } +\def\iint {\Umathchar "1"0"00222C } +\def\iintop {\Umathchar "0"0"00222C } +\def\Im {\Umathchar "0"0"002111 } +\def\imaginaryi {\Umathchar "0"0"002148 } +\def\imaginaryj {\Umathchar "0"0"002149 } +\def\imath {\Umathchar "0"0"01D6A4 } +\def\imply {\Umathchar "3"0"0021D2 } +\def\in {\Umathchar "0"0"002208 } 
+\def\infty {\Umathchar "0"0"00221E } +\def\int {\Umathchar "1"0"00222B } +\def\intclockwise {\Umathchar "1"0"002231 } +\def\integers {\Umathchar "0"0"002124 } +\def\intercal {\Umathchar "2"0"0022BA } +\def\intop {\Umathchar "0"0"00222B } +\def\Iota {\Umathchar "0"0"000399 } +\def\iota {\Umathchar "0"0"0003B9 } +\def\jmath {\Umathchar "0"0"01D6A5 } +\def\Join {\Umathchar "3"0"0022C8 } +\def\Kappa {\Umathchar "0"0"00039A } +\def\kappa {\Umathchar "0"0"0003BA } +\def\Lambda {\Umathchar "0"0"00039B } +\def\lambda {\Umathchar "0"0"0003BB } +\def\land {\Umathchar "2"0"002227 } +\def\langle {\Udelimiter "4"0"0027E8 } +\def\lbrace {\Udelimiter "4"0"00007B } +\def\lbrack {\Udelimiter "4"0"00005B } +\def\lceil {\Udelimiter "4"0"002308 } +\def\lceiling {\Udelimiter "4"0"002308 } +\def\ldotp {\Umathchar "6"0"00002E } +\def\ldots {\Umathchar "0"0"002026 } +\def\Ldsh {\Umathchar "3"0"0021B2 } +\def\le {\Umathchar "3"0"002264 } +\def\leadsto {\Umathchar "3"0"0021DD } +\def\leftarrow {\Umathchar "3"0"002190 } +\def\Leftarrow {\Umathchar "3"0"0021D0 } +\def\leftarrowtail {\Umathchar "3"0"0021A2 } +\def\leftarrowtriangle {\Umathchar "3"0"0021FD } +\def\leftdasharrow {\Umathchar "3"0"0021E0 } +\def\leftharpoondown {\Umathchar "3"0"0021BD } +\def\leftharpoonup {\Umathchar "3"0"0021BC } +\def\leftleftarrows {\Umathchar "3"0"0021C7 } +\def\leftrightarrow {\Umathchar "3"0"002194 } +\def\Leftrightarrow {\Umathchar "3"0"0021D4 } +\def\leftrightarrows {\Umathchar "3"0"0021C6 } +\def\leftrightarrowtriangle {\Umathchar "3"0"0021FF } +\def\leftrightharpoons {\Umathchar "3"0"0021CB } +\def\leftrightsquigarrow {\Umathchar "3"0"0021AD } +\def\leftsquigarrow {\Umathchar "3"0"0021DC } +\def\leftthreetimes {\Umathchar "2"0"0022CB } +\def\leftwavearrow {\Umathchar "3"0"00219C } +\def\leftwhitearrow {\Umathchar "0"0"0021E6 } +\def\leq {\Umathchar "3"0"002264 } +\def\leqq {\Umathchar "3"0"002266 } +\def\leqslant {\Umathchar "3"0"002A7D } +\def\lessapprox {\Umathchar "3"0"002A85 } +\def\lessdot {\Umathchar "2"0"0022D6 } +\def\lesseqgtr {\Umathchar "3"0"0022DA } +\def\lesseqqgtr {\Umathchar "3"0"002A8B } +\def\lessgtr {\Umathchar "3"0"002276 } +\def\lesssim {\Umathchar "3"0"002272 } +\def\lfloor {\Udelimiter "4"0"00230A } +\def\lgroup {\Udelimiter "4"0"0027EE } +\def\lhook {\Umathchar "3"0"0FE322 } +\def\lhooknwarrow {\Umathchar "3"0"002923 } +\def\lhooksearrow {\Umathchar "3"0"002925 } +\def\linefeed {\Umathchar "0"0"0021B4 } +\def\ll {\Umathchar "3"0"00226A } +\def\llangle {\Udelimiter "4"0"0027EA } +\def\llbracket {\Udelimiter "4"0"0027E6 } +\def\llcorner {\Udelimiter "4"0"00231E } +\def\Lleftarrow {\Umathchar "3"0"0021DA } +\def\lll {\Umathchar "3"0"0022D8 } +\def\llless {\Umathchar "3"0"0022D8 } +\def\lmoustache {\Udelimiter "4"0"0023B0 } +\def\lnapprox {\Umathchar "3"0"002A89 } +\def\lneq {\Umathchar "3"0"002A87 } +\def\lneqq {\Umathchar "3"0"002268 } +\def\lnot {\Umathchar "0"0"0000AC } +\def\lnsim {\Umathchar "3"0"0022E6 } +\def\longleftarrow {\Umathchar "3"0"0027F5 } +\def\Longleftarrow {\Umathchar "3"0"0027F8 } +\def\longleftrightarrow {\Umathchar "3"0"0027F7 } +\def\Longleftrightarrow {\Umathchar "3"0"0027FA } +\def\longmapsfrom {\Umathchar "3"0"0027FB } +\def\Longmapsfrom {\Umathchar "3"0"0027FD } +\def\longmapsto {\Umathchar "3"0"0027FC } +\def\Longmapsto {\Umathchar "3"0"0027FE } +\def\longrightarrow {\Umathchar "3"0"0027F6 } +\def\Longrightarrow {\Umathchar "3"0"0027F9 } +\def\longrightsquigarrow {\Umathchar "3"0"0027FF } +\def\looparrowleft {\Umathchar "3"0"0021AB } +\def\looparrowright {\Umathchar "3"0"0021AC } 
+\def\lor {\Umathchar "2"0"002228 } +\def\lozenge {\Umathchar "0"0"0025CA } +\def\lparent {\Udelimiter "4"0"000028 } +\def\lrcorner {\Udelimiter "5"0"00231F } +\def\Lsh {\Umathchar "3"0"0021B0 } +\def\lt {\Umathchar "3"0"00003C } +\def\ltimes {\Umathchar "2"0"0022C9 } +\def\lvert {\Udelimiter "4"0"00007C } +\def\lVert {\Udelimiter "4"0"002016 } +\def\maltese {\Umathchar "0"0"002720 } +\def\mapsdown {\Umathchar "3"0"0021A7 } +\def\mapsfrom {\Umathchar "3"0"0021A4 } +\def\Mapsfrom {\Umathchar "3"0"002906 } +\def\mapsfromchar {\Umathchar "3"0"0FE324 } +\def\mapsto {\Umathchar "3"0"0021A6 } +\def\Mapsto {\Umathchar "3"0"002907 } +\def\mapstochar {\Umathchar "3"0"0FE321 } +\def\mapsup {\Umathchar "3"0"0021A5 } +\def\mathring {\Umathaccent"0"0"0002DA } +\def\measuredangle {\Umathchar "0"0"002221 } +\def\measuredeq {\Umathchar "3"0"00225E } +\def\mho {\Umathchar "0"0"002127 } +\def\mid {\Umathchar "3"0"00007C } +\def\minus {\Umathchar "2"0"002212 } +\def\minuscolon {\Umathchar "2"0"002239 } +\def\models {\Umathchar "3"0"0022A7 } +\def\mp {\Umathchar "2"0"002213 } +\def\Mu {\Umathchar "0"0"00039C } +\def\mu {\Umathchar "0"0"0003BC } +\def\multimap {\Umathchar "3"0"0022B8 } +\def\napprox {\Umathchar "3"0"002249 } +\def\napproxEq {\Umathchar "3"0"002246 } +\def\nasymp {\Umathchar "3"0"00226D } +\def\natural {\Umathchar "0"0"00266E } +\def\naturalnumbers {\Umathchar "0"0"002115 } +\def\ncong {\Umathchar "3"0"002246 } +\def\ndivides {\Umathchar "2"0"002224 } +\def\ne {\Umathchar "3"0"002260 } +\def\nearrow {\Umathchar "3"0"002197 } +\def\Nearrow {\Umathchar "3"0"0021D7 } +\def\neg {\Umathchar "0"0"0000AC } +\def\negativesign {\Umathchar "2"0"00207B } +\def\neq {\Umathchar "3"0"002260 } +\def\nequiv {\Umathchar "3"0"002262 } +\def\neswarrow {\Umathchar "3"0"002922 } +\def\nexists {\Umathchar "0"0"002204 } +\def\ngeq {\Umathchar "3"0"002271 } +\def\ngtr {\Umathchar "3"0"00226F } +\def\ngtrless {\Umathchar "3"0"002279 } +\def\ngtrsim {\Umathchar "3"0"002275 } +\def\nHdownarrow {\Umathchar "3"0"0021DF } +\def\nHuparrow {\Umathchar "3"0"0021DE } +\def\ni {\Umathchar "3"0"00220B } +\def\nin {\Umathchar "3"0"002209 } +\def\nleftarrow {\Umathchar "3"0"00219A } +\def\nLeftarrow {\Umathchar "3"0"0021CD } +\def\nleftrightarrow {\Umathchar "3"0"0021AE } +\def\nLeftrightarrow {\Umathchar "3"0"0021CE } +\def\nleq {\Umathchar "3"0"002270 } +\def\nless {\Umathchar "3"0"00226E } +\def\nlessgtr {\Umathchar "3"0"002278 } +\def\nlesssim {\Umathchar "3"0"002274 } +\def\nmid {\Umathchar "3"0"002224 } +\def\nni {\Umathchar "3"0"00220C } +\def\not {\Umathchar "3"0"000338 } +\def\notin {\Umathchar "3"0"002209 } +\def\nowns {\Umathchar "3"0"00220C } +\def\nparallel {\Umathchar "3"0"002226 } +\def\nprec {\Umathchar "3"0"002280 } +\def\npreccurlyeq {\Umathchar "3"0"0022E0 } +\def\nrightarrow {\Umathchar "3"0"00219B } +\def\nRightarrow {\Umathchar "3"0"0021CF } +\def\nsim {\Umathchar "3"0"002241 } +\def\nsimeq {\Umathchar "3"0"002244 } +\def\nsqsubseteq {\Umathchar "3"0"0022E2 } +\def\nsqsupseteq {\Umathchar "3"0"0022E3 } +\def\nsubset {\Umathchar "3"0"002284 } +\def\nsubseteq {\Umathchar "3"0"002288 } +\def\nsucc {\Umathchar "3"0"002281 } +\def\nsucccurlyeq {\Umathchar "3"0"0022E1 } +\def\nsupset {\Umathchar "3"0"002285 } +\def\nsupseteq {\Umathchar "3"0"002289 } +\def\ntriangleleft {\Umathchar "3"0"0022EB } +\def\ntrianglelefteq {\Umathchar "3"0"0022EC } +\def\ntriangleright {\Umathchar "3"0"0022EA } +\def\ntrianglerighteq {\Umathchar "3"0"0022ED } +\def\Nu {\Umathchar "0"0"00039D } +\def\nu {\Umathchar "0"0"0003BD } 
+\def\nvdash {\Umathchar "3"0"0022AC } +\def\nvDash {\Umathchar "3"0"0022AD } +\def\nVdash {\Umathchar "3"0"0022AE } +\def\nVDash {\Umathchar "3"0"0022AF } +\def\nvleftarrow {\Umathchar "3"0"0021F7 } +\def\nVleftarrow {\Umathchar "3"0"0021FA } +\def\nvleftrightarrow {\Umathchar "3"0"0021F9 } +\def\nVleftrightarrow {\Umathchar "3"0"0021FC } +\def\nvrightarrow {\Umathchar "3"0"0021F8 } +\def\nVrightarrow {\Umathchar "3"0"0021FB } +\def\nwarrow {\Umathchar "3"0"002196 } +\def\Nwarrow {\Umathchar "3"0"0021D6 } +\def\nwsearrow {\Umathchar "3"0"002921 } +\def\odot {\Umathchar "2"0"002299 } +\def\ohm {\Umathchar "0"0"002126 } +\def\oiiint {\Umathchar "1"0"002230 } +\def\oiint {\Umathchar "1"0"00222F } +\def\oint {\Umathchar "1"0"00222E } +\def\ointclockwise {\Umathchar "1"0"002232 } +\def\ointctrclockwise {\Umathchar "1"0"002233 } +\def\Omega {\Umathchar "0"0"0003A9 } +\def\omega {\Umathchar "0"0"0003C9 } +\def\Omicron {\Umathchar "0"0"00039F } +\def\omicron {\Umathchar "0"0"0003BF } +\def\ominus {\Umathchar "2"0"002296 } +\def\oplus {\Umathchar "2"0"002295 } +\def\oslash {\Umathchar "2"0"002298 } +\def\otimes {\Umathchar "2"0"002297 } +\def\overbar {\Umathaccent"0"0"00203E } +\def\overbrace {\Umathaccent"0"0"0023DE } +\def\overbracket {\Umathaccent"0"0"0023B4 } +\def\overparent {\Umathaccent"0"0"0023DC } +\def\owns {\Umathchar "3"0"00220B } +\def\P {\Umathchar "0"0"0000B6 } +\def\parallel {\Umathchar "3"0"002225 } +\def\partial {\Umathchar "0"0"002202 } +\def\perp {\Umathchar "3"0"0022A5 } +\def\Phi {\Umathchar "0"0"0003A6 } +\def\phi {\Umathchar "0"0"0003D5 } +\def\Pi {\Umathchar "0"0"0003A0 } +\def\pi {\Umathchar "0"0"0003C0 } +\def\pitchfork {\Umathchar "3"0"0022D4 } +\def\Plankconst {\Umathchar "0"0"00210E } +\def\pm {\Umathchar "2"0"0000B1 } +\def\positivesign {\Umathchar "2"0"00207A } +\def\prec {\Umathchar "3"0"00227A } +\def\precapprox {\Umathchar "3"0"002AB7 } +\def\preccurlyeq {\Umathchar "3"0"00227C } +\def\preceq {\Umathchar "3"0"002AAF } +\def\preceqq {\Umathchar "3"0"002AB3 } +\def\precnapprox {\Umathchar "3"0"002AB9 } +\def\precneq {\Umathchar "3"0"002AB1 } +\def\precneqq {\Umathchar "3"0"002AB5 } +\def\precnsim {\Umathchar "3"0"0022E8 } +\def\precsim {\Umathchar "3"0"00227E } +\def\prime {\Umathchar "0"0"002032 } +\def\primes {\Umathchar "0"0"002119 } +\def\prod {\Umathchar "1"0"00220F } +\def\PropertyLine {\Umathchar "0"0"00214A } +\def\propto {\Umathchar "3"0"00221D } +\def\Psi {\Umathchar "0"0"0003A8 } +\def\psi {\Umathchar "0"0"0003C8 } +\def\questionedeq {\Umathchar "3"0"00225F } +\def\rangle {\Udelimiter "5"0"0027E9 } +\def\rationals {\Umathchar "0"0"00211A } +\def\rbrace {\Udelimiter "5"0"00007D } +\def\rbrack {\Udelimiter "5"0"00005D } +\def\rceil {\Udelimiter "5"0"002309 } +\def\rceiling {\Udelimiter "5"0"002309 } +\def\Rdsh {\Umathchar "3"0"0021B3 } +\def\Re {\Umathchar "0"0"00211C } +\def\reals {\Umathchar "0"0"00211D } +\def\Relbar {\Umathchar "3"0"00003D } +\def\relbar {\Umathchar "3"0"002212 } +\def\restriction {\Umathchar "3"0"0021BE } +\def\rfloor {\Udelimiter "5"0"00230B } +\def\rgroup {\Udelimiter "5"0"0027EF } +\def\Rho {\Umathchar "0"0"0003A1 } +\def\rho {\Umathchar "0"0"0003C1 } +\def\rhook {\Umathchar "3"0"0FE323 } +\def\rhooknearrow {\Umathchar "3"0"002924 } +\def\rhookswarrow {\Umathchar "3"0"002926 } +\def\rightangle {\Umathchar "0"0"00221F } +\def\rightarrow {\Umathchar "3"0"002192 } +\def\Rightarrow {\Umathchar "3"0"0021D2 } +\def\rightarrowbar {\Umathchar "3"0"0021E5 } +\def\rightarrowtail {\Umathchar "3"0"0021A3 } +\def\rightarrowtriangle {\Umathchar 
"3"0"0021FE } +\def\rightdasharrow {\Umathchar "3"0"0021E2 } +\def\rightharpoondown {\Umathchar "3"0"0021C1 } +\def\rightharpoonup {\Umathchar "3"0"0021C0 } +\def\rightleftarrows {\Umathchar "3"0"0021C4 } +\def\rightleftharpoons {\Umathchar "3"0"0021CC } +\def\rightrightarrows {\Umathchar "3"0"0021C9 } +\def\rightsquigarrow {\Umathchar "3"0"0021DD } +\def\rightthreearrows {\Umathchar "3"0"0021F6 } +\def\rightthreetimes {\Umathchar "2"0"0022CC } +\def\rightwavearrow {\Umathchar "3"0"00219D } +\def\rightwhitearrow {\Umathchar "0"0"0021E8 } +\def\risingdotseq {\Umathchar "3"0"002253 } +\def\rmoustache {\Udelimiter "5"0"0023B1 } +\def\rneq {\Umathchar "3"0"002A88 } +\def\rparent {\Udelimiter "5"0"000029 } +\def\rrangle {\Udelimiter "5"0"0027EB } +\def\rrbracket {\Udelimiter "5"0"0027E7 } +\def\Rrightarrow {\Umathchar "3"0"0021DB } +\def\Rsh {\Umathchar "3"0"0021B1 } +\def\rtimes {\Umathchar "2"0"0022CA } +\def\rvert {\Udelimiter "5"0"00007C } +\def\rVert {\Udelimiter "5"0"002016 } +\def\S {\Umathchar "0"0"0000A7 } +\def\searrow {\Umathchar "3"0"002198 } +\def\Searrow {\Umathchar "3"0"0021D8 } +\def\setminus {\Umathchar "2"0"002216 } +\def\sharp {\Umathchar "0"0"00266F } +\def\Sigma {\Umathchar "0"0"0003A3 } +\def\sigma {\Umathchar "0"0"0003C3 } +\def\sim {\Umathchar "3"0"00223C } +\def\simeq {\Umathchar "3"0"002243 } +\def\slash {\Umathchar "0"0"002044 } +\def\smile {\Umathchar "3"0"002323 } +\def\solidus {\Udelimiter "5"0"002044 } +\def\spadesuit {\Umathchar "0"0"002660 } +\def\sphericalangle {\Umathchar "0"0"002222 } +\def\sqcap {\Umathchar "2"0"002293 } +\def\sqcup {\Umathchar "2"0"002294 } +\def\sqsubset {\Umathchar "3"0"00228F } +\def\sqsubseteq {\Umathchar "2"0"002291 } +\def\sqsubsetneq {\Umathchar "3"0"0022E4 } +\def\sqsupset {\Umathchar "3"0"002290 } +\def\sqsupseteq {\Umathchar "2"0"002292 } +\def\sqsupsetneq {\Umathchar "3"0"0022E5 } +\def\square {\Umathchar "0"0"0025A1 } +\def\squaredots {\Umathchar "3"0"002237 } +\def\star {\Umathchar "2"0"0022C6 } +\def\stareq {\Umathchar "3"0"00225B } +\def\subset {\Umathchar "3"0"002282 } +\def\Subset {\Umathchar "3"0"0022D0 } +\def\subseteq {\Umathchar "3"0"002286 } +\def\subseteqq {\Umathchar "3"0"002AC5 } +\def\subsetneq {\Umathchar "3"0"00228A } +\def\subsetneqq {\Umathchar "3"0"002ACB } +\def\succ {\Umathchar "3"0"00227B } +\def\succapprox {\Umathchar "3"0"002AB8 } +\def\succcurlyeq {\Umathchar "3"0"00227D } +\def\succeq {\Umathchar "3"0"002AB0 } +\def\succeqq {\Umathchar "3"0"002AB4 } +\def\succnapprox {\Umathchar "3"0"002ABA } +\def\succneq {\Umathchar "3"0"002AB2 } +\def\succneqq {\Umathchar "3"0"002AB6 } +\def\succnsim {\Umathchar "3"0"0022E9 } +\def\succsim {\Umathchar "3"0"00227F } +\def\sum {\Umathchar "1"0"002211 } +\def\supset {\Umathchar "3"0"002283 } +\def\Supset {\Umathchar "3"0"0022D1 } +\def\supseteq {\Umathchar "3"0"002287 } +\def\supseteqq {\Umathchar "3"0"002AC6 } +\def\supsetneq {\Umathchar "3"0"00228B } +\def\supsetneqq {\Umathchar "3"0"002ACC } +\def\surd {\Umathchar "2"0"00221A } +\def\swarrow {\Umathchar "3"0"002199 } +\def\Swarrow {\Umathchar "3"0"0021D9 } +\def\Tau {\Umathchar "0"0"0003A4 } +\def\tau {\Umathchar "0"0"0003C4 } +\def\therefore {\Umathchar "3"0"002234 } +\def\Theta {\Umathchar "0"0"000398 } +\def\theta {\Umathchar "0"0"0003B8 } +\def\tilde {\Umathaccent"0"0"0002DC } +\def\times {\Umathchar "2"0"0000D7 } +\def\to {\Umathchar "3"0"002192 } +\def\top {\Umathchar "0"0"0022A4 } +\def\triangle {\Umathchar "0"0"0025B3 } +\def\triangledown {\Umathchar "2"0"0025BD } +\def\triangleleft {\Umathchar "2"0"0025C1 } 
+\def\triangleq {\Umathchar "3"0"00225C } +\def\triangleright {\Umathchar "2"0"0025B7 } +\def\tripleprime {\Umathchar "0"0"002034 } +\def\turnediota {\Umathchar "0"0"002129 } +\def\twoheaddownarrow {\Umathchar "3"0"0021A1 } +\def\twoheadleftarrow {\Umathchar "3"0"00219E } +\def\twoheadrightarrow {\Umathchar "3"0"0021A0 } +\def\twoheadrightarrowtail {\Umathchar "3"0"002916 } +\def\twoheaduparrow {\Umathchar "3"0"00219F } +\def\udots {\Umathchar "0"0"0022F0 } +\def\ulcorner {\Udelimiter "4"0"00231C } +\def\underbar {\Umathaccent bottom "0"0"00203E } +\def\underbrace {\Umathaccent bottom "0"0"0023DF } +\def\underbracket {\Umathaccent bottom "0"0"0023B5 } +\def\underparent {\Umathaccent bottom "0"0"0023DD } +\def\upand {\Umathchar "2"0"00214B } +\def\uparrow {\Umathchar "3"0"002191 } +\def\Uparrow {\Umathchar "3"0"0021D1 } +\def\updasharrow {\Umathchar "3"0"0021E1 } +\def\updownarrow {\Umathchar "3"0"002195 } +\def\Updownarrow {\Umathchar "3"0"0021D5 } +\def\updownarrowbar {\Umathchar "0"0"0021A8 } +\def\updownarrows {\Umathchar "3"0"0021C5 } +\def\upharpoonleft {\Umathchar "3"0"0021BF } +\def\upharpoonright {\Umathchar "3"0"0021BE } +\def\uplus {\Umathchar "2"0"00228E } +\def\Upsilon {\Umathchar "0"0"0003A5 } +\def\upsilon {\Umathchar "0"0"0003C5 } +\def\upuparrows {\Umathchar "3"0"0021C8 } +\def\upwhitearrow {\Umathchar "0"0"0021E7 } +\def\urcorner {\Udelimiter "5"0"00231D } +\def\Uuparrow {\Umathchar "3"0"00290A } +\def\varepsilon {\Umathchar "0"0"0003B5 } +\def\varkappa {\Umathchar "0"0"0003F0 } +\def\varkappa {\Umathchar "0"0"0003F0 } +\def\varnothing {\Umathchar "0"0"002300 } +\def\varphi {\Umathchar "0"0"0003C6 } +\def\varpi {\Umathchar "0"0"0003D6 } +\def\varrho {\Umathchar "0"0"01D71A } +\def\varsigma {\Umathchar "0"0"0003C2 } +\def\vartheta {\Umathchar "0"0"01D717 } +\def\varTheta {\Umathchar "0"0"0003D1 } +\def\vdash {\Umathchar "3"0"0022A2 } +\def\vDash {\Umathchar "3"0"0022A8 } +\def\Vdash {\Umathchar "3"0"0022A9 } +\def\VDash {\Umathchar "3"0"0022AB } +\def\vdots {\Umathchar "0"0"0022EE } +\def\vec {\Umathaccent"0"0"0020D7 } +\def\vee {\Umathchar "2"0"002228 } +\def\veebar {\Umathchar "2"0"0022BB } +\def\veeeq {\Umathchar "3"0"00225A } +\def\vert {\Udelimiter "0"0"00007C } +\def\Vert {\Udelimiter "0"0"002016 } +\def\Vvdash {\Umathchar "3"0"0022AA } +\def\wedge {\Umathchar "2"0"002227 } +\def\wedgeeq {\Umathchar "3"0"002259 } +\def\whitearrowupfrombar {\Umathchar "0"0"0021EB } +\def\widehat {\Umathaccent"0"0"000302 } +\def\widetilde {\Umathaccent"0"0"000303 } +\def\wp {\Umathchar "0"0"002118 } +\def\wr {\Umathchar "2"0"002240 } +\def\Xi {\Umathchar "0"0"00039E } +\def\xi {\Umathchar "0"0"0003BE } +\def\yen {\Umathchar "0"0"0000A5 } +\def\Zeta {\Umathchar "0"0"000396 } +\def\zeta {\Umathchar "0"0"0003B6 } + +% a few definitions: + +\def\sqrt {\Uroot "0 "221A{}} +\def\root#1\of{\Uroot "0 "221A{#1}} + +% \skewchar\teni='177 \skewchar\seveni='177 \skewchar\fivei='177 +% \skewchar\tensy='60 \skewchar\sevensy='60 \skewchar\fivesy='60 + +\chardef\% = "25 +\chardef\& = "26 +\chardef\# = "23 +\chardef\$ = "24 +\chardef\_ = "5F + +\let\ss ß +\let\ae æ +\let\oe œ +\let\o ø +\let\AE Æ +\let\OE Œ +\let\O Ø +\let\i ı +\let\aa å +\let\l ł +\let\L Ł +\let\AA Å +\let\copyright © + +% just use utf + +\def\`#1{\string\`\string{#1\string}} +\def\'#1{\string\'\string{#1\string}} +\def\v#1{\string\v\string{#1\string}} +\def\u#1{\string\u\string{#1\string}} +\def\=#1{\string\=\string{#1\string}} +\def\^#1{\string\^\string{#1\string}} +\def\.#1{\string\.\string{#1\string}} 
+\def\H#1{\string\H\string{#1\string}} +\def\~#1{\string\~\string{#1\string}} +\def\"#1{\string\"\string{#1\string}} +\def\d#1{\string\d\string{#1\string}} +\def\b#1{\string\b\string{#1\string}} +\def\c#1{\string\c\string{#1\string}} + +\endinput diff --git a/src/fontloader/misc/fontloader-mplib.lua b/src/fontloader/misc/fontloader-mplib.lua new file mode 100644 index 0000000..c6628ac --- /dev/null +++ b/src/fontloader/misc/fontloader-mplib.lua @@ -0,0 +1,491 @@ +if not modules then modules = { } end modules ['luatex-mplib'] = { + version = 1.001, + comment = "companion to luatex-mplib.tex", + author = "Hans Hagen & Taco Hoekwater", + copyright = "ConTeXt Development Team", + license = "public domain", +} + +--[[ldx-- +

This module is a stripped down version of libraries that are used +by ConTeXt. It can be used in other macro packages and/or +serve as an example. Embedding in a macro package is up to others and +normally boils down to inputting supp-mpl.tex.

+--ldx]]-- + +if metapost and metapost.version then + + --[[ldx-- +

Let's silently quit and make sure that no one loads it + manually in ConTeXt.

+ --ldx]]-- + +else + + local format, concat, abs, match = string.format, table.concat, math.abs, string.match + + local mplib = require ('mplib') + local kpse = require ('kpse') + + --[[ldx-- +

We create a metapost namespace and add some variables to it. If the namespace is + already defined it will not be initialized. This permits hooking + in code beforehand.

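    As an added example (not in the original text), hooking in beforehand just
    means defining the table before this file is loaded, for instance:

      metapost = { showlog = true } -- predefined namespace; the module keeps this value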
+ +

We don't make a format automatically. After all, distributions + might have their own preferences and normally a format (mem) file will + have some special place in the TeX tree. Also, there can already + be format files, different memory settings and other nasty pitfalls that + we don't want to interfere with. If you want, you can define a function + metapost.make(name,mem_name) that does the job.

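    A minimal sketch of such a function, added here as an illustration (it
    mirrors the default defined below for old mplib versions and assumes the
    finder and file helpers from this file):

      function metapost.make(name,mem_name)
          local mpx = mplib.new {
              ini_version = true,
              find_file   = metapost.finder,
              job_name    = file.stripsuffix(name),
          }
          mpx:execute(string.format("input %s ; dump ;",name))
          return mpx
      end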
+ --ldx]]-- + + metapost = metapost or { } + metapost.version = 1.00 + metapost.showlog = metapost.showlog or false + metapost.lastlog = "" + + --[[ldx-- +

A few helpers, taken from l-file.lua.

+ --ldx]]-- + + local file = file or { } + + function file.replacesuffix(filename, suffix) + return (string.gsub(filename,"%.[%a%d]+$","")) .. "." .. suffix + end + + function file.stripsuffix(filename) + return (string.gsub(filename,"%.[%a%d]+$","")) + end + + --[[ldx-- +

We use the kpse library unless a finder is already + defined.

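    As an added example (not in the original), a finder that prefers files in
    the current directory and only then falls back on kpse could be installed
    beforehand:

      local mykpse = kpse.new("luatex","mpost")

      metapost.finder = function(name, mode, ftype)
          if mode == "w" then
              return name -- writable files keep their name
          end
          return lfs.isfile(name) and name or mykpse:find_file(name,ftype)
      end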
+ --ldx]]-- + + local mpkpse = kpse.new("luatex","mpost") + + metapost.finder = metapost.finder or function(name, mode, ftype) + if mode == "w" then + return name + else + return mpkpse:find_file(name,ftype) + end + end + + --[[ldx-- +

You can use your own reporter if needed, as long as it handles multiple + arguments and formatted strings.

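    As an added example (not in the original), a reporter that writes each
    message on its own line in the log could be installed instead of the
    default below:

      metapost.report = function(fmt,...)
          texio.write_nl("mplib: " .. string.format(fmt,...))
      end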
+ --ldx]]-- + + metapost.report = metapost.report or function(...) + texio.write(format("<mplib: %s>",format(...))) + end + + --[[ldx-- +

The rest of this module is not documented. More info can be found in the + MetaFun manual, articles in user group journals and the files that + ship with ConTeXt.

+ --ldx]]-- + + function metapost.resetlastlog() + metapost.lastlog = "" + end + + local mplibone = tonumber(mplib.version()) <= 1.50 + + if mplibone then + + metapost.make = metapost.make or function(name,mem_name,dump) + local t = os.clock() + local mpx = mplib.new { + ini_version = true, + find_file = metapost.finder, + job_name = file.stripsuffix(name) + } + mpx:execute(string.format("input %s ;",name)) + if dump then + mpx:execute("dump ;") + metapost.report("format %s made and dumped for %s in %0.3f seconds",mem_name,name,os.clock()-t) + else + metapost.report("%s read in %0.3f seconds",name,os.clock()-t) + end + return mpx + end + + function metapost.load(name) + local mem_name = file.replacesuffix(name,"mem") + local mpx = mplib.new { + ini_version = false, + mem_name = mem_name, + find_file = metapost.finder + } + if not mpx and type(metapost.make) == "function" then + -- when i have time i'll locate the format and dump + mpx = metapost.make(name,mem_name) + end + if mpx then + metapost.report("using format %s",mem_name,false) + return mpx, nil + else + return nil, { status = 99, error = "out of memory or invalid format" } + end + end + + else + + local preamble = [[ + boolean mplib ; mplib := true ; + let dump = endinput ; + input %s ; + ]] + + metapost.make = metapost.make or function() + end + + function metapost.load(name) + local mpx = mplib.new { + ini_version = true, + find_file = metapost.finder, + } + local result + if not mpx then + result = { status = 99, error = "out of memory"} + else + result = mpx:execute(format(preamble, file.replacesuffix(name,"mp"))) + end + metapost.reporterror(result) + return mpx, result + end + + end + + function metapost.unload(mpx) + if mpx then + mpx:finish() + end + end + + function metapost.reporterror(result) + if not result then + metapost.report("mp error: no result object returned") + elseif result.status > 0 then + local t, e, l = result.term, result.error, result.log + if t then + metapost.report("mp terminal: %s",t) + end + if e then + metapost.report("mp error: %s", e) + end + if not t and not e and l then + metapost.lastlog = metapost.lastlog .. "\n " .. l + metapost.report("mp log: %s",l) + else + metapost.report("mp error: unknown, no error, terminal or log messages") + end + else + return false + end + return true + end + + function metapost.process(mpx, data) + local converted, result = false, {} + mpx = metapost.load(mpx) + if mpx and data then + local result = mpx:execute(data) + if not result then + metapost.report("mp error: no result object returned") + elseif result.status > 0 then + metapost.report("mp error: %s",(result.term or "no-term") .. "\n" .. (result.error or "no-error")) + elseif metapost.showlog then + metapost.lastlog = metapost.lastlog .. "\n" .. result.term + metapost.report("mp info: %s",result.term or "no-term") + elseif result.fig then + converted = metapost.convert(result) + else + metapost.report("mp error: unknown error, maybe no beginfig/endfig") + end + else + metapost.report("mp error: mem file not found") + end + return converted, result + end + + local function getobjects(result,figure,f) + return figure:objects() + end + + function metapost.convert(result, flusher) + metapost.flush(result, flusher) + return true -- done + end + + --[[ldx-- +

We removed some message and tracing code. We might even remove the flusher.

+ --ldx]]-- + + local function pdf_startfigure(n,llx,lly,urx,ury) + tex.sprint(format("\\startMPLIBtoPDF{%s}{%s}{%s}{%s}",llx,lly,urx,ury)) + end + + local function pdf_stopfigure() + tex.sprint("\\stopMPLIBtoPDF") + end + + function pdf_literalcode(fmt,...) -- table + tex.sprint(format("\\MPLIBtoPDF{%s}",format(fmt,...))) + end + + function pdf_textfigure(font,size,text,width,height,depth) + text = text:gsub(".","\\hbox{%1}") -- kerning happens in metapost + tex.sprint(format("\\MPLIBtextext{%s}{%s}{%s}{%s}{%s}",font,size,text,0,-( 7200/ 7227)/65536*depth)) + end + + local bend_tolerance = 131/65536 + + local rx, sx, sy, ry, tx, ty, divider = 1, 0, 0, 1, 0, 0, 1 + + local function pen_characteristics(object) + local t = mplib.pen_info(object) + rx, ry, sx, sy, tx, ty = t.rx, t.ry, t.sx, t.sy, t.tx, t.ty + divider = sx*sy - rx*ry + return not (sx==1 and rx==0 and ry==0 and sy==1 and tx==0 and ty==0), t.width + end + + local function concat(px, py) -- no tx, ty here + return (sy*px-ry*py)/divider,(sx*py-rx*px)/divider + end + + local function curved(ith,pth) + local d = pth.left_x - ith.right_x + if abs(ith.right_x - ith.x_coord - d) <= bend_tolerance and abs(pth.x_coord - pth.left_x - d) <= bend_tolerance then + d = pth.left_y - ith.right_y + if abs(ith.right_y - ith.y_coord - d) <= bend_tolerance and abs(pth.y_coord - pth.left_y - d) <= bend_tolerance then + return false + end + end + return true + end + + local function flushnormalpath(path,open) + local pth, ith + for i=1,#path do + pth = path[i] + if not ith then + pdf_literalcode("%f %f m",pth.x_coord,pth.y_coord) + elseif curved(ith,pth) then + pdf_literalcode("%f %f %f %f %f %f c",ith.right_x,ith.right_y,pth.left_x,pth.left_y,pth.x_coord,pth.y_coord) + else + pdf_literalcode("%f %f l",pth.x_coord,pth.y_coord) + end + ith = pth + end + if not open then + local one = path[1] + if curved(pth,one) then + pdf_literalcode("%f %f %f %f %f %f c",pth.right_x,pth.right_y,one.left_x,one.left_y,one.x_coord,one.y_coord ) + else + pdf_literalcode("%f %f l",one.x_coord,one.y_coord) + end + elseif #path == 1 then + -- special case .. draw point + local one = path[1] + pdf_literalcode("%f %f l",one.x_coord,one.y_coord) + end + return t + end + + local function flushconcatpath(path,open) + pdf_literalcode("%f %f %f %f %f %f cm", sx, rx, ry, sy, tx ,ty) + local pth, ith + for i=1,#path do + pth = path[i] + if not ith then + pdf_literalcode("%f %f m",concat(pth.x_coord,pth.y_coord)) + elseif curved(ith,pth) then + local a, b = concat(ith.right_x,ith.right_y) + local c, d = concat(pth.left_x,pth.left_y) + pdf_literalcode("%f %f %f %f %f %f c",a,b,c,d,concat(pth.x_coord, pth.y_coord)) + else + pdf_literalcode("%f %f l",concat(pth.x_coord, pth.y_coord)) + end + ith = pth + end + if not open then + local one = path[1] + if curved(pth,one) then + local a, b = concat(pth.right_x,pth.right_y) + local c, d = concat(one.left_x,one.left_y) + pdf_literalcode("%f %f %f %f %f %f c",a,b,c,d,concat(one.x_coord, one.y_coord)) + else + pdf_literalcode("%f %f l",concat(one.x_coord,one.y_coord)) + end + elseif #path == 1 then + -- special case .. draw point + local one = path[1] + pdf_literalcode("%f %f l",concat(one.x_coord,one.y_coord)) + end + return t + end + + --[[ldx-- +

Support for specials has been removed.

+ --ldx]]-- + + function metapost.flush(result,flusher) + if result then + local figures = result.fig + if figures then + for f=1, #figures do + metapost.report("flushing figure %s",f) + local figure = figures[f] + local objects = getobjects(result,figure,f) + local fignum = tonumber(match(figure:filename(),"([%d]+)$") or figure:charcode() or 0) + local miterlimit, linecap, linejoin, dashed = -1, -1, -1, false + local bbox = figure:boundingbox() + local llx, lly, urx, ury = bbox[1], bbox[2], bbox[3], bbox[4] -- faster than unpack + if urx < llx then + -- invalid + pdf_startfigure(fignum,0,0,0,0) + pdf_stopfigure() + else + pdf_startfigure(fignum,llx,lly,urx,ury) + pdf_literalcode("q") + if objects then + for o=1,#objects do + local object = objects[o] + local objecttype = object.type + if objecttype == "start_bounds" or objecttype == "stop_bounds" then + -- skip + elseif objecttype == "start_clip" then + pdf_literalcode("q") + flushnormalpath(object.path,t,false) + pdf_literalcode("W n") + elseif objecttype == "stop_clip" then + pdf_literalcode("Q") + miterlimit, linecap, linejoin, dashed = -1, -1, -1, false + elseif objecttype == "special" then + -- not supported + elseif objecttype == "text" then + local ot = object.transform -- 3,4,5,6,1,2 + pdf_literalcode("q %f %f %f %f %f %f cm",ot[3],ot[4],ot[5],ot[6],ot[1],ot[2]) + pdf_textfigure(object.font,object.dsize,object.text,object.width,object.height,object.depth) + pdf_literalcode("Q") + else + local cs = object.color + if cs and #cs > 0 then + pdf_literalcode(metapost.colorconverter(cs)) + end + local ml = object.miterlimit + if ml and ml ~= miterlimit then + miterlimit = ml + pdf_literalcode("%f M",ml) + end + local lj = object.linejoin + if lj and lj ~= linejoin then + linejoin = lj + pdf_literalcode("%i j",lj) + end + local lc = object.linecap + if lc and lc ~= linecap then + linecap = lc + pdf_literalcode("%i J",lc) + end + local dl = object.dash + if dl then + local d = format("[%s] %i d",concat(dl.dashes or {}," "),dl.offset) + if d ~= dashed then + dashed = d + pdf_literalcode(dashed) + end + elseif dashed then + pdf_literalcode("[] 0 d") + dashed = false + end + local path = object.path + local transformed, penwidth = false, 1 + local open = path and path[1].left_type and path[#path].right_type + local pen = object.pen + if pen then + if pen.type == 'elliptical' then + transformed, penwidth = pen_characteristics(object) -- boolean, value + pdf_literalcode("%f w",penwidth) + if objecttype == 'fill' then + objecttype = 'both' + end + else -- calculated by mplib itself + objecttype = 'fill' + end + end + if transformed then + pdf_literalcode("q") + end + if path then + if transformed then + flushconcatpath(path,open) + else + flushnormalpath(path,open) + end + if objecttype == "fill" then + pdf_literalcode("h f") + elseif objecttype == "outline" then + pdf_literalcode((open and "S") or "h S") + elseif objecttype == "both" then + pdf_literalcode("h B") + end + end + if transformed then + pdf_literalcode("Q") + end + local path = object.htap + if path then + if transformed then + pdf_literalcode("q") + end + if transformed then + flushconcatpath(path,open) + else + flushnormalpath(path,open) + end + if objecttype == "fill" then + pdf_literalcode("h f") + elseif objecttype == "outline" then + pdf_literalcode((open and "S") or "h S") + elseif objecttype == "both" then + pdf_literalcode("h B") + end + if transformed then + pdf_literalcode("Q") + end + end + if cr then + pdf_literalcode(cr) + end + end + end + end + pdf_literalcode("Q") + 
pdf_stopfigure() + end + end + end + end + end + + function metapost.colorconverter(cr) + local n = #cr + if n == 4 then + local c, m, y, k = cr[1], cr[2], cr[3], cr[4] + return format("%.3f %.3f %.3f %.3f k %.3f %.3f %.3f %.3f K",c,m,y,k,c,m,y,k), "0 g 0 G" + elseif n == 3 then + local r, g, b = cr[1], cr[2], cr[3] + return format("%.3f %.3f %.3f rg %.3f %.3f %.3f RG",r,g,b,r,g,b), "0 g 0 G" + else + local s = cr[1] + return format("%.3f g %.3f G",s,s), "0 g 0 G" + end + end + +end diff --git a/src/fontloader/misc/fontloader-mplib.tex b/src/fontloader/misc/fontloader-mplib.tex new file mode 100644 index 0000000..8af9f2d --- /dev/null +++ b/src/fontloader/misc/fontloader-mplib.tex @@ -0,0 +1,124 @@ +%D \module +%D [ file=luatex-mplib, +%D version=2009.12.01, +%D title=\LUATEX\ Support Macros, +%D subtitle=\METAPOST\ to \PDF\ conversion, +%D author=Taco Hoekwater \& Hans Hagen, +%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}] + +%D This is the companion to the \LUA\ module \type {supp-mpl.lua}. Further +%D embedding is up to others. A simple example of usage in plain \TEX\ is: +%D +%D \starttyping +%D \pdfoutput=1 +%D +%D \input luatex-mplib.tex +%D +%D \setmplibformat{plain} +%D +%D \mplibcode +%D beginfig(1); +%D draw fullcircle +%D scaled 10cm +%D withcolor red +%D withpen pencircle xscaled 4mm yscaled 2mm rotated 30 ; +%D endfig; +%D \endmplibcode +%D +%D \end +%D \stoptyping + +\def\setmplibformat#1{\def\mplibformat{#1}} + +\def\setupmplibcatcodes + {\catcode`\{=12 % could be optional .. not really needed + \catcode`\}=12 % could be optional .. not really needed + \catcode`\#=12 + \catcode`\^=12 + \catcode`\~=12 + \catcode`\_=12 + \catcode`\%=12 + \catcode`\&=12 + \catcode`\$=12 } + +\def\mplibcode + {\bgroup + \setupmplibcatcodes + \domplibcode} + +\long\def\domplibcode#1\endmplibcode + {\egroup + \directlua{metapost.process('\mplibformat',[[#1]])}} + +%D We default to \type {plain} \METAPOST: + +\def\mplibformat{plain} + +%D We use a dedicated scratchbox: + +\ifx\mplibscratchbox\undefined \newbox\mplibscratchbox \fi + +%D Now load the needed \LUA\ code. 
+ +\directlua{dofile(kpse.find_file('luatex-mplib.lua'))} + +%D The following code takes care of encapsulating the literals: + +\def\startMPLIBtoPDF#1#2#3#4% + {\hbox\bgroup + \xdef\MPllx{#1}\xdef\MPlly{#2}% + \xdef\MPurx{#3}\xdef\MPury{#4}% + \xdef\MPwidth{\the\dimexpr#3bp-#1bp\relax}% + \xdef\MPheight{\the\dimexpr#4bp-#2bp\relax}% + \parskip0pt% + \leftskip0pt% + \parindent0pt% + \everypar{}% + \setbox\mplibscratchbox\vbox\bgroup + \noindent} + +\def\stopMPLIBtoPDF + {\egroup + \setbox\mplibscratchbox\hbox + {\hskip-\MPllx bp% + \raise-\MPlly bp% + \box\mplibscratchbox}% + \setbox\mplibscratchbox\vbox to \MPheight + {\vfill + \hsize\MPwidth + \wd\mplibscratchbox0pt% + \ht\mplibscratchbox0pt% + \dp\mplibscratchbox0pt% + \box\mplibscratchbox}% + \wd\mplibscratchbox\MPwidth + \ht\mplibscratchbox\MPheight + \box\mplibscratchbox + \egroup} + +%D The body of picture, except for text items, is taken care of by: + +\ifnum\pdfoutput>0 + \let\MPLIBtoPDF\pdfliteral +\else + \def\MPLIBtoPDF#1{\special{pdf:literal direct #1}} % not ok yet +\fi + +%D Text items have a special handler: + +\def\MPLIBtextext#1#2#3#4#5% + {\begingroup + \setbox\mplibscratchbox\hbox + {\font\temp=#1 at #2bp% + \temp + #3}% + \setbox\mplibscratchbox\hbox + {\hskip#4 bp% + \raise#5 bp% + \box\mplibscratchbox}% + \wd\mplibscratchbox0pt% + \ht\mplibscratchbox0pt% + \dp\mplibscratchbox0pt% + \box\mplibscratchbox + \endgroup} + +\endinput diff --git a/src/fontloader/misc/fontloader-plain.tex b/src/fontloader/misc/fontloader-plain.tex new file mode 100644 index 0000000..1ea8558 --- /dev/null +++ b/src/fontloader/misc/fontloader-plain.tex @@ -0,0 +1,27 @@ +%D \module +%D [ file=luatex-plain, +%D version=2009.12.01, +%D title=\LUATEX\ Macros, +%D subtitle=Plain Format, +%D author=Hans Hagen, +%D date=\currentdate, +%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}] + +\input plain + +\directlua {tex.enableprimitives('', tex.extraprimitives())} + +\pdfoutput=1 + +\everyjob \expandafter {% + \the\everyjob + \input {luatex-basics}% + \input {luatex-fonts}% + \input {luatex-math}% + \input {luatex-languages}% + \input {luatex-mplib}% +} + +\edef\fmtversion{\fmtversion+luatex} + +\dump diff --git a/src/fontloader/misc/fontloader-preprocessor-test.tex b/src/fontloader/misc/fontloader-preprocessor-test.tex new file mode 100644 index 0000000..857b28f --- /dev/null +++ b/src/fontloader/misc/fontloader-preprocessor-test.tex @@ -0,0 +1,30 @@ +\ifdefined\inputpreprocessed + + \def\TestOne[#1]% + {test one: [#1]\par} + + \def\TestTwo#some% + {test two: #some\par} + + \def\TestThree[#whatever][#more]% + {test three: [#more] and [#whatever]\par} + + \def\TestFour[#one]#two% + {\def\TestFive[#alpha][#one]% + {test four and five: [#one], [#two] and [#alpha]}\par} + + \def\TestSix[#{one}]#{two}% + {test six: [#{one}] and #{two}\par} + + \TestOne [one] + \TestTwo {one} + \TestThree[one][two] + \TestFour [one]{two} + \TestFive [one][two] + \TestSix [one]{two} + +\else + \input{luatex-preprocessor.tex} + \inputpreprocessed{luatex-preprocessor-test.tex} + \expandafter \end +\fi diff --git a/src/fontloader/misc/fontloader-preprocessor.lua b/src/fontloader/misc/fontloader-preprocessor.lua new file mode 100644 index 0000000..8faa0b4 --- /dev/null +++ b/src/fontloader/misc/fontloader-preprocessor.lua @@ -0,0 +1,163 @@ +if not modules then modules = { } end modules ['luatex-preprocessor'] = { + version = 1.001, + comment = "companion to luatex-preprocessor.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt 
Development Team", + license = "see context related readme files" +} + +--[[ldx +

This is a stripped down version of the ConTeXt preprocessor. In +ConTeXt we have a bit more, use a different logger, and +use a few optimizations. A few examples are shown at the end.

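As an added example (not part of the original comment), the parser defined
below rewrites named macro parameters into numbered ones, so a call such as

  commands.preprocessed("\\def\\test#oeps{test:#oeps}")

is expected to return \def\test#1{test:#1}.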
+--ldx]] + +local rep, sub, gmatch = string.rep, string.sub, string.gmatch +local insert, remove = table.insert, table.remove +local setmetatable = setmetatable + +local stack, top, n, hashes = { }, nil, 0, { } + +local function set(s) + if top then + n = n + 1 + if n > 9 then + texio.write_nl("number of arguments > 9, ignoring: " .. s) + else + local ns = #stack + local h = hashes[ns] + if not h then + h = rep("#",ns) + hashes[ns] = h + end + m = h .. n + top[s] = m + return m + end + end +end + +local function get(s) + local m = top and top[s] or s + return m +end + +local function push() + top = { } + n = 0 + local s = stack[#stack] + if s then + setmetatable(top,{ __index = s }) + end + insert(stack,top) +end + +local function pop() + top = remove(stack) +end + +local leftbrace = lpeg.P("{") +local rightbrace = lpeg.P("}") +local escape = lpeg.P("\\") + +local space = lpeg.P(" ") +local spaces = space^1 +local newline = lpeg.S("\r\n") +local nobrace = 1 - leftbrace - rightbrace + +local name = lpeg.R("AZ","az")^1 +local longname = (leftbrace/"") * (nobrace^1) * (rightbrace/"") +local variable = lpeg.P("#") * lpeg.Cs(name + longname) +local escapedname = escape * name +local definer = escape * (lpeg.P("def") + lpeg.P("egdx") * lpeg.P("def")) +local anything = lpeg.P(1) +local always = lpeg.P(true) + +local pushlocal = always / push +local poplocal = always / pop +local declaration = variable / set +local identifier = variable / get + +local function matcherror(str,pos) + texio.write_nl("runaway definition at: " .. sub(str,pos-30,pos)) +end + +local parser = lpeg.Cs { "converter", + definition = pushlocal + * definer + * escapedname + * (declaration + (1-leftbrace))^0 + * lpeg.V("braced") + * poplocal, + braced = leftbrace + * ( lpeg.V("definition") + + identifier + + lpeg.V("braced") + + nobrace + )^0 + * (rightbrace + lpeg.Cmt(always,matcherror)), + converter = (lpeg.V("definition") + anything)^1, +} + +--[[ldx +

We provide a few commands.

+--ldx]] + +-- local texkpse + +local function find_file(...) + -- texkpse = texkpse or kpse.new("luatex","tex") + -- return texkpse:find_file(...) or "" + return kpse.find_file(...) or "" +end + +commands = commands or { } + +function commands.preprocessed(str) + return lpeg.match(parser,str) +end + +function commands.inputpreprocessed(name) + local name = find_file(name) or "" + if name ~= "" then + -- we could use io.loaddata as it's loaded in luatex-plain + local f = io.open(name,'rb') + if f then + texio.write("("..name) + local d = commands.preprocessed(f:read("*a")) + if d and d ~= "" then + texio.write("processed: " .. name) + for s in gmatch(d,"[^\n\r]+") do + tex.print(s) -- we do a dumb feedback + end + end + f:close() + texio.write(")") + else + tex.error("preprocessor error, invalid file: " .. name) + end + else + tex.error("preprocessor error, unknown file: " .. name) + end +end + +function commands.preprocessfile(oldfile,newfile) -- no checking + if oldfile and oldfile ~= newfile then + local f = io.open(oldfile,'rb') + if f then + local g = io.open(newfile,'wb') + if g then + g:write(lpeg.match(parser,f:read("*a") or "")) + g:close() + end + f:close() + end + end +end + +--~ print(preprocessed([[\def\test#oeps{test:#oeps}]])) +--~ print(preprocessed([[\def\test#oeps{test:#{oeps}}]])) +--~ print(preprocessed([[\def\test#{oeps:1}{test:#{oeps:1}}]])) +--~ print(preprocessed([[\def\test#{oeps}{test:#oeps}]])) +--~ preprocessed([[\def\test#{oeps}{test:#oeps \halign{##\cr #oeps\cr}]]) +--~ print(preprocessed([[\def\test#{oeps}{test:#oeps \halign{##\cr #oeps\cr}}]])) diff --git a/src/fontloader/misc/fontloader-preprocessor.tex b/src/fontloader/misc/fontloader-preprocessor.tex new file mode 100644 index 0000000..03b483f --- /dev/null +++ b/src/fontloader/misc/fontloader-preprocessor.tex @@ -0,0 +1,14 @@ +%D \module +%D [ file=luatex-preprocessor, +%D version=2010.12.02, +%D title=\LUATEX\ Support Macros, +%D subtitle=Generic Preprocessor, +%D author=Hans Hagen, +%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}] + +\directlua{dofile(kpse.find_file('luatex-preprocessor.lua'))} + +\def\inputpreprocessed#1% + {\directlua{commands.inputpreprocessed("#1")}} + +\endinput diff --git a/src/fontloader/misc/fontloader-swiglib-test.lua b/src/fontloader/misc/fontloader-swiglib-test.lua new file mode 100644 index 0000000..db6a729 --- /dev/null +++ b/src/fontloader/misc/fontloader-swiglib-test.lua @@ -0,0 +1,25 @@ +local gm = swiglib("gmwand.core") + +gm.InitializeMagick(".") + +local magick_wand = gm.NewMagickWand() +local drawing_wand = gm.NewDrawingWand() + +gm.MagickSetSize(magick_wand,800,600) +gm.MagickReadImage(magick_wand,"xc:red") + +gm.DrawPushGraphicContext(drawing_wand) + +gm.DrawSetFillColor(drawing_wand,gm.NewPixelWand()) + +-- gm.DrawSetFont(drawing_wand, kpse.findfile("DejaVuSerifBold.ttf")) +-- gm.DrawSetFontSize(drawing_wand, 96) +-- gm.DrawAnnotation(drawing_wand,300,200, "LuaTeX") + +gm.DrawPopGraphicContext(drawing_wand) +gm.MagickDrawImage(magick_wand,drawing_wand) + +gm.MagickWriteImages(magick_wand,"./luatex-swiglib-test.jpg",1) + +gm.DestroyDrawingWand(drawing_wand) +gm.DestroyMagickWand(magick_wand) diff --git a/src/fontloader/misc/fontloader-swiglib-test.tex b/src/fontloader/misc/fontloader-swiglib-test.tex new file mode 100644 index 0000000..d26bb6f --- /dev/null +++ b/src/fontloader/misc/fontloader-swiglib-test.tex @@ -0,0 +1,11 @@ +% luatex --fmt=luatex=plain luatex-swiglib-test.tex + +\input luatex-swiglib.tex + +\directlua { + 
dofile("luatex-swiglib-test.lua") +} + +\pdfximage {luatex-swiglib-test.jpg} \pdfrefximage\pdflastximage + +\end diff --git a/src/fontloader/misc/fontloader-swiglib.lua b/src/fontloader/misc/fontloader-swiglib.lua new file mode 100644 index 0000000..7ffcdc3 --- /dev/null +++ b/src/fontloader/misc/fontloader-swiglib.lua @@ -0,0 +1,62 @@ +if not modules then modules = { } end modules ['luatex-swiglib'] = { + version = 1.001, + comment = "companion to luatex-swiglib.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local savedrequire = require + +local libsuffix = os.type == "windows" and ".dll" or ".so" + +function requireswiglib(required,version) + local library = package.loaded[required] + if library then + return library + else + local name = string.gsub(required,"%.","/") .. libsuffix + local list = kpse.show_path("clua") + for root in string.gmatch(list,"([^;]+)") do + local full = false + if type(version) == "string" and version ~= "" then + full = root .. "/" .. version .. "/" .. name + full = lfs.isfile(full) and full + end + if not full then + full = root .. "/" .. name + full = lfs.isfile(full) and full + end + if full then + local path, base = string.match(full,"^(.-)([^\\/]+)" .. libsuffix .."$") + local savedlibrary = package.loaded[base] + package.loaded[base] = nil + local savedpath = lfs.currentdir() + lfs.chdir(path) + library = package.loadlib(full,"luaopen_" .. base) + if type(library) == "function" then + library = library() + texio.write("") + end + lfs.chdir(savedpath) + package.loaded[base] = savedlibrary + package.loaded[required] = library + return library + end + end + texio.write("") +end + +function require(name) + if string.find(name,"^swiglib%.") then + return requireswiglib(name) + else + return savedrequire(name) + end +end + +function swiglib(name,version) + return requireswiglib("swiglib." .. name,version) +end diff --git a/src/fontloader/misc/fontloader-swiglib.tex b/src/fontloader/misc/fontloader-swiglib.tex new file mode 100644 index 0000000..7c43775 --- /dev/null +++ b/src/fontloader/misc/fontloader-swiglib.tex @@ -0,0 +1,20 @@ +%D \module +%D [ file=luatex-swiglib, +%D version=2013.03.30, +%D title=\LUATEX\ Support Macros, +%D subtitle=Generic \SWIGLIB\ Font Handler, +%D author=Hans Hagen, +%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}] + +%D This is an experimental setup. Usage: +%D +%D \starttyping +%D local gm = swiglib("gmwand.core") +%D local gm = require("swiglib.gmwand.core") +%D local sq = swiglib("mysql.core") +%D local sq = swiglib("mysql.core","5.6") +%D \stoptyping + +\directlua { + dofile(kpse.find_file("luatex-swiglib.lua","tex")) +} diff --git a/src/fontloader/misc/fontloader-test.tex b/src/fontloader/misc/fontloader-test.tex new file mode 100644 index 0000000..169a260 --- /dev/null +++ b/src/fontloader/misc/fontloader-test.tex @@ -0,0 +1,112 @@ +%D \module +%D [ file=luatex-test, +%D version=2009.12.01, +%D title=\LUATEX\ Support Macros, +%D subtitle=Simple Test File, +%D author=Hans Hagen, +%D date=\currentdate, +%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}] + +%D See \type {luatex-plain.tex} (or on my machine \type {luatex.tex} +%D for how to make a format. + +% You can generate a font database with: +% +% mtxrun --script fonts --reload --save +% +% The file luatex-fonts-names.lua has to be moved to a place +% where kpse can find it. 
+ +\pdfoutput=1 + +\font\testa=file:lmroman10-regular at 12pt \testa \input tufte \par +\font\testb=file:lmroman12-regular:+liga; at 24pt \testb effe flink fietsen \par +\font\testc=file:lmroman12-regular:mode=node;+liga; at 24pt \testc effe flink fietsen \par +\font\testd=name:lmroman10bold at 12pt \testd a bit bold \par + +\font\oeps=cmr10 + +\font\oeps=[lmroman12-regular]:+liga at 30pt \oeps crap +\font\oeps=[lmroman12-regular] at 40pt \oeps more crap + +\font\cidtest=adobesongstd-light + +\font\mathtest=cambria(math) {\mathtest 123} + +\font\gothic=msgothic(ms-gothic) {\gothic whatever} + +\font\testy=file:IranNastaliq.ttf:mode=node;script=arab;language=dflt;+calt;+ccmp;+init;+isol;+medi;+fina;+liga;+rlig;+kern;+mark;+mkmk at 14pt +\testy این یک متن نمونه است با قلم ذر که درست آمده است. + +\pdfprotrudechars2 \pdfadjustspacing2 + +\font\testb=file:lmroman12-regular:+liga;extend=1.5 at 12pt \testb \input tufte \par +\font\testb=file:lmroman12-regular:+liga;slant=0.8 at 12pt \testb \input tufte \par +\font\testb=file:lmroman12-regular:+liga;protrusion=default at 12pt \testb \input tufte \par + +\setmplibformat{plain} + +\mplibcode + beginfig(1) ; + draw fullcircle + scaled 10cm + withcolor red + withpen pencircle xscaled 4mm yscaled 2mm rotated 30 ; + endfig ; +\endmplibcode + +\font\mine=file:luatex-fonts-demo-vf-1.lua at 12pt + +\mine \input tufte \par + +% \font\mine=file:luatex-fonts-demo-vf-2.lua at 12pt \mine [abab] \par +% \font\mine=file:luatex-fonts-demo-vf-3.lua at 12pt \mine [abab] \par + +\font\test=dejavuserif:+kern at 10pt \test + + +\bgroup \hsize 1mm \noindent Циолковский \par \egroup + +\loadpatterns{ru} + +\bgroup \hsize 1mm \noindent Циолковский \par \egroup + +a bit of math + +$\it e=mc^2 \bf e=mc^2 \Uchar"1D49D$ + +$$\left( { {1} \over { {1} \over {x} } } \right) $$ + +$$\sqrt {2} { { {1} \over { {1} \over {x} } } } $$ + +\font\cows=file:koeieletters.afm at 50pt + +\cows Hello World! + +% math test + +\latinmodern + +\def\sqrt{\Uroot "0 "221A{}} + +\def\root#1\of{\Uroot "0 "221A{#1}} + +Inline $\sqrt{x}{1.2}$ math. % same for $\root n of x$ + +$\root3\of x$ + +$\sin{x}$ + +\lucidabright + +\def\sqrt{\Uroot "0 "221A{}} + +\def\root#1\of{\Uroot "0 "221A{#1}} + +Inline $\sqrt{x}{1.2}$ math. 
% same for $\root n of x$ + +$\root3\of x$ + +$\sin{x}$ + +\end diff --git a/src/fontloader/misc/fontloader-util-str.lua b/src/fontloader/misc/fontloader-util-str.lua new file mode 100644 index 0000000..8529c3a --- /dev/null +++ b/src/fontloader/misc/fontloader-util-str.lua @@ -0,0 +1,1117 @@ +if not modules then modules = { } end modules ['util-str'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +utilities = utilities or {} +utilities.strings = utilities.strings or { } +local strings = utilities.strings + +local format, gsub, rep, sub = string.format, string.gsub, string.rep, string.sub +local load, dump = load, string.dump +local tonumber, type, tostring = tonumber, type, tostring +local unpack, concat = table.unpack, table.concat +local P, V, C, S, R, Ct, Cs, Cp, Carg, Cc = lpeg.P, lpeg.V, lpeg.C, lpeg.S, lpeg.R, lpeg.Ct, lpeg.Cs, lpeg.Cp, lpeg.Carg, lpeg.Cc +local patterns, lpegmatch = lpeg.patterns, lpeg.match +local utfchar, utfbyte = utf.char, utf.byte +----- loadstripped = utilities.lua.loadstripped +----- setmetatableindex = table.setmetatableindex + +local loadstripped = nil + +if _LUAVERSION < 5.2 then + + loadstripped = function(str,shortcuts) + return load(str) + end + +else + + loadstripped = function(str,shortcuts) + if shortcuts then + return load(dump(load(str),true),nil,nil,shortcuts) + else + return load(dump(load(str),true)) + end + end + +end + +-- todo: make a special namespace for the formatter + +if not number then number = { } end -- temp hack for luatex-fonts + +local stripper = patterns.stripzeros + +local function points(n) + n = tonumber(n) + return (not n or n == 0) and "0pt" or lpegmatch(stripper,format("%.5fpt",n/65536)) +end + +local function basepoints(n) + n = tonumber(n) + return (not n or n == 0) and "0bp" or lpegmatch(stripper,format("%.5fbp", n*(7200/7227)/65536)) +end + +number.points = points +number.basepoints = basepoints + +-- str = " \n \ntest \n test\ntest " +-- print("["..string.gsub(string.collapsecrlf(str),"\n","+").."]") + +local rubish = patterns.spaceortab^0 * patterns.newline +local anyrubish = patterns.spaceortab + patterns.newline +local anything = patterns.anything +local stripped = (patterns.spaceortab^1 / "") * patterns.newline +local leading = rubish^0 / "" +local trailing = (anyrubish^1 * patterns.endofstring) / "" +local redundant = rubish^3 / "\n" + +local pattern = Cs(leading * (trailing + redundant + stripped + anything)^0) + +function strings.collapsecrlf(str) + return lpegmatch(pattern,str) +end + +-- The following functions might end up in another namespace. 
+ +local repeaters = { } -- watch how we also moved the -1 in depth-1 to the creator + +function strings.newrepeater(str,offset) + offset = offset or 0 + local s = repeaters[str] + if not s then + s = { } + repeaters[str] = s + end + local t = s[offset] + if t then + return t + end + t = { } + setmetatable(t, { __index = function(t,k) + if not k then + return "" + end + local n = k + offset + local s = n > 0 and rep(str,n) or "" + t[k] = s + return s + end }) + s[offset] = t + return t +end + +-- local dashes = strings.newrepeater("--",-1) +-- print(dashes[2],dashes[3],dashes[1]) + +local extra, tab, start = 0, 0, 4, 0 + +local nspaces = strings.newrepeater(" ") + +string.nspaces = nspaces + +local pattern = + Carg(1) / function(t) + extra, tab, start = 0, t or 7, 1 + end + * Cs(( + Cp() * patterns.tab / function(position) + local current = (position - start + 1) + extra + local spaces = tab-(current-1) % tab + if spaces > 0 then + extra = extra + spaces - 1 + return nspaces[spaces] -- rep(" ",spaces) + else + return "" + end + end + + patterns.newline * Cp() / function(position) + extra, start = 0, position + end + + patterns.anything + )^1) + +function strings.tabtospace(str,tab) + return lpegmatch(pattern,str,1,tab or 7) +end + +-- local t = { +-- "1234567123456712345671234567", +-- "\tb\tc", +-- "a\tb\tc", +-- "aa\tbb\tcc", +-- "aaa\tbbb\tccc", +-- "aaaa\tbbbb\tcccc", +-- "aaaaa\tbbbbb\tccccc", +-- "aaaaaa\tbbbbbb\tcccccc\n aaaaaa\tbbbbbb\tcccccc", +-- "one\n two\nxxx three\nxx four\nx five\nsix", +-- } +-- for k=1,#t do +-- print(strings.tabtospace(t[k])) +-- end + +-- todo: lpeg + +-- function strings.striplong(str) -- strips all leading spaces +-- str = gsub(str,"^%s*","") +-- str = gsub(str,"[\n\r]+ *","\n") +-- return str +-- end + +local newline = patterns.newline +local endofstring = patterns.endofstring +local whitespace = patterns.whitespace +local spacer = patterns.spacer + +local space = spacer^0 +local nospace = space/"" +local endofline = nospace * newline + +local stripend = (whitespace^1 * endofstring)/"" + +local normalline = (nospace * ((1-space*(newline+endofstring))^1) * nospace) + +local stripempty = endofline^1/"" +local normalempty = endofline^1 +local singleempty = endofline * (endofline^0/"") +local doubleempty = endofline * endofline^-1 * (endofline^0/"") + +local stripstart = stripempty^0 + +local p_prune_normal = Cs ( stripstart * ( stripend + normalline + normalempty )^0 ) +local p_prune_collapse = Cs ( stripstart * ( stripend + normalline + doubleempty )^0 ) +local p_prune_noempty = Cs ( stripstart * ( stripend + normalline + singleempty )^0 ) +local p_retain_normal = Cs ( ( normalline + normalempty )^0 ) +local p_retain_collapse = Cs ( ( normalline + doubleempty )^0 ) +local p_retain_noempty = Cs ( ( normalline + singleempty )^0 ) + +-- function striplines(str,prune,collapse,noempty) +-- if prune then +-- if noempty then +-- return lpegmatch(p_prune_noempty,str) or str +-- elseif collapse then +-- return lpegmatch(p_prune_collapse,str) or str +-- else +-- return lpegmatch(p_prune_normal,str) or str +-- end +-- else +-- if noempty then +-- return lpegmatch(p_retain_noempty,str) or str +-- elseif collapse then +-- return lpegmatch(p_retain_collapse,str) or str +-- else +-- return lpegmatch(p_retain_normal,str) or str +-- end +-- end +-- end + +local striplinepatterns = { + ["prune"] = p_prune_normal, + ["prune and collapse"] = p_prune_collapse, -- default + ["prune and no empty"] = p_prune_noempty, + ["retain"] = p_retain_normal, + ["retain and collapse"] = 
p_retain_collapse, + ["retain and no empty"] = p_retain_noempty, + ["collapse"] = patterns.collapser, -- how about: stripper fullstripper +} + +strings.striplinepatterns = striplinepatterns + +function strings.striplines(str,how) + return str and lpegmatch(how and striplinepatterns[how] or p_prune_collapse,str) or str +end + +-- also see: string.collapsespaces + +strings.striplong = strings.striplines -- for old times sake + +-- local str = table.concat( { +-- " ", +-- " aap", +-- " noot mies", +-- " ", +-- " ", +-- " zus wim jet", +-- "zus wim jet", +-- " zus wim jet", +-- " ", +-- }, "\n") + +-- local str = table.concat( { +-- " aaaa", +-- " bb", +-- " cccccc", +-- }, "\n") + +-- for k, v in table.sortedhash(utilities.strings.striplinepatterns) do +-- logs.report("stripper","method: %s, result: [[%s]]",k,utilities.strings.striplines(str,k)) +-- end + +-- inspect(strings.striplong([[ +-- aaaa +-- bb +-- cccccc +-- ]])) + +function strings.nice(str) + str = gsub(str,"[:%-+_]+"," ") -- maybe more + return str +end + +-- Work in progress. Interesting is that compared to the built-in this is faster in +-- luatex than in luajittex where we have a comparable speed. It only makes sense +-- to use the formatter when a (somewhat) complex format is used a lot. Each formatter +-- is a function so there is some overhead and not all formatted output is worth that +-- overhead. Keep in mind that there is an extra function call involved. In principle +-- we end up with a string concatination so one could inline such a sequence but often +-- at the cost of less readabinity. So, it's a sort of (visual) compromise. Of course +-- there is the benefit of more variants. (Concerning the speed: a simple format like +-- %05fpt is better off with format than with a formatter, but as soon as you put +-- something in front formatters become faster. Passing the pt as extra argument makes +-- formatters behave better. Of course this is rather implementation dependent. Also, +-- when a specific format is only used a few times the overhead in creating it is not +-- compensated by speed.) +-- +-- More info can be found in cld-mkiv.pdf so here I stick to a simple list. +-- +-- integer %...i number +-- integer %...d number +-- unsigned %...u number +-- character %...c number +-- hexadecimal %...x number +-- HEXADECIMAL %...X number +-- octal %...o number +-- string %...s string number +-- float %...f number +-- checked float %...F number +-- exponential %...e number +-- exponential %...E number +-- autofloat %...g number +-- autofloat %...G number +-- utf character %...c number +-- force tostring %...S any +-- force tostring %Q any +-- force tonumber %N number (strip leading zeros) +-- signed number %I number +-- rounded number %r number +-- 0xhexadecimal %...h character number +-- 0xHEXADECIMAL %...H character number +-- U+hexadecimal %...u character number +-- U+HEXADECIMAL %...U character number +-- points %p number (scaled points) +-- basepoints %b number (scaled points) +-- table concat %...t table +-- table concat %{.}t table +-- serialize %...T sequenced (no nested tables) +-- serialize %{.}T sequenced (no nested tables) +-- boolean (logic) %l boolean +-- BOOLEAN %L boolean +-- whitespace %...w +-- automatic %...a 'whatever' (string, table, ...) +-- automatic %...A "whatever" (string, table, ...) 
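-- As an added illustration (not in the original file): these directives are used
-- through the formatter functions built in the remainder of this file, e.g.,
-- assuming string.formatters ends up defined as usual:
--
-- local f = string.formatters["%s: %p"]
-- print(f("width",10*65536)) -- "width: 10pt" (%p formats scaled points)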
+ +local n = 0 + +-- we are somewhat sloppy in parsing prefixes as it's not that critical + +-- hard to avoid but we can collect them in a private namespace if needed + +-- inline the next two makes no sense as we only use this in logging + +local sequenced = table.sequenced + +function string.autodouble(s,sep) + if s == nil then + return '""' + end + local t = type(s) + if t == "number" then + return tostring(s) -- tostring not really needed + end + if t == "table" then + return ('"' .. sequenced(s,sep or ",") .. '"') + end + return ('"' .. tostring(s) .. '"') +end + +function string.autosingle(s,sep) + if s == nil then + return "''" + end + local t = type(s) + if t == "number" then + return tostring(s) -- tostring not really needed + end + if t == "table" then + return ("'" .. sequenced(s,sep or ",") .. "'") + end + return ("'" .. tostring(s) .. "'") +end + +local tracedchars = { } +string.tracedchars = tracedchars +strings.tracers = tracedchars + +function string.tracedchar(b) + -- todo: table + if type(b) == "number" then + return tracedchars[b] or (utfchar(b) .. " (U+" .. format('%05X',b) .. ")") + else + local c = utfbyte(b) + return tracedchars[c] or (b .. " (U+" .. format('%05X',c) .. ")") + end +end + +function number.signed(i) + if i > 0 then + return "+", i + else + return "-", -i + end +end + +local zero = P("0")^1 / "" +local plus = P("+") / "" +local minus = P("-") +local separator = S(".") +local digit = R("09") +local trailing = zero^1 * #S("eE") +local exponent = (S("eE") * (plus + Cs((minus * zero^0 * P(-1))/"") + minus) * zero^0 * (P(-1) * Cc("0") + P(1)^1)) +local pattern_a = Cs(minus^0 * digit^1 * (separator/"" * trailing + separator * (trailing + digit)^0) * exponent) +local pattern_b = Cs((exponent + P(1))^0) + +function number.sparseexponent(f,n) + if not n then + n = f + f = "%e" + end + local tn = type(n) + if tn == "string" then -- cast to number + local m = tonumber(n) + if m then + return lpegmatch((f == "%e" or f == "%E") and pattern_a or pattern_b,format(f,m)) + end + elseif tn == "number" then + return lpegmatch((f == "%e" or f == "%E") and pattern_a or pattern_b,format(f,n)) + end + return tostring(n) +end + +local template = [[ +%s +%s +return function(%s) return %s end +]] + +local preamble, environment = "", { } + +if _LUAVERSION < 5.2 then + + preamble = [[ +local lpeg=lpeg +local type=type +local tostring=tostring +local tonumber=tonumber +local format=string.format +local concat=table.concat +local signed=number.signed +local points=number.points +local basepoints= number.basepoints +local utfchar=utf.char +local utfbyte=utf.byte +local lpegmatch=lpeg.match +local nspaces=string.nspaces +local tracedchar=string.tracedchar +local autosingle=string.autosingle +local autodouble=string.autodouble +local sequenced=table.sequenced +local formattednumber=number.formatted +local sparseexponent=number.sparseexponent + ]] + +else + + environment = { + global = global or _G, + lpeg = lpeg, + type = type, + tostring = tostring, + tonumber = tonumber, + format = string.format, + concat = table.concat, + signed = number.signed, + points = number.points, + basepoints = number.basepoints, + utfchar = utf.char, + utfbyte = utf.byte, + lpegmatch = lpeg.match, + nspaces = string.nspaces, + tracedchar = string.tracedchar, + autosingle = string.autosingle, + autodouble = string.autodouble, + sequenced = table.sequenced, + formattednumber = number.formatted, + sparseexponent = number.sparseexponent, + } + +end + +-- -- -- + +local arguments = { "a1" } -- faster than 
previously used (select(n,...)) + +setmetatable(arguments, { __index = + function(t,k) + local v = t[k-1] .. ",a" .. k + t[k] = v + return v + end +}) + +local prefix_any = C((S("+- .") + R("09"))^0) +local prefix_tab = P("{") * C((1-P("}"))^0) * P("}") + C((1-R("az","AZ","09","%%"))^0) + +-- we've split all cases as then we can optimize them (let's omit the fuzzy u) + +-- todo: replace outer formats in next by .. + +local format_s = function(f) + n = n + 1 + if f and f ~= "" then + return format("format('%%%ss',a%s)",f,n) + else -- best no tostring in order to stay compatible (.. does a selective tostring too) + return format("(a%s or '')",n) -- goodie: nil check + end +end + +local format_S = function(f) -- can be optimized + n = n + 1 + if f and f ~= "" then + return format("format('%%%ss',tostring(a%s))",f,n) + else + return format("tostring(a%s)",n) + end +end + +local format_q = function() + n = n + 1 + return format("(a%s and format('%%q',a%s) or '')",n,n) -- goodie: nil check (maybe separate lpeg, not faster) +end + +local format_Q = function() -- can be optimized + n = n + 1 + return format("format('%%q',tostring(a%s))",n) +end + +local format_i = function(f) + n = n + 1 + if f and f ~= "" then + return format("format('%%%si',a%s)",f,n) + else + return format("format('%%i',a%s)",n) -- why not just tostring() + end +end + +local format_d = format_i + +local format_I = function(f) + n = n + 1 + return format("format('%%s%%%si',signed(a%s))",f,n) +end + +local format_f = function(f) + n = n + 1 + return format("format('%%%sf',a%s)",f,n) +end + +-- The next one formats an integer as integer and very small values as zero. This is needed +-- for pdf backend code. +-- +-- 1.23 % 1 : 0.23 +-- - 1.23 % 1 : 0.77 +-- +-- We could probably use just %s with integers but who knows what Lua 5.3 will do? So let's +-- for the moment use %i. 
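+
+-- Editorial illustration (not upstream): with the zero window and the modulo
+-- check used below, a plain %F behaves roughly like this:
+--
+-- formatters["%F"](4)     -- "4"            (an integral value stays %i)
+-- formatters["%F"](4.25)  -- "4.250000000"  (otherwise %.9f is used)
+-- formatters["%F"](1e-12) -- "0"            (values close to zero collapse)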
+ +local format_F = function(f) -- beware, no cast to number + n = n + 1 + if not f or f == "" then + return format("(((a%s > -0.0000000005 and a%s < 0.0000000005) and '0') or format((a%s %% 1 == 0) and '%%i' or '%%.9f',a%s))",n,n,n,n) + else + return format("format((a%s %% 1 == 0) and '%%i' or '%%%sf',a%s)",n,f,n) + end +end + +local format_g = function(f) + n = n + 1 + return format("format('%%%sg',a%s)",f,n) +end + +local format_G = function(f) + n = n + 1 + return format("format('%%%sG',a%s)",f,n) +end + +local format_e = function(f) + n = n + 1 + return format("format('%%%se',a%s)",f,n) +end + +local format_E = function(f) + n = n + 1 + return format("format('%%%sE',a%s)",f,n) +end + +local format_j = function(f) + n = n + 1 + return format("sparseexponent('%%%se',a%s)",f,n) +end + +local format_J = function(f) + n = n + 1 + return format("sparseexponent('%%%sE',a%s)",f,n) +end + +local format_x = function(f) + n = n + 1 + return format("format('%%%sx',a%s)",f,n) +end + +local format_X = function(f) + n = n + 1 + return format("format('%%%sX',a%s)",f,n) +end + +local format_o = function(f) + n = n + 1 + return format("format('%%%so',a%s)",f,n) +end + +local format_c = function() + n = n + 1 + return format("utfchar(a%s)",n) +end + +local format_C = function() + n = n + 1 + return format("tracedchar(a%s)",n) +end + +local format_r = function(f) + n = n + 1 + return format("format('%%%s.0f',a%s)",f,n) +end + +local format_h = function(f) + n = n + 1 + if f == "-" then + f = sub(f,2) + return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n) + else + return format("format('0x%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n) + end +end + +local format_H = function(f) + n = n + 1 + if f == "-" then + f = sub(f,2) + return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n) + else + return format("format('0x%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n) + end +end + +local format_u = function(f) + n = n + 1 + if f == "-" then + f = sub(f,2) + return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n) + else + return format("format('u+%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n) + end +end + +local format_U = function(f) + n = n + 1 + if f == "-" then + f = sub(f,2) + return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n) + else + return format("format('U+%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n) + end +end + +local format_p = function() + n = n + 1 + return format("points(a%s)",n) +end + +local format_b = function() + n = n + 1 + return format("basepoints(a%s)",n) +end + +local format_t = function(f) + n = n + 1 + if f and f ~= "" then + return format("concat(a%s,%q)",n,f) + else + return format("concat(a%s)",n) + end +end + +local format_T = function(f) + n = n + 1 + if f and f ~= "" then + return format("sequenced(a%s,%q)",n,f) + else + return format("sequenced(a%s)",n) + end +end + +local format_l = function() + n = n + 1 + return format("(a%s and 'true' or 'false')",n) +end + +local format_L = function() + n = n + 1 + return format("(a%s and 'TRUE' or 'FALSE')",n) +end + +local format_N = function() -- strips leading zeros + n = n + 1 + return format("tostring(tonumber(a%s) or a%s)",n,n) +end + +local format_a = function(f) + n = n + 1 + if f 
and f ~= "" then + return format("autosingle(a%s,%q)",n,f) + else + return format("autosingle(a%s)",n) + end +end + +local format_A = function(f) + n = n + 1 + if f and f ~= "" then + return format("autodouble(a%s,%q)",n,f) + else + return format("autodouble(a%s)",n) + end +end + +local format_w = function(f) -- handy when doing depth related indent + n = n + 1 + f = tonumber(f) + if f then -- not that useful + return format("nspaces[%s+a%s]",f,n) -- no real need for tonumber + else + return format("nspaces[a%s]",n) -- no real need for tonumber + end +end + +local format_W = function(f) -- handy when doing depth related indent + return format("nspaces[%s]",tonumber(f) or 0) +end + +-- maybe to util-num + +local digit = patterns.digit +local period = patterns.period +local three = digit * digit * digit + +local splitter = Cs ( + (((1 - (three^1 * period))^1 + C(three)) * (Carg(1) * three)^1 + C((1-period)^1)) + * (P(1)/"" * Carg(2)) * C(2) +) + +patterns.formattednumber = splitter + +function number.formatted(n,sep1,sep2) + local s = type(s) == "string" and n or format("%0.2f",n) + if sep1 == true then + return lpegmatch(splitter,s,1,".",",") + elseif sep1 == "." then + return lpegmatch(splitter,s,1,sep1,sep2 or ",") + elseif sep1 == "," then + return lpegmatch(splitter,s,1,sep1,sep2 or ".") + else + return lpegmatch(splitter,s,1,sep1 or ",",sep2 or ".") + end +end + +-- print(number.formatted(1)) +-- print(number.formatted(12)) +-- print(number.formatted(123)) +-- print(number.formatted(1234)) +-- print(number.formatted(12345)) +-- print(number.formatted(123456)) +-- print(number.formatted(1234567)) +-- print(number.formatted(12345678)) +-- print(number.formatted(12345678,true)) +-- print(number.formatted(1234.56,"!","?")) + +local format_m = function(f) + n = n + 1 + if not f or f == "" then + f = "," + end + return format([[formattednumber(a%s,%q,".")]],n,f) +end + +local format_M = function(f) + n = n + 1 + if not f or f == "" then + f = "." + end + return format([[formattednumber(a%s,%q,",")]],n,f) +end + +-- + +local format_z = function(f) + n = n + (tonumber(f) or 1) + return "''" -- okay, not that efficient to append '' but a special case anyway +end + +-- + +local format_rest = function(s) + return format("%q",s) -- catches " and \n and such +end + +local format_extension = function(extensions,f,name) + local extension = extensions[name] or "tostring(%s)" + local f = tonumber(f) or 1 + if f == 0 then + return extension + elseif f == 1 then + n = n + 1 + local a = "a" .. n + return format(extension,a,a) -- maybe more times? + elseif f < 0 then + local a = "a" .. (n + f + 1) + return format(extension,a,a) + else + local t = { } + for i=1,f do + n = n + 1 + t[#t+1] = "a" .. 
n + end + return format(extension,unpack(t)) + end +end + +-- aA b cC d eE f gG hH iI jJ lL mM N o p qQ r sS tT uU wW xX z + +local builder = Cs { "start", + start = ( + ( + P("%") / "" + * ( + V("!") -- new + + V("s") + V("q") + + V("i") + V("d") + + V("f") + V("F") + V("g") + V("G") + V("e") + V("E") + + V("x") + V("X") + V("o") + -- + + V("c") + + V("C") + + V("S") -- new + + V("Q") -- new + + V("N") -- new + -- + + V("r") + + V("h") + V("H") + V("u") + V("U") + + V("p") + V("b") + + V("t") + V("T") + + V("l") + V("L") + + V("I") + + V("w") -- new + + V("W") -- new + + V("a") -- new + + V("A") -- new + + V("j") + V("J") -- stripped e E + + V("m") + V("M") -- new + + V("z") -- new + -- + -- + V("?") -- ignores probably messed up % + ) + + V("*") + ) + * (P(-1) + Carg(1)) + )^0, + -- + ["s"] = (prefix_any * P("s")) / format_s, -- %s => regular %s (string) + ["q"] = (prefix_any * P("q")) / format_q, -- %q => regular %q (quoted string) + ["i"] = (prefix_any * P("i")) / format_i, -- %i => regular %i (integer) + ["d"] = (prefix_any * P("d")) / format_d, -- %d => regular %d (integer) + ["f"] = (prefix_any * P("f")) / format_f, -- %f => regular %f (float) + ["F"] = (prefix_any * P("F")) / format_F, -- %F => regular %f (float) but 0/1 check + ["g"] = (prefix_any * P("g")) / format_g, -- %g => regular %g (float) + ["G"] = (prefix_any * P("G")) / format_G, -- %G => regular %G (float) + ["e"] = (prefix_any * P("e")) / format_e, -- %e => regular %e (float) + ["E"] = (prefix_any * P("E")) / format_E, -- %E => regular %E (float) + ["x"] = (prefix_any * P("x")) / format_x, -- %x => regular %x (hexadecimal) + ["X"] = (prefix_any * P("X")) / format_X, -- %X => regular %X (HEXADECIMAL) + ["o"] = (prefix_any * P("o")) / format_o, -- %o => regular %o (octal) + -- + ["S"] = (prefix_any * P("S")) / format_S, -- %S => %s (tostring) + ["Q"] = (prefix_any * P("Q")) / format_S, -- %Q => %q (tostring) + ["N"] = (prefix_any * P("N")) / format_N, -- %N => tonumber (strips leading zeros) + ["c"] = (prefix_any * P("c")) / format_c, -- %c => utf character (extension to regular) + ["C"] = (prefix_any * P("C")) / format_C, -- %c => U+.... utf character + -- + ["r"] = (prefix_any * P("r")) / format_r, -- %r => round + ["h"] = (prefix_any * P("h")) / format_h, -- %h => 0x0a1b2 (when - no 0x) was v + ["H"] = (prefix_any * P("H")) / format_H, -- %H => 0x0A1B2 (when - no 0x) was V + ["u"] = (prefix_any * P("u")) / format_u, -- %u => u+0a1b2 (when - no u+) + ["U"] = (prefix_any * P("U")) / format_U, -- %U => U+0A1B2 (when - no U+) + ["p"] = (prefix_any * P("p")) / format_p, -- %p => 12.345pt / maybe: P (and more units) + ["b"] = (prefix_any * P("b")) / format_b, -- %b => 12.342bp / maybe: B (and more units) + ["t"] = (prefix_tab * P("t")) / format_t, -- %t => concat + ["T"] = (prefix_tab * P("T")) / format_T, -- %t => sequenced + ["l"] = (prefix_any * P("l")) / format_l, -- %l => boolean + ["L"] = (prefix_any * P("L")) / format_L, -- %L => BOOLEAN + ["I"] = (prefix_any * P("I")) / format_I, -- %I => signed integer + -- + ["w"] = (prefix_any * P("w")) / format_w, -- %w => n spaces (optional prefix is added) + ["W"] = (prefix_any * P("W")) / format_W, -- %W => mandate prefix, no specifier + -- + ["j"] = (prefix_any * P("j")) / format_j, -- %j => %e (float) stripped exponent (irrational) + ["J"] = (prefix_any * P("J")) / format_J, -- %J => %E (float) stripped exponent (irrational) + -- + ["m"] = (prefix_tab * P("m")) / format_m, -- %m => xxx.xxx.xxx,xx (optional prefix instead of .) 
+ ["M"] = (prefix_tab * P("M")) / format_M, -- %M => xxx,xxx,xxx.xx (optional prefix instead of ,) + -- + ["z"] = (prefix_any * P("z")) / format_z, -- %M => xxx,xxx,xxx.xx (optional prefix instead of ,) + -- + ["a"] = (prefix_any * P("a")) / format_a, -- %a => '...' (forces tostring) + ["A"] = (prefix_any * P("A")) / format_A, -- %A => "..." (forces tostring) + -- + ["*"] = Cs(((1-P("%"))^1 + P("%%")/"%%")^1) / format_rest, -- rest (including %%) + ["?"] = Cs(((1-P("%"))^1 )^1) / format_rest, -- rest (including %%) + -- + ["!"] = Carg(2) * prefix_any * P("!") * C((1-P("!"))^1) * P("!") / format_extension, +} + +-- we can be clever and only alias what is needed + +-- local direct = Cs ( +-- P("%")/"" +-- * Cc([[local format = string.format return function(str) return format("%]]) +-- * (S("+- .") + R("09"))^0 +-- * S("sqidfgGeExXo") +-- * Cc([[",str) end]]) +-- * P(-1) +-- ) + +local direct = Cs ( + P("%") + * (S("+- .") + R("09"))^0 + * S("sqidfgGeExXo") + * P(-1) / [[local format = string.format return function(str) return format("%0",str) end]] +) + +local function make(t,str) + local f + local p + local p = lpegmatch(direct,str) + if p then + -- f = loadstripped(p)() + -- print("builder 1 >",p) + f = loadstripped(p)() + else + n = 0 + -- p = lpegmatch(builder,str,1,"..",t._extensions_) -- after this we know n + p = lpegmatch(builder,str,1,t._connector_,t._extensions_) -- after this we know n + if n > 0 then + p = format(template,preamble,t._preamble_,arguments[n],p) + -- print("builder 2 >",p) + f = loadstripped(p,t._environment_)() -- t._environment is not populated (was experiment) + else + f = function() return str end + end + end + t[str] = f + return f +end + +-- -- collect periodically +-- +-- local threshold = 1000 -- max nof cached formats +-- +-- local function make(t,str) +-- local f = rawget(t,str) +-- if f then +-- return f +-- end +-- local parent = t._t_ +-- if parent._n_ > threshold then +-- local m = { _t_ = parent } +-- getmetatable(parent).__index = m +-- setmetatable(m, { __index = make }) +-- else +-- parent._n_ = parent._n_ + 1 +-- end +-- local f +-- local p = lpegmatch(direct,str) +-- if p then +-- f = loadstripped(p)() +-- else +-- n = 0 +-- p = lpegmatch(builder,str,1,"..",parent._extensions_) -- after this we know n +-- if n > 0 then +-- p = format(template,preamble,parent._preamble_,arguments[n],p) +-- -- print("builder>",p) +-- f = loadstripped(p)() +-- else +-- f = function() return str end +-- end +-- end +-- t[str] = f +-- return f +-- end + +local function use(t,fmt,...) + return t[fmt](...) 
+end + +strings.formatters = { } + +-- we cannot make these tables weak, unless we start using an indirect +-- table (metatable) in which case we could better keep a count and +-- clear that table when a threshold is reached + +-- _connector_ is an experiment + +if _LUAVERSION < 5.2 then + + function strings.formatters.new(noconcat) + local t = { _type_ = "formatter", _connector_ = noconcat and "," or "..", _extensions_ = { }, _preamble_ = preamble, _environment_ = { } } + setmetatable(t, { __index = make, __call = use }) + return t + end + +else + + function strings.formatters.new(noconcat) + local e = { } -- better make a copy as we can overload + for k, v in next, environment do + e[k] = v + end + local t = { _type_ = "formatter", _connector_ = noconcat and "," or "..", _extensions_ = { }, _preamble_ = "", _environment_ = e } + setmetatable(t, { __index = make, __call = use }) + return t + end + +end + +-- function strings.formatters.new() +-- local t = { _extensions_ = { }, _preamble_ = "", _type_ = "formatter", _n_ = 0 } +-- local m = { _t_ = t } +-- setmetatable(t, { __index = m, __call = use }) +-- setmetatable(m, { __index = make }) +-- return t +-- end + +local formatters = strings.formatters.new() -- the default instance + +string.formatters = formatters -- in the main string namespace +string.formatter = function(str,...) return formatters[str](...) end -- sometimes nicer name + +local function add(t,name,template,preamble) + if type(t) == "table" and t._type_ == "formatter" then + t._extensions_[name] = template or "%s" + if type(preamble) == "string" then + t._preamble_ = preamble .. "\n" .. t._preamble_ -- so no overload ! + elseif type(preamble) == "table" then + for k, v in next, preamble do + t._environment_[k] = v + end + end + end +end + +strings.formatters.add = add + +-- registered in the default instance (should we fall back on this one?) 
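+
+-- Editorial usage sketch (not upstream): once an extension has been added it
+-- becomes available through the %!name! directive of that instance, e.g.
+--
+-- strings.formatters.add(formatters,"upper","((%s):upper())")
+-- print(formatters["%!upper! and %s"]("tex","more"))  -- TEX and more
+--
+-- The xml, tex and lua escapes registered below work the same way.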
+ +patterns.xmlescape = Cs((P("<")/"<" + P(">")/">" + P("&")/"&" + P('"')/""" + P(1))^0) +patterns.texescape = Cs((C(S("#$%\\{}"))/"\\%1" + P(1))^0) +patterns.luaescape = Cs(((1-S('"\n'))^1 + P('"')/'\\"' + P('\n')/'\\n"')^0) -- maybe also \0 +patterns.luaquoted = Cs(Cc('"') * ((1-S('"\n'))^1 + P('"')/'\\"' + P('\n')/'\\n"')^0 * Cc('"')) + +-- escaping by lpeg is faster for strings without quotes, slower on a string with quotes, but +-- faster again when other q-escapables are found (the ones we don't need to escape) + +-- add(formatters,"xml", [[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]]) +-- add(formatters,"tex", [[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]]) +-- add(formatters,"lua", [[lpegmatch(luaescape,%s)]],[[local luaescape = lpeg.patterns.luaescape]]) + +if _LUAVERSION < 5.2 then + + add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],"local xmlescape = lpeg.patterns.xmlescape") + add(formatters,"tex",[[lpegmatch(texescape,%s)]],"local texescape = lpeg.patterns.texescape") + add(formatters,"lua",[[lpegmatch(luaescape,%s)]],"local luaescape = lpeg.patterns.luaescape") + +else + + add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],{ xmlescape = lpeg.patterns.xmlescape }) + add(formatters,"tex",[[lpegmatch(texescape,%s)]],{ texescape = lpeg.patterns.texescape }) + add(formatters,"lua",[[lpegmatch(luaescape,%s)]],{ luaescape = lpeg.patterns.luaescape }) + +end + +-- -- yes or no: +-- +-- local function make(t,str) +-- local f +-- local p = lpegmatch(direct,str) +-- if p then +-- f = loadstripped(p)() +-- else +-- n = 0 +-- p = lpegmatch(builder,str,1,",") -- after this we know n +-- if n > 0 then +-- p = format(template,template_shortcuts,arguments[n],p) +-- f = loadstripped(p)() +-- else +-- f = function() return str end +-- end +-- end +-- t[str] = f +-- return f +-- end +-- +-- local formatteds = string.formatteds or { } +-- string.formatteds = formatteds +-- +-- setmetatable(formatteds, { __index = make, __call = use }) + +-- This is a somewhat silly one used in commandline reconstruction but the older +-- method, using a combination of fine, gsub, quoted and unquoted was not that +-- reliable. +-- +-- '"foo"bar \"and " whatever"' => "foo\"bar \"and \" whatever" +-- 'foo"bar \"and " whatever' => "foo\"bar \"and \" whatever" + +local dquote = patterns.dquote -- P('"') +local equote = patterns.escaped + dquote / '\\"' + 1 +local space = patterns.space +local cquote = Cc('"') + +local pattern = + Cs(dquote * (equote - P(-2))^0 * dquote) -- we keep the outer but escape unescaped ones + + Cs(cquote * (equote - space)^0 * space * equote^0 * cquote) -- we escape unescaped ones + +function string.optionalquoted(str) + return lpegmatch(pattern,str) or str +end diff --git a/src/fontloader/runtime/fontloader-basics-gen.lua b/src/fontloader/runtime/fontloader-basics-gen.lua new file mode 100644 index 0000000..e7cdc7b --- /dev/null +++ b/src/fontloader/runtime/fontloader-basics-gen.lua @@ -0,0 +1,373 @@ +if not modules then modules = { } end modules ['luat-basics-gen'] = { + version = 1.100, + comment = "companion to luatex-*.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end + +local dummyfunction = function() +end + +local dummyreporter = function(c) + return function(...) + (texio.reporter or texio.write_nl)(c .. " : " .. 
string.formatters(...)) + end +end + +statistics = { + register = dummyfunction, + starttiming = dummyfunction, + stoptiming = dummyfunction, + elapsedtime = nil, +} + +directives = { + register = dummyfunction, + enable = dummyfunction, + disable = dummyfunction, +} + +trackers = { + register = dummyfunction, + enable = dummyfunction, + disable = dummyfunction, +} + +experiments = { + register = dummyfunction, + enable = dummyfunction, + disable = dummyfunction, +} + +storage = { -- probably no longer needed + register = dummyfunction, + shared = { }, +} + +logs = { + new = dummyreporter, + reporter = dummyreporter, + messenger = dummyreporter, + report = dummyfunction, +} + +callbacks = { + register = function(n,f) return callback.register(n,f) end, + +} + +utilities = { + storage = { + allocate = function(t) return t or { } end, + mark = function(t) return t or { } end, + }, +} + +characters = characters or { + data = { } +} + +-- we need to cheat a bit here + +texconfig.kpse_init = true + +resolvers = resolvers or { } -- no fancy file helpers used + +local remapper = { + otf = "opentype fonts", + ttf = "truetype fonts", + ttc = "truetype fonts", + dfont = "truetype fonts", -- "truetype dictionary", + cid = "cid maps", + cidmap = "cid maps", + fea = "font feature files", + pfa = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this! + pfb = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this! + afm = "afm", +} + +function resolvers.findfile(name,fileformat) + name = string.gsub(name,"\\","/") + if not fileformat or fileformat == "" then + fileformat = file.suffix(name) + if fileformat == "" then + fileformat = "tex" + end + end + fileformat = string.lower(fileformat) + fileformat = remapper[fileformat] or fileformat + local found = kpse.find_file(name,fileformat) + if not found or found == "" then + found = kpse.find_file(name,"other text files") + end + return found +end + +-- function resolvers.findbinfile(name,fileformat) +-- if not fileformat or fileformat == "" then +-- fileformat = file.suffix(name) +-- end +-- return resolvers.findfile(name,(fileformat and remapper[fileformat]) or fileformat) +-- end + +resolvers.findbinfile = resolvers.findfile + +function resolvers.loadbinfile(filename,filetype) + local data = io.loaddata(filename) + return true, data, #data +end + +function resolvers.resolve(s) + return s +end + +function resolvers.unresolve(s) + return s +end + +-- Caches ... I will make a real stupid version some day when I'm in the +-- mood. After all, the generic code does not need the more advanced +-- ConTeXt features. Cached data is not shared between ConTeXt and other +-- usage as I don't want any dependency at all. Also, ConTeXt might have +-- different needs and tricks added. 
+ +--~ containers.usecache = true + +caches = { } + +local writable = nil +local readables = { } +local usingjit = jit + +if not caches.namespace or caches.namespace == "" or caches.namespace == "context" then + caches.namespace = 'generic' +end + +do + + -- standard context tree setup + + local cachepaths = kpse.expand_var('$TEXMFCACHE') or "" + + -- quite like tex live or so (the weird $TEXMFCACHE test seems to be needed on miktex) + + if cachepaths == "" or cachepaths == "$TEXMFCACHE" then + cachepaths = kpse.expand_var('$TEXMFVAR') or "" + end + + -- this also happened to be used (the weird $TEXMFVAR test seems to be needed on miktex) + + if cachepaths == "" or cachepaths == "$TEXMFVAR" then + cachepaths = kpse.expand_var('$VARTEXMF') or "" + end + + -- and this is a last resort (hm, we could use TEMP or TEMPDIR) + + if cachepaths == "" then + local fallbacks = { "TMPDIR", "TEMPDIR", "TMP", "TEMP", "HOME", "HOMEPATH" } + for i=1,#fallbacks do + cachepaths = os.getenv(fallbacks[i]) or "" + if cachepath ~= "" and lfs.isdir(cachepath) then + break + end + end + end + + if cachepaths == "" then + cachepaths = "." + end + + cachepaths = string.split(cachepaths,os.type == "windows" and ";" or ":") + + for i=1,#cachepaths do + local cachepath = cachepaths[i] + if not lfs.isdir(cachepath) then + lfs.mkdirs(cachepath) -- needed for texlive and latex + if lfs.isdir(cachepath) then + texio.write(string.format("(created cache path: %s)",cachepath)) + end + end + if file.is_writable(cachepath) then + writable = file.join(cachepath,"luatex-cache") + lfs.mkdir(writable) + writable = file.join(writable,caches.namespace) + lfs.mkdir(writable) + break + end + end + + for i=1,#cachepaths do + if file.is_readable(cachepaths[i]) then + readables[#readables+1] = file.join(cachepaths[i],"luatex-cache",caches.namespace) + end + end + + if not writable then + texio.write_nl("quiting: fix your writable cache path") + os.exit() + elseif #readables == 0 then + texio.write_nl("quiting: fix your readable cache path") + os.exit() + elseif #readables == 1 and readables[1] == writable then + texio.write(string.format("(using cache: %s)",writable)) + else + texio.write(string.format("(using write cache: %s)",writable)) + texio.write(string.format("(using read cache: %s)",table.concat(readables, " "))) + end + +end + +function caches.getwritablepath(category,subcategory) + local path = file.join(writable,category) + lfs.mkdir(path) + path = file.join(path,subcategory) + lfs.mkdir(path) + return path +end + +function caches.getreadablepaths(category,subcategory) + local t = { } + for i=1,#readables do + t[i] = file.join(readables[i],category,subcategory) + end + return t +end + +local function makefullname(path,name) + if path and path ~= "" then + return file.addsuffix(file.join(path,name),"lua"), file.addsuffix(file.join(path,name),usingjit and "lub" or "luc") + end +end + +function caches.is_writable(path,name) + local fullname = makefullname(path,name) + return fullname and file.is_writable(fullname) +end + +function caches.loaddata(paths,name) + for i=1,#paths do + local data = false + local luaname, lucname = makefullname(paths[i],name) + if lucname and not lfs.isfile(lucname) and type(caches.compile) == "function" then + -- in case we used luatex and luajittex mixed ... 
lub or luc file + texio.write(string.format("(compiling luc: %s)",lucname)) + data = loadfile(luaname) + if data then + data = data() + end + if data then + caches.compile(data,luaname,lucname) + return data + end + end + if lucname and lfs.isfile(lucname) then -- maybe also check for size + texio.write(string.format("(load luc: %s)",lucname)) + data = loadfile(lucname) + if data then + data = data() + end + if data then + return data + else + texio.write(string.format("(loading failed: %s)",lucname)) + end + end + if luaname and lfs.isfile(luaname) then + texio.write(string.format("(load lua: %s)",luaname)) + data = loadfile(luaname) + if data then + data = data() + end + if data then + return data + end + end + end +end + +function caches.savedata(path,name,data) + local luaname, lucname = makefullname(path,name) + if luaname then + texio.write(string.format("(save: %s)",luaname)) + table.tofile(luaname,data,true) + if lucname and type(caches.compile) == "function" then + os.remove(lucname) -- better be safe + texio.write(string.format("(save: %s)",lucname)) + caches.compile(data,luaname,lucname) + end + end +end + +-- According to KH os.execute is not permitted in plain/latex so there is +-- no reason to use the normal context way. So the method here is slightly +-- different from the one we have in context. We also use different suffixes +-- as we don't want any clashes (sharing cache files is not that handy as +-- context moves on faster.) +-- +-- Beware: serialization might fail on large files (so maybe we should pcall +-- this) in which case one should limit the method to luac and enable support +-- for execution. + +-- function caches.compile(data,luaname,lucname) +-- local d = io.loaddata(luaname) +-- if not d or d == "" then +-- d = table.serialize(data,true) -- slow +-- end +-- if d and d ~= "" then +-- local f = io.open(lucname,'w') +-- if f then +-- local s = loadstring(d) +-- if s then +-- f:write(string.dump(s,true)) +-- end +-- f:close() +-- end +-- end +-- end + +function caches.compile(data,luaname,lucname) + local d = io.loaddata(luaname) + if not d or d == "" then + d = table.serialize(data,true) -- slow + end + if d and d ~= "" then + local f = io.open(lucname,'wb') + if f then + local s = loadstring(d) + if s then + f:write(string.dump(s,true)) + end + f:close() + end + end +end + +-- + +function table.setmetatableindex(t,f) + if type(t) ~= "table" then + f = f or t + t = { } + end + setmetatable(t,{ __index = f }) + return t +end + +-- helper for plain: + +arguments = { } + +if arg then + for i=1,#arg do + local k, v = string.match(arg[i],"^%-%-([^=]+)=?(.-)$") + if k and v then + arguments[k] = v + end + end +end diff --git a/src/fontloader/runtime/fontloader-fontloader.lua b/src/fontloader/runtime/fontloader-fontloader.lua new file mode 100644 index 0000000..0313204 --- /dev/null +++ b/src/fontloader/runtime/fontloader-fontloader.lua @@ -0,0 +1,14628 @@ +-- merged file : luatex-fonts-merged.lua +-- parent file : luatex-fonts.lua +-- merge date : 12/03/14 18:26:33 + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['l-lua']={ + version=1.001, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local major,minor=string.match(_VERSION,"^[^%d]+(%d+)%.(%d+).*$") +_MAJORVERSION=tonumber(major) or 5 +_MINORVERSION=tonumber(minor) or 1 +_LUAVERSION=_MAJORVERSION+_MINORVERSION/10 
+if not lpeg then + lpeg=require("lpeg") +end +if loadstring then + local loadnormal=load + function load(first,...) + if type(first)=="string" then + return loadstring(first,...) + else + return loadnormal(first,...) + end + end +else + loadstring=load +end +if not ipairs then + local function iterate(a,i) + i=i+1 + local v=a[i] + if v~=nil then + return i,v + end + end + function ipairs(a) + return iterate,a,0 + end +end +if not pairs then + function pairs(t) + return next,t + end +end +if not table.unpack then + table.unpack=_G.unpack +elseif not unpack then + _G.unpack=table.unpack +end +if not package.loaders then + package.loaders=package.searchers +end +local print,select,tostring=print,select,tostring +local inspectors={} +function setinspector(inspector) + inspectors[#inspectors+1]=inspector +end +function inspect(...) + for s=1,select("#",...) do + local value=select(s,...) + local done=false + for i=1,#inspectors do + done=inspectors[i](value) + if done then + break + end + end + if not done then + print(tostring(value)) + end + end +end +local dummy=function() end +function optionalrequire(...) + local ok,result=xpcall(require,dummy,...) + if ok then + return result + end +end +if lua then + lua.mask=load([[τεχ = 1]]) and "utf" or "ascii" +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['l-lpeg']={ + version=1.001, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +lpeg=require("lpeg") +if not lpeg.print then function lpeg.print(...) print(lpeg.pcode(...)) end end +local type,next,tostring=type,next,tostring +local byte,char,gmatch,format=string.byte,string.char,string.gmatch,string.format +local floor=math.floor +local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt +local lpegtype,lpegmatch,lpegprint=lpeg.type,lpeg.match,lpeg.print +if setinspector then + setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end) +end +lpeg.patterns=lpeg.patterns or {} +local patterns=lpeg.patterns +local anything=P(1) +local endofstring=P(-1) +local alwaysmatched=P(true) +patterns.anything=anything +patterns.endofstring=endofstring +patterns.beginofstring=alwaysmatched +patterns.alwaysmatched=alwaysmatched +local sign=S('+-') +local zero=P('0') +local digit=R('09') +local octdigit=R("07") +local lowercase=R("az") +local uppercase=R("AZ") +local underscore=P("_") +local hexdigit=digit+lowercase+uppercase +local cr,lf,crlf=P("\r"),P("\n"),P("\r\n") +local newline=P("\r")*(P("\n")+P(true))+P("\n") +local escaped=P("\\")*anything +local squote=P("'") +local dquote=P('"') +local space=P(" ") +local period=P(".") +local comma=P(",") +local utfbom_32_be=P('\000\000\254\255') +local utfbom_32_le=P('\255\254\000\000') +local utfbom_16_be=P('\254\255') +local utfbom_16_le=P('\255\254') +local utfbom_8=P('\239\187\191') +local utfbom=utfbom_32_be+utfbom_32_le+utfbom_16_be+utfbom_16_le+utfbom_8 +local utftype=utfbom_32_be*Cc("utf-32-be")+utfbom_32_le*Cc("utf-32-le")+utfbom_16_be*Cc("utf-16-be")+utfbom_16_le*Cc("utf-16-le")+utfbom_8*Cc("utf-8")+alwaysmatched*Cc("utf-8") +local utfstricttype=utfbom_32_be*Cc("utf-32-be")+utfbom_32_le*Cc("utf-32-le")+utfbom_16_be*Cc("utf-16-be")+utfbom_16_le*Cc("utf-16-le")+utfbom_8*Cc("utf-8") +local 
utfoffset=utfbom_32_be*Cc(4)+utfbom_32_le*Cc(4)+utfbom_16_be*Cc(2)+utfbom_16_le*Cc(2)+utfbom_8*Cc(3)+Cc(0) +local utf8next=R("\128\191") +patterns.utfbom_32_be=utfbom_32_be +patterns.utfbom_32_le=utfbom_32_le +patterns.utfbom_16_be=utfbom_16_be +patterns.utfbom_16_le=utfbom_16_le +patterns.utfbom_8=utfbom_8 +patterns.utf_16_be_nl=P("\000\r\000\n")+P("\000\r")+P("\000\n") +patterns.utf_16_le_nl=P("\r\000\n\000")+P("\r\000")+P("\n\000") +patterns.utf_32_be_nl=P("\000\000\000\r\000\000\000\n")+P("\000\000\000\r")+P("\000\000\000\n") +patterns.utf_32_le_nl=P("\r\000\000\000\n\000\000\000")+P("\r\000\000\000")+P("\n\000\000\000") +patterns.utf8one=R("\000\127") +patterns.utf8two=R("\194\223")*utf8next +patterns.utf8three=R("\224\239")*utf8next*utf8next +patterns.utf8four=R("\240\244")*utf8next*utf8next*utf8next +patterns.utfbom=utfbom +patterns.utftype=utftype +patterns.utfstricttype=utfstricttype +patterns.utfoffset=utfoffset +local utf8char=patterns.utf8one+patterns.utf8two+patterns.utf8three+patterns.utf8four +local validutf8char=utf8char^0*endofstring*Cc(true)+Cc(false) +local utf8character=P(1)*R("\128\191")^0 +patterns.utf8=utf8char +patterns.utf8char=utf8char +patterns.utf8character=utf8character +patterns.validutf8=validutf8char +patterns.validutf8char=validutf8char +local eol=S("\n\r") +local spacer=S(" \t\f\v") +local whitespace=eol+spacer +local nonspacer=1-spacer +local nonwhitespace=1-whitespace +patterns.eol=eol +patterns.spacer=spacer +patterns.whitespace=whitespace +patterns.nonspacer=nonspacer +patterns.nonwhitespace=nonwhitespace +local stripper=spacer^0*C((spacer^0*nonspacer^1)^0) +local fullstripper=whitespace^0*C((whitespace^0*nonwhitespace^1)^0) +local collapser=Cs(spacer^0/""*nonspacer^0*((spacer^0/" "*nonspacer^1)^0)) +local b_collapser=Cs(whitespace^0/""*(nonwhitespace^1+whitespace^1/" ")^0) +local e_collapser=Cs((whitespace^1*P(-1)/""+nonwhitespace^1+whitespace^1/" ")^0) +local m_collapser=Cs((nonwhitespace^1+whitespace^1/" ")^0) +local b_stripper=Cs(spacer^0/""*(nonspacer^1+spacer^1/" ")^0) +local e_stripper=Cs((spacer^1*P(-1)/""+nonspacer^1+spacer^1/" ")^0) +local m_stripper=Cs((nonspacer^1+spacer^1/" ")^0) +patterns.stripper=stripper +patterns.fullstripper=fullstripper +patterns.collapser=collapser +patterns.b_collapser=b_collapser +patterns.m_collapser=m_collapser +patterns.e_collapser=e_collapser +patterns.b_stripper=b_stripper +patterns.m_stripper=m_stripper +patterns.e_stripper=e_stripper +patterns.lowercase=lowercase +patterns.uppercase=uppercase +patterns.letter=patterns.lowercase+patterns.uppercase +patterns.space=space +patterns.tab=P("\t") +patterns.spaceortab=patterns.space+patterns.tab +patterns.newline=newline +patterns.emptyline=newline^1 +patterns.equal=P("=") +patterns.comma=comma +patterns.commaspacer=comma*spacer^0 +patterns.period=period +patterns.colon=P(":") +patterns.semicolon=P(";") +patterns.underscore=underscore +patterns.escaped=escaped +patterns.squote=squote +patterns.dquote=dquote +patterns.nosquote=(escaped+(1-squote))^0 +patterns.nodquote=(escaped+(1-dquote))^0 +patterns.unsingle=(squote/"")*patterns.nosquote*(squote/"") +patterns.undouble=(dquote/"")*patterns.nodquote*(dquote/"") +patterns.unquoted=patterns.undouble+patterns.unsingle +patterns.unspacer=((patterns.spacer^1)/"")^0 +patterns.singlequoted=squote*patterns.nosquote*squote +patterns.doublequoted=dquote*patterns.nodquote*dquote +patterns.quoted=patterns.doublequoted+patterns.singlequoted +patterns.digit=digit +patterns.octdigit=octdigit +patterns.hexdigit=hexdigit 
+patterns.sign=sign +patterns.cardinal=digit^1 +patterns.integer=sign^-1*digit^1 +patterns.unsigned=digit^0*period*digit^1 +patterns.float=sign^-1*patterns.unsigned +patterns.cunsigned=digit^0*comma*digit^1 +patterns.cpunsigned=digit^0*(period+comma)*digit^1 +patterns.cfloat=sign^-1*patterns.cunsigned +patterns.cpfloat=sign^-1*patterns.cpunsigned +patterns.number=patterns.float+patterns.integer +patterns.cnumber=patterns.cfloat+patterns.integer +patterns.cpnumber=patterns.cpfloat+patterns.integer +patterns.oct=zero*octdigit^1 +patterns.octal=patterns.oct +patterns.HEX=zero*P("X")*(digit+uppercase)^1 +patterns.hex=zero*P("x")*(digit+lowercase)^1 +patterns.hexadecimal=zero*S("xX")*hexdigit^1 +patterns.hexafloat=sign^-1*zero*S("xX")*(hexdigit^0*period*hexdigit^1+hexdigit^1*period*hexdigit^0+hexdigit^1)*(S("pP")*sign^-1*hexdigit^1)^-1 +patterns.decafloat=sign^-1*(digit^0*period*digit^1+digit^1*period*digit^0+digit^1)*S("eE")*sign^-1*digit^1 +patterns.propername=(uppercase+lowercase+underscore)*(uppercase+lowercase+underscore+digit)^0*endofstring +patterns.somecontent=(anything-newline-space)^1 +patterns.beginline=#(1-newline) +patterns.longtostring=Cs(whitespace^0/""*((patterns.quoted+nonwhitespace^1+whitespace^1/""*(P(-1)+Cc(" ")))^0)) +local function anywhere(pattern) + return P { P(pattern)+1*V(1) } +end +lpeg.anywhere=anywhere +function lpeg.instringchecker(p) + p=anywhere(p) + return function(str) + return lpegmatch(p,str) and true or false + end +end +function lpeg.splitter(pattern,action) + return (((1-P(pattern))^1)/action+1)^0 +end +function lpeg.tsplitter(pattern,action) + return Ct((((1-P(pattern))^1)/action+1)^0) +end +local splitters_s,splitters_m,splitters_t={},{},{} +local function splitat(separator,single) + local splitter=(single and splitters_s[separator]) or splitters_m[separator] + if not splitter then + separator=P(separator) + local other=C((1-separator)^0) + if single then + local any=anything + splitter=other*(separator*C(any^0)+"") + splitters_s[separator]=splitter + else + splitter=other*(separator*other)^0 + splitters_m[separator]=splitter + end + end + return splitter +end +local function tsplitat(separator) + local splitter=splitters_t[separator] + if not splitter then + splitter=Ct(splitat(separator)) + splitters_t[separator]=splitter + end + return splitter +end +lpeg.splitat=splitat +lpeg.tsplitat=tsplitat +function string.splitup(str,separator) + if not separator then + separator="," + end + return lpegmatch(splitters_m[separator] or splitat(separator),str) +end +local cache={} +function lpeg.split(separator,str) + local c=cache[separator] + if not c then + c=tsplitat(separator) + cache[separator]=c + end + return lpegmatch(c,str) +end +function string.split(str,separator) + if separator then + local c=cache[separator] + if not c then + c=tsplitat(separator) + cache[separator]=c + end + return lpegmatch(c,str) + else + return { str } + end +end +local spacing=patterns.spacer^0*newline +local empty=spacing*Cc("") +local nonempty=Cs((1-spacing)^1)*spacing^-1 +local content=(empty+nonempty)^1 +patterns.textline=content +local linesplitter=tsplitat(newline) +patterns.linesplitter=linesplitter +function string.splitlines(str) + return lpegmatch(linesplitter,str) +end +local cache={} +function lpeg.checkedsplit(separator,str) + local c=cache[separator] + if not c then + separator=P(separator) + local other=C((1-separator)^1) + c=Ct(separator^0*other*(separator^1*other)^0) + cache[separator]=c + end + return lpegmatch(c,str) +end +function 
string.checkedsplit(str,separator) + local c=cache[separator] + if not c then + separator=P(separator) + local other=C((1-separator)^1) + c=Ct(separator^0*other*(separator^1*other)^0) + cache[separator]=c + end + return lpegmatch(c,str) +end +local function f2(s) local c1,c2=byte(s,1,2) return c1*64+c2-12416 end +local function f3(s) local c1,c2,c3=byte(s,1,3) return (c1*64+c2)*64+c3-925824 end +local function f4(s) local c1,c2,c3,c4=byte(s,1,4) return ((c1*64+c2)*64+c3)*64+c4-63447168 end +local utf8byte=patterns.utf8one/byte+patterns.utf8two/f2+patterns.utf8three/f3+patterns.utf8four/f4 +patterns.utf8byte=utf8byte +local cache={} +function lpeg.stripper(str) + if type(str)=="string" then + local s=cache[str] + if not s then + s=Cs(((S(str)^1)/""+1)^0) + cache[str]=s + end + return s + else + return Cs(((str^1)/""+1)^0) + end +end +local cache={} +function lpeg.keeper(str) + if type(str)=="string" then + local s=cache[str] + if not s then + s=Cs((((1-S(str))^1)/""+1)^0) + cache[str]=s + end + return s + else + return Cs((((1-str)^1)/""+1)^0) + end +end +function lpeg.frontstripper(str) + return (P(str)+P(true))*Cs(anything^0) +end +function lpeg.endstripper(str) + return Cs((1-P(str)*endofstring)^0) +end +function lpeg.replacer(one,two,makefunction,isutf) + local pattern + local u=isutf and utf8char or 1 + if type(one)=="table" then + local no=#one + local p=P(false) + if no==0 then + for k,v in next,one do + p=p+P(k)/v + end + pattern=Cs((p+u)^0) + elseif no==1 then + local o=one[1] + one,two=P(o[1]),o[2] + pattern=Cs((one/two+u)^0) + else + for i=1,no do + local o=one[i] + p=p+P(o[1])/o[2] + end + pattern=Cs((p+u)^0) + end + else + pattern=Cs((P(one)/(two or "")+u)^0) + end + if makefunction then + return function(str) + return lpegmatch(pattern,str) + end + else + return pattern + end +end +function lpeg.finder(lst,makefunction,isutf) + local pattern + if type(lst)=="table" then + pattern=P(false) + if #lst==0 then + for k,v in next,lst do + pattern=pattern+P(k) + end + else + for i=1,#lst do + pattern=pattern+P(lst[i]) + end + end + else + pattern=P(lst) + end + if isutf then + pattern=((utf8char or 1)-pattern)^0*pattern + else + pattern=(1-pattern)^0*pattern + end + if makefunction then + return function(str) + return lpegmatch(pattern,str) + end + else + return pattern + end +end +local splitters_f,splitters_s={},{} +function lpeg.firstofsplit(separator) + local splitter=splitters_f[separator] + if not splitter then + local pattern=P(separator) + splitter=C((1-pattern)^0) + splitters_f[separator]=splitter + end + return splitter +end +function lpeg.secondofsplit(separator) + local splitter=splitters_s[separator] + if not splitter then + local pattern=P(separator) + splitter=(1-pattern)^0*pattern*C(anything^0) + splitters_s[separator]=splitter + end + return splitter +end +local splitters_s,splitters_p={},{} +function lpeg.beforesuffix(separator) + local splitter=splitters_s[separator] + if not splitter then + local pattern=P(separator) + splitter=C((1-pattern)^0)*pattern*endofstring + splitters_s[separator]=splitter + end + return splitter +end +function lpeg.afterprefix(separator) + local splitter=splitters_p[separator] + if not splitter then + local pattern=P(separator) + splitter=pattern*C(anything^0) + splitters_p[separator]=splitter + end + return splitter +end +function lpeg.balancer(left,right) + left,right=P(left),P(right) + return P { left*((1-left-right)+V(1))^0*right } +end +local nany=utf8char/"" +function lpeg.counter(pattern) + pattern=Cs((P(pattern)/" "+nany)^0) + 
return function(str) + return #lpegmatch(pattern,str) + end +end +utf=utf or (unicode and unicode.utf8) or {} +local utfcharacters=utf and utf.characters or string.utfcharacters +local utfgmatch=utf and utf.gmatch +local utfchar=utf and utf.char +lpeg.UP=lpeg.P +if utfcharacters then + function lpeg.US(str) + local p=P(false) + for uc in utfcharacters(str) do + p=p+P(uc) + end + return p + end +elseif utfgmatch then + function lpeg.US(str) + local p=P(false) + for uc in utfgmatch(str,".") do + p=p+P(uc) + end + return p + end +else + function lpeg.US(str) + local p=P(false) + local f=function(uc) + p=p+P(uc) + end + lpegmatch((utf8char/f)^0,str) + return p + end +end +local range=utf8byte*utf8byte+Cc(false) +function lpeg.UR(str,more) + local first,last + if type(str)=="number" then + first=str + last=more or first + else + first,last=lpegmatch(range,str) + if not last then + return P(str) + end + end + if first==last then + return P(str) + elseif utfchar and (last-first<8) then + local p=P(false) + for i=first,last do + p=p+P(utfchar(i)) + end + return p + else + local f=function(b) + return b>=first and b<=last + end + return utf8byte/f + end +end +function lpeg.is_lpeg(p) + return p and lpegtype(p)=="pattern" +end +function lpeg.oneof(list,...) + if type(list)~="table" then + list={ list,... } + end + local p=P(list[1]) + for l=2,#list do + p=p+P(list[l]) + end + return p +end +local sort=table.sort +local function copyindexed(old) + local new={} + for i=1,#old do + new[i]=old + end + return new +end +local function sortedkeys(tab) + local keys,s={},0 + for key,_ in next,tab do + s=s+1 + keys[s]=key + end + sort(keys) + return keys +end +function lpeg.append(list,pp,delayed,checked) + local p=pp + if #list>0 then + local keys=copyindexed(list) + sort(keys) + for i=#keys,1,-1 do + local k=keys[i] + if p then + p=P(k)+p + else + p=P(k) + end + end + elseif delayed then + local keys=sortedkeys(list) + if p then + for i=1,#keys,1 do + local k=keys[i] + local v=list[k] + p=P(k)/list+p + end + else + for i=1,#keys do + local k=keys[i] + local v=list[k] + if p then + p=P(k)+p + else + p=P(k) + end + end + if p then + p=p/list + end + end + elseif checked then + local keys=sortedkeys(list) + for i=1,#keys do + local k=keys[i] + local v=list[k] + if p then + if k==v then + p=P(k)+p + else + p=P(k)/v+p + end + else + if k==v then + p=P(k) + else + p=P(k)/v + end + end + end + else + local keys=sortedkeys(list) + for i=1,#keys do + local k=keys[i] + local v=list[k] + if p then + p=P(k)/v+p + else + p=P(k)/v + end + end + end + return p +end +local function make(t,hash) + local p=P(false) + local keys=sortedkeys(t) + for i=1,#keys do + local k=keys[i] + local v=t[k] + local h=hash[v] + if h then + if next(v) then + p=p+P(k)*(make(v,hash)+P(true)) + else + p=p+P(k)*P(true) + end + else + if next(v) then + p=p+P(k)*make(v,hash) + else + p=p+P(k) + end + end + end + return p +end +function lpeg.utfchartabletopattern(list) + local tree={} + local hash={} + local n=#list + if n==0 then + for s in next,list do + local t=tree + for c in gmatch(s,".") do + local tc=t[c] + if not tc then + tc={} + t[c]=tc + end + t=tc + end + hash[t]=s + end + else + for i=1,n do + local t=tree + local s=list[i] + for c in gmatch(s,".") do + local tc=t[c] + if not tc then + tc={} + t[c]=tc + end + t=tc + end + hash[t]=s + end + end + return make(tree,hash) +end +patterns.containseol=lpeg.finder(eol) +local function nextstep(n,step,result) + local m=n%step + local d=floor(n/step) + if d>0 then + local v=V(tostring(step)) + 
local s=result.start + for i=1,d do + if s then + s=v*s + else + s=v + end + end + result.start=s + end + if step>1 and result.start then + local v=V(tostring(step/2)) + result[tostring(step)]=v*v + end + if step>0 then + return nextstep(m,step/2,result) + else + return result + end +end +function lpeg.times(pattern,n) + return P(nextstep(n,2^16,{ "start",["1"]=pattern })) +end +local trailingzeros=zero^0*-digit +local case_1=period*trailingzeros/"" +local case_2=period*(digit-trailingzeros)^1*(trailingzeros/"") +local number=digit^1*(case_1+case_2) +local stripper=Cs((number+1)^0) +lpeg.patterns.stripzeros=stripper +local byte_to_HEX={} +local byte_to_hex={} +local byte_to_dec={} +local hex_to_byte={} +for i=0,255 do + local H=format("%02X",i) + local h=format("%02x",i) + local d=format("%03i",i) + local c=char(i) + byte_to_HEX[c]=H + byte_to_hex[c]=h + byte_to_dec[c]=d + hex_to_byte[h]=c + hex_to_byte[H]=c +end +local hextobyte=P(2)/hex_to_byte +local bytetoHEX=P(1)/byte_to_HEX +local bytetohex=P(1)/byte_to_hex +local bytetodec=P(1)/byte_to_dec +local hextobytes=Cs(hextobyte^0) +local bytestoHEX=Cs(bytetoHEX^0) +local bytestohex=Cs(bytetohex^0) +local bytestodec=Cs(bytetodec^0) +patterns.hextobyte=hextobyte +patterns.bytetoHEX=bytetoHEX +patterns.bytetohex=bytetohex +patterns.bytetodec=bytetodec +patterns.hextobytes=hextobytes +patterns.bytestoHEX=bytestoHEX +patterns.bytestohex=bytestohex +patterns.bytestodec=bytestodec +function string.toHEX(s) + if not s or s=="" then + return s + else + return lpegmatch(bytestoHEX,s) + end +end +function string.tohex(s) + if not s or s=="" then + return s + else + return lpegmatch(bytestohex,s) + end +end +function string.todec(s) + if not s or s=="" then + return s + else + return lpegmatch(bytestodec,s) + end +end +function string.tobytes(s) + if not s or s=="" then + return s + else + return lpegmatch(hextobytes,s) + end +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['l-functions']={ + version=1.001, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +functions=functions or {} +function functions.dummy() end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['l-string']={ + version=1.001, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local string=string +local sub,gmatch,format,char,byte,rep,lower=string.sub,string.gmatch,string.format,string.char,string.byte,string.rep,string.lower +local lpegmatch,patterns=lpeg.match,lpeg.patterns +local P,S,C,Ct,Cc,Cs=lpeg.P,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cc,lpeg.Cs +local unquoted=patterns.squote*C(patterns.nosquote)*patterns.squote+patterns.dquote*C(patterns.nodquote)*patterns.dquote +function string.unquoted(str) + return lpegmatch(unquoted,str) or str +end +function string.quoted(str) + return format("%q",str) +end +function string.count(str,pattern) + local n=0 + for _ in gmatch(str,pattern) do + n=n+1 + end + return n +end +function string.limit(str,n,sentinel) + if #str>n then + sentinel=sentinel or "..." 
+ return sub(str,1,(n-#sentinel))..sentinel + else + return str + end +end +local stripper=patterns.stripper +local fullstripper=patterns.fullstripper +local collapser=patterns.collapser +local longtostring=patterns.longtostring +function string.strip(str) + return lpegmatch(stripper,str) or "" +end +function string.fullstrip(str) + return lpegmatch(fullstripper,str) or "" +end +function string.collapsespaces(str) + return lpegmatch(collapser,str) or "" +end +function string.longtostring(str) + return lpegmatch(longtostring,str) or "" +end +local pattern=P(" ")^0*P(-1) +function string.is_empty(str) + if str=="" then + return true + else + return lpegmatch(pattern,str) and true or false + end +end +local anything=patterns.anything +local allescapes=Cc("%")*S(".-+%?()[]*") +local someescapes=Cc("%")*S(".-+%()[]") +local matchescapes=Cc(".")*S("*?") +local pattern_a=Cs ((allescapes+anything )^0 ) +local pattern_b=Cs ((someescapes+matchescapes+anything )^0 ) +local pattern_c=Cs (Cc("^")*(someescapes+matchescapes+anything )^0*Cc("$") ) +function string.escapedpattern(str,simple) + return lpegmatch(simple and pattern_b or pattern_a,str) +end +function string.topattern(str,lowercase,strict) + if str=="" or type(str)~="string" then + return ".*" + elseif strict then + str=lpegmatch(pattern_c,str) + else + str=lpegmatch(pattern_b,str) + end + if lowercase then + return lower(str) + else + return str + end +end +function string.valid(str,default) + return (type(str)=="string" and str~="" and str) or default or nil +end +string.itself=function(s) return s end +local pattern=Ct(C(1)^0) +function string.totable(str) + return lpegmatch(pattern,str) +end +local replacer=lpeg.replacer("@","%%") +function string.tformat(fmt,...) + return format(lpegmatch(replacer,fmt),...) 
+end +string.quote=string.quoted +string.unquote=string.unquoted + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['l-table']={ + version=1.001, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local type,next,tostring,tonumber,ipairs,select=type,next,tostring,tonumber,ipairs,select +local table,string=table,string +local concat,sort,insert,remove=table.concat,table.sort,table.insert,table.remove +local format,lower,dump=string.format,string.lower,string.dump +local getmetatable,setmetatable=getmetatable,setmetatable +local getinfo=debug.getinfo +local lpegmatch,patterns=lpeg.match,lpeg.patterns +local floor=math.floor +local stripper=patterns.stripper +function table.strip(tab) + local lst,l={},0 + for i=1,#tab do + local s=lpegmatch(stripper,tab[i]) or "" + if s=="" then + else + l=l+1 + lst[l]=s + end + end + return lst +end +function table.keys(t) + if t then + local keys,k={},0 + for key,_ in next,t do + k=k+1 + keys[k]=key + end + return keys + else + return {} + end +end +local function compare(a,b) + local ta,tb=type(a),type(b) + if ta==tb then + return a0 then + local n=0 + for _,v in next,t do + n=n+1 + end + if n==#t then + local tt,nt={},0 + for i=1,#t do + local v=t[i] + local tv=type(v) + if tv=="number" then + nt=nt+1 + if hexify then + tt[nt]=format("0x%X",v) + else + tt[nt]=tostring(v) + end + elseif tv=="string" then + nt=nt+1 + tt[nt]=format("%q",v) + elseif tv=="boolean" then + nt=nt+1 + tt[nt]=v and "true" or "false" + else + tt=nil + break + end + end + return tt + end + end + return nil +end +local propername=patterns.propername +local function dummy() end +local function do_serialize(root,name,depth,level,indexed) + if level>0 then + depth=depth.." 
" + if indexed then + handle(format("%s{",depth)) + else + local tn=type(name) + if tn=="number" then + if hexify then + handle(format("%s[0x%X]={",depth,name)) + else + handle(format("%s[%s]={",depth,name)) + end + elseif tn=="string" then + if noquotes and not reserved[name] and lpegmatch(propername,name) then + handle(format("%s%s={",depth,name)) + else + handle(format("%s[%q]={",depth,name)) + end + elseif tn=="boolean" then + handle(format("%s[%s]={",depth,name and "true" or "false")) + else + handle(format("%s{",depth)) + end + end + end + if root and next(root) then + local first,last=nil,0 + if compact then + last=#root + for k=1,last do + if root[k]==nil then + last=k-1 + break + end + end + if last>0 then + first=1 + end + end + local sk=sortedkeys(root) + for i=1,#sk do + local k=sk[i] + local v=root[k] + local tv,tk=type(v),type(k) + if compact and first and tk=="number" and k>=first and k<=last then + if tv=="number" then + if hexify then + handle(format("%s 0x%X,",depth,v)) + else + handle(format("%s %s,",depth,v)) + end + elseif tv=="string" then + if reduce and tonumber(v) then + handle(format("%s %s,",depth,v)) + else + handle(format("%s %q,",depth,v)) + end + elseif tv=="table" then + if not next(v) then + handle(format("%s {},",depth)) + elseif inline then + local st=simple_table(v) + if st then + handle(format("%s { %s },",depth,concat(st,", "))) + else + do_serialize(v,k,depth,level+1,true) + end + else + do_serialize(v,k,depth,level+1,true) + end + elseif tv=="boolean" then + handle(format("%s %s,",depth,v and "true" or "false")) + elseif tv=="function" then + if functions then + handle(format('%s load(%q),',depth,dump(v))) + else + handle(format('%s "function",',depth)) + end + else + handle(format("%s %q,",depth,tostring(v))) + end + elseif k=="__p__" then + if false then + handle(format("%s __p__=nil,",depth)) + end + elseif tv=="number" then + if tk=="number" then + if hexify then + handle(format("%s [0x%X]=0x%X,",depth,k,v)) + else + handle(format("%s [%s]=%s,",depth,k,v)) + end + elseif tk=="boolean" then + if hexify then + handle(format("%s [%s]=0x%X,",depth,k and "true" or "false",v)) + else + handle(format("%s [%s]=%s,",depth,k and "true" or "false",v)) + end + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + if hexify then + handle(format("%s %s=0x%X,",depth,k,v)) + else + handle(format("%s %s=%s,",depth,k,v)) + end + else + if hexify then + handle(format("%s [%q]=0x%X,",depth,k,v)) + else + handle(format("%s [%q]=%s,",depth,k,v)) + end + end + elseif tv=="string" then + if reduce and tonumber(v) then + if tk=="number" then + if hexify then + handle(format("%s [0x%X]=%s,",depth,k,v)) + else + handle(format("%s [%s]=%s,",depth,k,v)) + end + elseif tk=="boolean" then + handle(format("%s [%s]=%s,",depth,k and "true" or "false",v)) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s=%s,",depth,k,v)) + else + handle(format("%s [%q]=%s,",depth,k,v)) + end + else + if tk=="number" then + if hexify then + handle(format("%s [0x%X]=%q,",depth,k,v)) + else + handle(format("%s [%s]=%q,",depth,k,v)) + end + elseif tk=="boolean" then + handle(format("%s [%s]=%q,",depth,k and "true" or "false",v)) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s=%q,",depth,k,v)) + else + handle(format("%s [%q]=%q,",depth,k,v)) + end + end + elseif tv=="table" then + if not next(v) then + if tk=="number" then + if hexify then + handle(format("%s [0x%X]={},",depth,k)) + else + 
handle(format("%s [%s]={},",depth,k)) + end + elseif tk=="boolean" then + handle(format("%s [%s]={},",depth,k and "true" or "false")) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s={},",depth,k)) + else + handle(format("%s [%q]={},",depth,k)) + end + elseif inline then + local st=simple_table(v) + if st then + if tk=="number" then + if hexify then + handle(format("%s [0x%X]={ %s },",depth,k,concat(st,", "))) + else + handle(format("%s [%s]={ %s },",depth,k,concat(st,", "))) + end + elseif tk=="boolean" then + handle(format("%s [%s]={ %s },",depth,k and "true" or "false",concat(st,", "))) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s={ %s },",depth,k,concat(st,", "))) + else + handle(format("%s [%q]={ %s },",depth,k,concat(st,", "))) + end + else + do_serialize(v,k,depth,level+1) + end + else + do_serialize(v,k,depth,level+1) + end + elseif tv=="boolean" then + if tk=="number" then + if hexify then + handle(format("%s [0x%X]=%s,",depth,k,v and "true" or "false")) + else + handle(format("%s [%s]=%s,",depth,k,v and "true" or "false")) + end + elseif tk=="boolean" then + handle(format("%s [%s]=%s,",depth,tostring(k),v and "true" or "false")) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s=%s,",depth,k,v and "true" or "false")) + else + handle(format("%s [%q]=%s,",depth,k,v and "true" or "false")) + end + elseif tv=="function" then + if functions then + local f=getinfo(v).what=="C" and dump(dummy) or dump(v) + if tk=="number" then + if hexify then + handle(format("%s [0x%X]=load(%q),",depth,k,f)) + else + handle(format("%s [%s]=load(%q),",depth,k,f)) + end + elseif tk=="boolean" then + handle(format("%s [%s]=load(%q),",depth,k and "true" or "false",f)) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s=load(%q),",depth,k,f)) + else + handle(format("%s [%q]=load(%q),",depth,k,f)) + end + end + else + if tk=="number" then + if hexify then + handle(format("%s [0x%X]=%q,",depth,k,tostring(v))) + else + handle(format("%s [%s]=%q,",depth,k,tostring(v))) + end + elseif tk=="boolean" then + handle(format("%s [%s]=%q,",depth,k and "true" or "false",tostring(v))) + elseif noquotes and not reserved[k] and lpegmatch(propername,k) then + handle(format("%s %s=%q,",depth,k,tostring(v))) + else + handle(format("%s [%q]=%q,",depth,k,tostring(v))) + end + end + end + end + if level>0 then + handle(format("%s},",depth)) + end +end +local function serialize(_handle,root,name,specification) + local tname=type(name) + if type(specification)=="table" then + noquotes=specification.noquotes + hexify=specification.hexify + handle=_handle or specification.handle or print + reduce=specification.reduce or false + functions=specification.functions + compact=specification.compact + inline=specification.inline and compact + if functions==nil then + functions=true + end + if compact==nil then + compact=true + end + if inline==nil then + inline=compact + end + else + noquotes=false + hexify=false + handle=_handle or print + reduce=false + compact=true + inline=true + functions=true + end + if tname=="string" then + if name=="return" then + handle("return {") + else + handle(name.."={") + end + elseif tname=="number" then + if hexify then + handle(format("[0x%X]={",name)) + else + handle("["..name.."]={") + end + elseif tname=="boolean" then + if name then + handle("return {") + else + handle("{") + end + else + handle("t={") + end + if root then + 
if getmetatable(root) then + local dummy=root._w_h_a_t_e_v_e_r_ + root._w_h_a_t_e_v_e_r_=nil + end + if next(root) then + do_serialize(root,name,"",0) + end + end + handle("}") +end +function table.serialize(root,name,specification) + local t,n={},0 + local function flush(s) + n=n+1 + t[n]=s + end + serialize(flush,root,name,specification) + return concat(t,"\n") +end +table.tohandle=serialize +local maxtab=2*1024 +function table.tofile(filename,root,name,specification) + local f=io.open(filename,'w') + if f then + if maxtab>1 then + local t,n={},0 + local function flush(s) + n=n+1 + t[n]=s + if n>maxtab then + f:write(concat(t,"\n"),"\n") + t,n={},0 + end + end + serialize(flush,root,name,specification) + f:write(concat(t,"\n"),"\n") + else + local function flush(s) + f:write(s,"\n") + end + serialize(flush,root,name,specification) + end + f:close() + io.flush() + end +end +local function flattened(t,f,depth) + if f==nil then + f={} + depth=0xFFFF + elseif tonumber(f) then + depth=f + f={} + elseif not depth then + depth=0xFFFF + end + for k,v in next,t do + if type(k)~="number" then + if depth>0 and type(v)=="table" then + flattened(v,f,depth-1) + else + f[#f+1]=v + end + end + end + for k=1,#t do + local v=t[k] + if depth>0 and type(v)=="table" then + flattened(v,f,depth-1) + else + f[#f+1]=v + end + end + return f +end +table.flattened=flattened +local function unnest(t,f) + if not f then + f={} + end + for i=1,#t do + local v=t[i] + if type(v)=="table" then + if type(v[1])=="table" then + unnest(v,f) + else + f[#f+1]=v + end + else + f[#f+1]=v + end + end + return f +end +function table.unnest(t) + return unnest(t) +end +local function are_equal(a,b,n,m) + if a and b and #a==#b then + n=n or 1 + m=m or #a + for i=n,m do + local ai,bi=a[i],b[i] + if ai==bi then + elseif type(ai)=="table" and type(bi)=="table" then + if not are_equal(ai,bi) then + return false + end + else + return false + end + end + return true + else + return false + end +end +local function identical(a,b) + for ka,va in next,a do + local vb=b[ka] + if va==vb then + elseif type(va)=="table" and type(vb)=="table" then + if not identical(va,vb) then + return false + end + else + return false + end + end + return true +end +table.identical=identical +table.are_equal=are_equal +local function sparse(old,nest,keeptables) + local new={} + for k,v in next,old do + if not (v=="" or v==false) then + if nest and type(v)=="table" then + v=sparse(v,nest) + if keeptables or next(v) then + new[k]=v + end + else + new[k]=v + end + end + end + return new +end +table.sparse=sparse +function table.compact(t) + return sparse(t,true,true) +end +function table.contains(t,v) + if t then + for i=1,#t do + if t[i]==v then + return i + end + end + end + return false +end +function table.count(t) + local n=0 + for k,v in next,t do + n=n+1 + end + return n +end +function table.swapped(t,s) + local n={} + if s then + for k,v in next,s do + n[k]=v + end + end + for k,v in next,t do + n[v]=k + end + return n +end +function table.mirrored(t) + local n={} + for k,v in next,t do + n[v]=k + n[k]=v + end + return n +end +function table.reversed(t) + if t then + local tt,tn={},#t + if tn>0 then + local ttn=0 + for i=tn,1,-1 do + ttn=ttn+1 + tt[ttn]=t[i] + end + end + return tt + end +end +function table.reverse(t) + if t then + local n=#t + for i=1,floor(n/2) do + local j=n-i+1 + t[i],t[j]=t[j],t[i] + end + return t + end +end +function table.sequenced(t,sep,simple) + if not t then + return "" + end + local n=#t + local s={} + if n>0 then + for i=1,n 
do + s[i]=tostring(t[i]) + end + else + n=0 + for k,v in sortedhash(t) do + if simple then + if v==true then + n=n+1 + s[n]=k + elseif v and v~="" then + n=n+1 + s[n]=k.."="..tostring(v) + end + else + n=n+1 + s[n]=k.."="..tostring(v) + end + end + end + return concat(s,sep or " | ") +end +function table.print(t,...) + if type(t)~="table" then + print(tostring(t)) + else + serialize(print,t,...) + end +end +if setinspector then + setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end) +end +function table.sub(t,i,j) + return { unpack(t,i,j) } +end +function table.is_empty(t) + return not t or not next(t) +end +function table.has_one_entry(t) + return t and not next(t,next(t)) +end +function table.loweredkeys(t) + local l={} + for k,v in next,t do + l[lower(k)]=v + end + return l +end +function table.unique(old) + local hash={} + local new={} + local n=0 + for i=1,#old do + local oi=old[i] + if not hash[oi] then + n=n+1 + new[n]=oi + hash[oi]=true + end + end + return new +end +function table.sorted(t,...) + sort(t,...) + return t +end +function table.values(t,s) + if t then + local values,keys,v={},{},0 + for key,value in next,t do + if not keys[value] then + v=v+1 + values[v]=value + keys[k]=key + end + end + if s then + sort(values) + end + return values + else + return {} + end +end +function table.filtered(t,pattern,sort,cmp) + if t and type(pattern)=="string" then + if sort then + local s + if cmp then + s=sortedhashkeys(t,function(a,b) return cmp(t,a,b) end) + else + s=sortedkeys(t) + end + local n=0 + local m=#s + local function kv(s) + while n16*1024*1024 then + step=16*1024*1024 + else + step=floor(size/(1024*1024))*1024*1024/8 + end + local data={} + while true do + local r=f:read(step) + if not r then + return concat(data) + else + data[#data+1]=r + end + end + end +end +io.readall=readall +function io.loaddata(filename,textmode) + local f=io.open(filename,(textmode and 'r') or 'rb') + if f then + local data=readall(f) + f:close() + if #data>0 then + return data + end + end +end +function io.savedata(filename,data,joiner) + local f=io.open(filename,"wb") + if f then + if type(data)=="table" then + f:write(concat(data,joiner or "")) + elseif type(data)=="function" then + data(f) + else + f:write(data or "") + end + f:close() + io.flush() + return true + else + return false + end +end +function io.loadlines(filename,n) + local f=io.open(filename,'r') + if not f then + elseif n then + local lines={} + for i=1,n do + local line=f:read("*lines") + if line then + lines[#lines+1]=line + else + break + end + end + f:close() + lines=concat(lines,"\n") + if #lines>0 then + return lines + end + else + local line=f:read("*line") or "" + f:close() + if #line>0 then + return line + end + end +end +function io.loadchunk(filename,n) + local f=io.open(filename,'rb') + if f then + local data=f:read(n or 1024) + f:close() + if #data>0 then + return data + end + end +end +function io.exists(filename) + local f=io.open(filename) + if f==nil then + return false + else + f:close() + return true + end +end +function io.size(filename) + local f=io.open(filename) + if f==nil then + return 0 + else + local s=f:seek("end") + f:close() + return s + end +end +function io.noflines(f) + if type(f)=="string" then + local f=io.open(filename) + if f then + local n=f and io.noflines(f) or 0 + f:close() + return n + else + return 0 + end + else + local n=0 + for _ in f:lines() do + n=n+1 + end + f:seek('set',0) + return n + end +end +local nextchar={ + [ 4]=function(f) + 
return f:read(1,1,1,1) + end, + [ 2]=function(f) + return f:read(1,1) + end, + [ 1]=function(f) + return f:read(1) + end, + [-2]=function(f) + local a,b=f:read(1,1) + return b,a + end, + [-4]=function(f) + local a,b,c,d=f:read(1,1,1,1) + return d,c,b,a + end +} +function io.characters(f,n) + if f then + return nextchar[n or 1],f + end +end +local nextbyte={ + [4]=function(f) + local a,b,c,d=f:read(1,1,1,1) + if d then + return byte(a),byte(b),byte(c),byte(d) + end + end, + [3]=function(f) + local a,b,c=f:read(1,1,1) + if b then + return byte(a),byte(b),byte(c) + end + end, + [2]=function(f) + local a,b=f:read(1,1) + if b then + return byte(a),byte(b) + end + end, + [1]=function (f) + local a=f:read(1) + if a then + return byte(a) + end + end, + [-2]=function (f) + local a,b=f:read(1,1) + if b then + return byte(b),byte(a) + end + end, + [-3]=function(f) + local a,b,c=f:read(1,1,1) + if b then + return byte(c),byte(b),byte(a) + end + end, + [-4]=function(f) + local a,b,c,d=f:read(1,1,1,1) + if d then + return byte(d),byte(c),byte(b),byte(a) + end + end +} +function io.bytes(f,n) + if f then + return nextbyte[n or 1],f + else + return nil,nil + end +end +function io.ask(question,default,options) + while true do + io.write(question) + if options then + io.write(format(" [%s]",concat(options,"|"))) + end + if default then + io.write(format(" [%s]",default)) + end + io.write(format(" ")) + io.flush() + local answer=io.read() + answer=gsub(answer,"^%s*(.*)%s*$","%1") + if answer=="" and default then + return default + elseif not options then + return answer + else + for k=1,#options do + if options[k]==answer then + return answer + end + end + local pattern="^"..answer + for k=1,#options do + local v=options[k] + if find(v,pattern) then + return v + end + end + end + end +end +local function readnumber(f,n,m) + if m then + f:seek("set",n) + n=m + end + if n==1 then + return byte(f:read(1)) + elseif n==2 then + local a,b=byte(f:read(2),1,2) + return 256*a+b + elseif n==3 then + local a,b,c=byte(f:read(3),1,3) + return 256*256*a+256*b+c + elseif n==4 then + local a,b,c,d=byte(f:read(4),1,4) + return 256*256*256*a+256*256*b+256*c+d + elseif n==8 then + local a,b=readnumber(f,4),readnumber(f,4) + return 256*a+b + elseif n==12 then + local a,b,c=readnumber(f,4),readnumber(f,4),readnumber(f,4) + return 256*256*a+256*b+c + elseif n==-2 then + local b,a=byte(f:read(2),1,2) + return 256*a+b + elseif n==-3 then + local c,b,a=byte(f:read(3),1,3) + return 256*256*a+256*b+c + elseif n==-4 then + local d,c,b,a=byte(f:read(4),1,4) + return 256*256*256*a+256*256*b+256*c+d + elseif n==-8 then + local h,g,f,e,d,c,b,a=byte(f:read(8),1,8) + return 256*256*256*256*256*256*256*a+256*256*256*256*256*256*b+256*256*256*256*256*c+256*256*256*256*d+256*256*256*e+256*256*f+256*g+h + else + return 0 + end +end +io.readnumber=readnumber +function io.readstring(f,n,m) + if m then + f:seek("set",n) + n=m + end + local str=gsub(f:read(n),"\000","") + return str +end +if not io.i_limiter then function io.i_limiter() end end +if not io.o_limiter then function io.o_limiter() end end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['l-file']={ + version=1.001, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +file=file or {} +local file=file +if not lfs then + lfs=optionalrequire("lfs") +end +if not lfs then + lfs={ 
+ getcurrentdir=function() + return "." + end, + attributes=function() + return nil + end, + isfile=function(name) + local f=io.open(name,'rb') + if f then + f:close() + return true + end + end, + isdir=function(name) + print("you need to load lfs") + return false + end + } +elseif not lfs.isfile then + local attributes=lfs.attributes + function lfs.isdir(name) + return attributes(name,"mode")=="directory" + end + function lfs.isfile(name) + return attributes(name,"mode")=="file" + end +end +local insert,concat=table.insert,table.concat +local match,find,gmatch=string.match,string.find,string.gmatch +local lpegmatch=lpeg.match +local getcurrentdir,attributes=lfs.currentdir,lfs.attributes +local checkedsplit=string.checkedsplit +local P,R,S,C,Cs,Cp,Cc,Ct=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Cs,lpeg.Cp,lpeg.Cc,lpeg.Ct +local colon=P(":") +local period=P(".") +local periods=P("..") +local fwslash=P("/") +local bwslash=P("\\") +local slashes=S("\\/") +local noperiod=1-period +local noslashes=1-slashes +local name=noperiod^1 +local suffix=period/""*(1-period-slashes)^1*-1 +local pattern=C((1-(slashes^1*noslashes^1*-1))^1)*P(1) +local function pathpart(name,default) + return name and lpegmatch(pattern,name) or default or "" +end +local pattern=(noslashes^0*slashes)^1*C(noslashes^1)*-1 +local function basename(name) + return name and lpegmatch(pattern,name) or name +end +local pattern=(noslashes^0*slashes^1)^0*Cs((1-suffix)^1)*suffix^0 +local function nameonly(name) + return name and lpegmatch(pattern,name) or name +end +local pattern=(noslashes^0*slashes)^0*(noperiod^1*period)^1*C(noperiod^1)*-1 +local function suffixonly(name) + return name and lpegmatch(pattern,name) or "" +end +local pattern=(noslashes^0*slashes)^0*noperiod^1*((period*C(noperiod^1))^1)*-1+Cc("") +local function suffixesonly(name) + if name then + return lpegmatch(pattern,name) + else + return "" + end +end +file.pathpart=pathpart +file.basename=basename +file.nameonly=nameonly +file.suffixonly=suffixonly +file.suffix=suffixonly +file.suffixesonly=suffixesonly +file.suffixes=suffixesonly +file.dirname=pathpart +file.extname=suffixonly +local drive=C(R("az","AZ"))*colon +local path=C((noslashes^0*slashes)^0) +local suffix=period*C(P(1-period)^0*P(-1)) +local base=C((1-suffix)^0) +local rest=C(P(1)^0) +drive=drive+Cc("") +path=path+Cc("") +base=base+Cc("") +suffix=suffix+Cc("") +local pattern_a=drive*path*base*suffix +local pattern_b=path*base*suffix +local pattern_c=C(drive*path)*C(base*suffix) +local pattern_d=path*rest +function file.splitname(str,splitdrive) + if not str then + elseif splitdrive then + return lpegmatch(pattern_a,str) + else + return lpegmatch(pattern_b,str) + end +end +function file.splitbase(str) + if str then + return lpegmatch(pattern_d,str) + else + return "",str + end +end +function file.nametotable(str,splitdrive) + if str then + local path,drive,subpath,name,base,suffix=lpegmatch(pattern_c,str) + if splitdrive then + return { + path=path, + drive=drive, + subpath=subpath, + name=name, + base=base, + suffix=suffix, + } + else + return { + path=path, + name=name, + base=base, + suffix=suffix, + } + end + end +end +local pattern=Cs(((period*(1-period-slashes)^1*-1)/""+1)^1) +function file.removesuffix(name) + return name and lpegmatch(pattern,name) +end +local suffix=period/""*(1-period-slashes)^1*-1 +local pattern=Cs((noslashes^0*slashes^1)^0*((1-suffix)^1))*Cs(suffix) +function file.addsuffix(filename,suffix,criterium) + if not filename or not suffix or suffix=="" then + return filename + elseif 
criterium==true then + return filename.."."..suffix + elseif not criterium then + local n,s=lpegmatch(pattern,filename) + if not s or s=="" then + return filename.."."..suffix + else + return filename + end + else + local n,s=lpegmatch(pattern,filename) + if s and s~="" then + local t=type(criterium) + if t=="table" then + for i=1,#criterium do + if s==criterium[i] then + return filename + end + end + elseif t=="string" then + if s==criterium then + return filename + end + end + end + return (n or filename).."."..suffix + end +end +local suffix=period*(1-period-slashes)^1*-1 +local pattern=Cs((1-suffix)^0) +function file.replacesuffix(name,suffix) + if name and suffix and suffix~="" then + return lpegmatch(pattern,name).."."..suffix + else + return name + end +end +local reslasher=lpeg.replacer(P("\\"),"/") +function file.reslash(str) + return str and lpegmatch(reslasher,str) +end +function file.is_writable(name) + if not name then + elseif lfs.isdir(name) then + name=name.."/m_t_x_t_e_s_t.tmp" + local f=io.open(name,"wb") + if f then + f:close() + os.remove(name) + return true + end + elseif lfs.isfile(name) then + local f=io.open(name,"ab") + if f then + f:close() + return true + end + else + local f=io.open(name,"ab") + if f then + f:close() + os.remove(name) + return true + end + end + return false +end +local readable=P("r")*Cc(true) +function file.is_readable(name) + if name then + local a=attributes(name) + return a and lpegmatch(readable,a.permissions) or false + else + return false + end +end +file.isreadable=file.is_readable +file.iswritable=file.is_writable +function file.size(name) + if name then + local a=attributes(name) + return a and a.size or 0 + else + return 0 + end +end +function file.splitpath(str,separator) + return str and checkedsplit(lpegmatch(reslasher,str),separator or io.pathseparator) +end +function file.joinpath(tab,separator) + return tab and concat(tab,separator or io.pathseparator) +end +local someslash=S("\\/") +local stripper=Cs(P(fwslash)^0/""*reslasher) +local isnetwork=someslash*someslash*(1-someslash)+(1-fwslash-colon)^1*colon +local isroot=fwslash^1*-1 +local hasroot=fwslash^1 +local reslasher=lpeg.replacer(S("\\/"),"/") +local deslasher=lpeg.replacer(S("\\/")^1,"/") +function file.join(one,two,three,...) + if not two then + return one=="" and one or lpegmatch(stripper,one) + end + if one=="" then + return lpegmatch(stripper,three and concat({ two,three,... },"/") or two) + end + if lpegmatch(isnetwork,one) then + local one=lpegmatch(reslasher,one) + local two=lpegmatch(deslasher,three and concat({ two,three,... },"/") or two) + if lpegmatch(hasroot,two) then + return one..two + else + return one.."/"..two + end + elseif lpegmatch(isroot,one) then + local two=lpegmatch(deslasher,three and concat({ two,three,... },"/") or two) + if lpegmatch(hasroot,two) then + return two + else + return "/"..two + end + else + return lpegmatch(deslasher,concat({ one,two,three,... },"/")) + end +end +local drivespec=R("az","AZ")^1*colon +local anchors=fwslash+drivespec +local untouched=periods+(1-period)^1*P(-1) +local mswindrive=Cs(drivespec*(bwslash/"/"+fwslash)^0) +local mswinuncpath=(bwslash+fwslash)*(bwslash+fwslash)*Cc("//") +local splitstarter=(mswindrive+mswinuncpath+Cc(false))*Ct(lpeg.splitat(S("/\\")^1)) +local absolute=fwslash +function file.collapsepath(str,anchor) + if not str then + return + end + if anchor==true and not lpegmatch(anchors,str) then + str=getcurrentdir().."/"..str + end + if str=="" or str=="." then + return "." 
+ elseif lpegmatch(untouched,str) then + return lpegmatch(reslasher,str) + end + local starter,oldelements=lpegmatch(splitstarter,str) + local newelements={} + local i=#oldelements + while i>0 do + local element=oldelements[i] + if element=='.' then + elseif element=='..' then + local n=i-1 + while n>0 do + local element=oldelements[n] + if element~='..' and element~='.' then + oldelements[n]='.' + break + else + n=n-1 + end + end + if n<1 then + insert(newelements,1,'..') + end + elseif element~="" then + insert(newelements,1,element) + end + i=i-1 + end + if #newelements==0 then + return starter or "." + elseif starter then + return starter..concat(newelements,'/') + elseif lpegmatch(absolute,str) then + return "/"..concat(newelements,'/') + else + newelements=concat(newelements,'/') + if anchor=="." and find(str,"^%./") then + return "./"..newelements + else + return newelements + end + end +end +local tricky=S("/\\")*P(-1) +local attributes=lfs.attributes +function lfs.isdir(name) + if lpegmatch(tricky,name) then + return attributes(name,"mode")=="directory" + else + return attributes(name.."/.","mode")=="directory" + end +end +function lfs.isfile(name) + return attributes(name,"mode")=="file" +end +local validchars=R("az","09","AZ","--","..") +local pattern_a=lpeg.replacer(1-validchars) +local pattern_a=Cs((validchars+P(1)/"-")^1) +local whatever=P("-")^0/"" +local pattern_b=Cs(whatever*(1-whatever*-1)^1) +function file.robustname(str,strict) + if str then + str=lpegmatch(pattern_a,str) or str + if strict then + return lpegmatch(pattern_b,str) or str + else + return str + end + end +end +file.readdata=io.loaddata +file.savedata=io.savedata +function file.copy(oldname,newname) + if oldname and newname then + local data=io.loaddata(oldname) + if data and data~="" then + file.savedata(newname,data) + end + end +end +local letter=R("az","AZ")+S("_-+") +local separator=P("://") +local qualified=period^0*fwslash+letter*colon+letter^1*separator+letter^1*fwslash +local rootbased=fwslash+letter*colon +lpeg.patterns.qualified=qualified +lpeg.patterns.rootbased=rootbased +function file.is_qualified_path(filename) + return filename and lpegmatch(qualified,filename)~=nil +end +function file.is_rootbased_path(filename) + return filename and lpegmatch(rootbased,filename)~=nil +end +function file.strip(name,dir) + if name then + local b,a=match(name,"^(.-)"..dir.."(.*)$") + return a~="" and a or name + end +end +function lfs.mkdirs(path) + local full="" + for sub in gmatch(path,"(/*[^\\/]+)") do + full=full..sub + lfs.mkdir(full) + end +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['l-boolean']={ + version=1.001, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local type,tonumber=type,tonumber +boolean=boolean or {} +local boolean=boolean +function boolean.tonumber(b) + if b then return 1 else return 0 end +end +function toboolean(str,tolerant) + if str==nil then + return false + elseif str==false then + return false + elseif str==true then + return true + elseif str=="true" then + return true + elseif str=="false" then + return false + elseif not tolerant then + return false + elseif str==0 then + return false + elseif (tonumber(str) or 0)>0 then + return true + else + return str=="yes" or str=="on" or str=="t" + end +end +string.toboolean=toboolean +function 
string.booleanstring(str) + if str=="0" then + return false + elseif str=="1" then + return true + elseif str=="" then + return false + elseif str=="false" then + return false + elseif str=="true" then + return true + elseif (tonumber(str) or 0)>0 then + return true + else + return str=="yes" or str=="on" or str=="t" + end +end +function string.is_boolean(str,default,strict) + if type(str)=="string" then + if str=="true" or str=="yes" or str=="on" or str=="t" or (not strict and str=="1") then + return true + elseif str=="false" or str=="no" or str=="off" or str=="f" or (not strict and str=="0") then + return false + end + end + return default +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['l-math']={ + version=1.001, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local floor,sin,cos,tan=math.floor,math.sin,math.cos,math.tan +if not math.round then + function math.round(x) return floor(x+0.5) end +end +if not math.div then + function math.div(n,m) return floor(n/m) end +end +if not math.mod then + function math.mod(n,m) return n%m end +end +local pipi=2*math.pi/360 +if not math.sind then + function math.sind(d) return sin(d*pipi) end + function math.cosd(d) return cos(d*pipi) end + function math.tand(d) return tan(d*pipi) end +end +if not math.odd then + function math.odd (n) return n%2~=0 end + function math.even(n) return n%2==0 end +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['util-str']={ + version=1.001, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +utilities=utilities or {} +utilities.strings=utilities.strings or {} +local strings=utilities.strings +local format,gsub,rep,sub=string.format,string.gsub,string.rep,string.sub +local load,dump=load,string.dump +local tonumber,type,tostring=tonumber,type,tostring +local unpack,concat=table.unpack,table.concat +local P,V,C,S,R,Ct,Cs,Cp,Carg,Cc=lpeg.P,lpeg.V,lpeg.C,lpeg.S,lpeg.R,lpeg.Ct,lpeg.Cs,lpeg.Cp,lpeg.Carg,lpeg.Cc +local patterns,lpegmatch=lpeg.patterns,lpeg.match +local utfchar,utfbyte=utf.char,utf.byte +local loadstripped=nil +if _LUAVERSION<5.2 then + loadstripped=function(str,shortcuts) + return load(str) + end +else + loadstripped=function(str,shortcuts) + if shortcuts then + return load(dump(load(str),true),nil,nil,shortcuts) + else + return load(dump(load(str),true)) + end + end +end +if not number then number={} end +local stripper=patterns.stripzeros +local function points(n) + n=tonumber(n) + return (not n or n==0) and "0pt" or lpegmatch(stripper,format("%.5fpt",n/65536)) +end +local function basepoints(n) + n=tonumber(n) + return (not n or n==0) and "0bp" or lpegmatch(stripper,format("%.5fbp",n*(7200/7227)/65536)) +end +number.points=points +number.basepoints=basepoints +local rubish=patterns.spaceortab^0*patterns.newline +local anyrubish=patterns.spaceortab+patterns.newline +local anything=patterns.anything +local stripped=(patterns.spaceortab^1/"")*patterns.newline +local leading=rubish^0/"" +local trailing=(anyrubish^1*patterns.endofstring)/"" +local redundant=rubish^3/"\n" +local pattern=Cs(leading*(trailing+redundant+stripped+anything)^0) +function 
strings.collapsecrlf(str) + return lpegmatch(pattern,str) +end +local repeaters={} +function strings.newrepeater(str,offset) + offset=offset or 0 + local s=repeaters[str] + if not s then + s={} + repeaters[str]=s + end + local t=s[offset] + if t then + return t + end + t={} + setmetatable(t,{ __index=function(t,k) + if not k then + return "" + end + local n=k+offset + local s=n>0 and rep(str,n) or "" + t[k]=s + return s + end }) + s[offset]=t + return t +end +local extra,tab,start=0,0,4,0 +local nspaces=strings.newrepeater(" ") +string.nspaces=nspaces +local pattern=Carg(1)/function(t) + extra,tab,start=0,t or 7,1 + end*Cs(( + Cp()*patterns.tab/function(position) + local current=(position-start+1)+extra + local spaces=tab-(current-1)%tab + if spaces>0 then + extra=extra+spaces-1 + return nspaces[spaces] + else + return "" + end + end+patterns.newline*Cp()/function(position) + extra,start=0,position + end+patterns.anything + )^1) +function strings.tabtospace(str,tab) + return lpegmatch(pattern,str,1,tab or 7) +end +local newline=patterns.newline +local endofstring=patterns.endofstring +local whitespace=patterns.whitespace +local spacer=patterns.spacer +local space=spacer^0 +local nospace=space/"" +local endofline=nospace*newline +local stripend=(whitespace^1*endofstring)/"" +local normalline=(nospace*((1-space*(newline+endofstring))^1)*nospace) +local stripempty=endofline^1/"" +local normalempty=endofline^1 +local singleempty=endofline*(endofline^0/"") +local doubleempty=endofline*endofline^-1*(endofline^0/"") +local stripstart=stripempty^0 +local p_prune_normal=Cs (stripstart*(stripend+normalline+normalempty )^0 ) +local p_prune_collapse=Cs (stripstart*(stripend+normalline+doubleempty )^0 ) +local p_prune_noempty=Cs (stripstart*(stripend+normalline+singleempty )^0 ) +local p_retain_normal=Cs ((normalline+normalempty )^0 ) +local p_retain_collapse=Cs ((normalline+doubleempty )^0 ) +local p_retain_noempty=Cs ((normalline+singleempty )^0 ) +local striplinepatterns={ + ["prune"]=p_prune_normal, + ["prune and collapse"]=p_prune_collapse, + ["prune and no empty"]=p_prune_noempty, + ["retain"]=p_retain_normal, + ["retain and collapse"]=p_retain_collapse, + ["retain and no empty"]=p_retain_noempty, + ["collapse"]=patterns.collapser, +} +strings.striplinepatterns=striplinepatterns +function strings.striplines(str,how) + return str and lpegmatch(how and striplinepatterns[how] or p_prune_collapse,str) or str +end +strings.striplong=strings.striplines +function strings.nice(str) + str=gsub(str,"[:%-+_]+"," ") + return str +end +local n=0 +local sequenced=table.sequenced +function string.autodouble(s,sep) + if s==nil then + return '""' + end + local t=type(s) + if t=="number" then + return tostring(s) + end + if t=="table" then + return ('"'..sequenced(s,sep or ",")..'"') + end + return ('"'..tostring(s)..'"') +end +function string.autosingle(s,sep) + if s==nil then + return "''" + end + local t=type(s) + if t=="number" then + return tostring(s) + end + if t=="table" then + return ("'"..sequenced(s,sep or ",").."'") + end + return ("'"..tostring(s).."'") +end +local tracedchars={} +string.tracedchars=tracedchars +strings.tracers=tracedchars +function string.tracedchar(b) + if type(b)=="number" then + return tracedchars[b] or (utfchar(b).." (U+"..format('%05X',b)..")") + else + local c=utfbyte(b) + return tracedchars[c] or (b.." 
(U+"..format('%05X',c)..")") + end +end +function number.signed(i) + if i>0 then + return "+",i + else + return "-",-i + end +end +local zero=P("0")^1/"" +local plus=P("+")/"" +local minus=P("-") +local separator=S(".") +local digit=R("09") +local trailing=zero^1*#S("eE") +local exponent=(S("eE")*(plus+Cs((minus*zero^0*P(-1))/"")+minus)*zero^0*(P(-1)*Cc("0")+P(1)^1)) +local pattern_a=Cs(minus^0*digit^1*(separator/""*trailing+separator*(trailing+digit)^0)*exponent) +local pattern_b=Cs((exponent+P(1))^0) +function number.sparseexponent(f,n) + if not n then + n=f + f="%e" + end + local tn=type(n) + if tn=="string" then + local m=tonumber(n) + if m then + return lpegmatch((f=="%e" or f=="%E") and pattern_a or pattern_b,format(f,m)) + end + elseif tn=="number" then + return lpegmatch((f=="%e" or f=="%E") and pattern_a or pattern_b,format(f,n)) + end + return tostring(n) +end +local template=[[ +%s +%s +return function(%s) return %s end +]] +local preamble,environment="",{} +if _LUAVERSION<5.2 then + preamble=[[ +local lpeg=lpeg +local type=type +local tostring=tostring +local tonumber=tonumber +local format=string.format +local concat=table.concat +local signed=number.signed +local points=number.points +local basepoints= number.basepoints +local utfchar=utf.char +local utfbyte=utf.byte +local lpegmatch=lpeg.match +local nspaces=string.nspaces +local tracedchar=string.tracedchar +local autosingle=string.autosingle +local autodouble=string.autodouble +local sequenced=table.sequenced +local formattednumber=number.formatted +local sparseexponent=number.sparseexponent + ]] +else + environment={ + global=global or _G, + lpeg=lpeg, + type=type, + tostring=tostring, + tonumber=tonumber, + format=string.format, + concat=table.concat, + signed=number.signed, + points=number.points, + basepoints=number.basepoints, + utfchar=utf.char, + utfbyte=utf.byte, + lpegmatch=lpeg.match, + nspaces=string.nspaces, + tracedchar=string.tracedchar, + autosingle=string.autosingle, + autodouble=string.autodouble, + sequenced=table.sequenced, + formattednumber=number.formatted, + sparseexponent=number.sparseexponent, + } +end +local arguments={ "a1" } +setmetatable(arguments,{ __index=function(t,k) + local v=t[k-1]..",a"..k + t[k]=v + return v + end +}) +local prefix_any=C((S("+- .")+R("09"))^0) +local prefix_tab=P("{")*C((1-P("}"))^0)*P("}")+C((1-R("az","AZ","09","%%"))^0) +local format_s=function(f) + n=n+1 + if f and f~="" then + return format("format('%%%ss',a%s)",f,n) + else + return format("(a%s or '')",n) + end +end +local format_S=function(f) + n=n+1 + if f and f~="" then + return format("format('%%%ss',tostring(a%s))",f,n) + else + return format("tostring(a%s)",n) + end +end +local format_q=function() + n=n+1 + return format("(a%s and format('%%q',a%s) or '')",n,n) +end +local format_Q=function() + n=n+1 + return format("format('%%q',tostring(a%s))",n) +end +local format_i=function(f) + n=n+1 + if f and f~="" then + return format("format('%%%si',a%s)",f,n) + else + return format("format('%%i',a%s)",n) + end +end +local format_d=format_i +local format_I=function(f) + n=n+1 + return format("format('%%s%%%si',signed(a%s))",f,n) +end +local format_f=function(f) + n=n+1 + return format("format('%%%sf',a%s)",f,n) +end +local format_F=function(f) + n=n+1 + if not f or f=="" then + return format("(((a%s > -0.0000000005 and a%s < 0.0000000005) and '0') or format((a%s %% 1 == 0) and '%%i' or '%%.9f',a%s))",n,n,n,n) + else + return format("format((a%s %% 1 == 0) and '%%i' or '%%%sf',a%s)",n,f,n) + end +end +local 
format_g=function(f) + n=n+1 + return format("format('%%%sg',a%s)",f,n) +end +local format_G=function(f) + n=n+1 + return format("format('%%%sG',a%s)",f,n) +end +local format_e=function(f) + n=n+1 + return format("format('%%%se',a%s)",f,n) +end +local format_E=function(f) + n=n+1 + return format("format('%%%sE',a%s)",f,n) +end +local format_j=function(f) + n=n+1 + return format("sparseexponent('%%%se',a%s)",f,n) +end +local format_J=function(f) + n=n+1 + return format("sparseexponent('%%%sE',a%s)",f,n) +end +local format_x=function(f) + n=n+1 + return format("format('%%%sx',a%s)",f,n) +end +local format_X=function(f) + n=n+1 + return format("format('%%%sX',a%s)",f,n) +end +local format_o=function(f) + n=n+1 + return format("format('%%%so',a%s)",f,n) +end +local format_c=function() + n=n+1 + return format("utfchar(a%s)",n) +end +local format_C=function() + n=n+1 + return format("tracedchar(a%s)",n) +end +local format_r=function(f) + n=n+1 + return format("format('%%%s.0f',a%s)",f,n) +end +local format_h=function(f) + n=n+1 + if f=="-" then + f=sub(f,2) + return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) + else + return format("format('0x%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) + end +end +local format_H=function(f) + n=n+1 + if f=="-" then + f=sub(f,2) + return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) + else + return format("format('0x%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) + end +end +local format_u=function(f) + n=n+1 + if f=="-" then + f=sub(f,2) + return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) + else + return format("format('u+%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) + end +end +local format_U=function(f) + n=n+1 + if f=="-" then + f=sub(f,2) + return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) + else + return format("format('U+%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n) + end +end +local format_p=function() + n=n+1 + return format("points(a%s)",n) +end +local format_b=function() + n=n+1 + return format("basepoints(a%s)",n) +end +local format_t=function(f) + n=n+1 + if f and f~="" then + return format("concat(a%s,%q)",n,f) + else + return format("concat(a%s)",n) + end +end +local format_T=function(f) + n=n+1 + if f and f~="" then + return format("sequenced(a%s,%q)",n,f) + else + return format("sequenced(a%s)",n) + end +end +local format_l=function() + n=n+1 + return format("(a%s and 'true' or 'false')",n) +end +local format_L=function() + n=n+1 + return format("(a%s and 'TRUE' or 'FALSE')",n) +end +local format_N=function() + n=n+1 + return format("tostring(tonumber(a%s) or a%s)",n,n) +end +local format_a=function(f) + n=n+1 + if f and f~="" then + return format("autosingle(a%s,%q)",n,f) + else + return format("autosingle(a%s)",n) + end +end +local format_A=function(f) + n=n+1 + if f and f~="" then + return format("autodouble(a%s,%q)",n,f) + else + return format("autodouble(a%s)",n) + end +end +local format_w=function(f) + n=n+1 + f=tonumber(f) + if f then + return format("nspaces[%s+a%s]",f,n) + else + return format("nspaces[a%s]",n) + end +end +local format_W=function(f) + return format("nspaces[%s]",tonumber(f) or 0) +end +local digit=patterns.digit +local period=patterns.period +local 
three=digit*digit*digit +local splitter=Cs ( + (((1-(three^1*period))^1+C(three))*(Carg(1)*three)^1+C((1-period)^1))*(P(1)/""*Carg(2))*C(2) +) +patterns.formattednumber=splitter +function number.formatted(n,sep1,sep2) + local s=type(s)=="string" and n or format("%0.2f",n) + if sep1==true then + return lpegmatch(splitter,s,1,".",",") + elseif sep1=="." then + return lpegmatch(splitter,s,1,sep1,sep2 or ",") + elseif sep1=="," then + return lpegmatch(splitter,s,1,sep1,sep2 or ".") + else + return lpegmatch(splitter,s,1,sep1 or ",",sep2 or ".") + end +end +local format_m=function(f) + n=n+1 + if not f or f=="" then + f="," + end + return format([[formattednumber(a%s,%q,".")]],n,f) +end +local format_M=function(f) + n=n+1 + if not f or f=="" then + f="." + end + return format([[formattednumber(a%s,%q,",")]],n,f) +end +local format_z=function(f) + n=n+(tonumber(f) or 1) + return "''" +end +local format_rest=function(s) + return format("%q",s) +end +local format_extension=function(extensions,f,name) + local extension=extensions[name] or "tostring(%s)" + local f=tonumber(f) or 1 + if f==0 then + return extension + elseif f==1 then + n=n+1 + local a="a"..n + return format(extension,a,a) + elseif f<0 then + local a="a"..(n+f+1) + return format(extension,a,a) + else + local t={} + for i=1,f do + n=n+1 + t[#t+1]="a"..n + end + return format(extension,unpack(t)) + end +end +local builder=Cs { "start", + start=( + ( + P("%")/""*( + V("!") ++V("s")+V("q")+V("i")+V("d")+V("f")+V("F")+V("g")+V("G")+V("e")+V("E")+V("x")+V("X")+V("o") ++V("c")+V("C")+V("S") ++V("Q") ++V("N") ++V("r")+V("h")+V("H")+V("u")+V("U")+V("p")+V("b")+V("t")+V("T")+V("l")+V("L")+V("I")+V("w") ++V("W") ++V("a") ++V("A") ++V("j")+V("J") ++V("m")+V("M") ++V("z") + )+V("*") + )*(P(-1)+Carg(1)) + )^0, + ["s"]=(prefix_any*P("s"))/format_s, + ["q"]=(prefix_any*P("q"))/format_q, + ["i"]=(prefix_any*P("i"))/format_i, + ["d"]=(prefix_any*P("d"))/format_d, + ["f"]=(prefix_any*P("f"))/format_f, + ["F"]=(prefix_any*P("F"))/format_F, + ["g"]=(prefix_any*P("g"))/format_g, + ["G"]=(prefix_any*P("G"))/format_G, + ["e"]=(prefix_any*P("e"))/format_e, + ["E"]=(prefix_any*P("E"))/format_E, + ["x"]=(prefix_any*P("x"))/format_x, + ["X"]=(prefix_any*P("X"))/format_X, + ["o"]=(prefix_any*P("o"))/format_o, + ["S"]=(prefix_any*P("S"))/format_S, + ["Q"]=(prefix_any*P("Q"))/format_S, + ["N"]=(prefix_any*P("N"))/format_N, + ["c"]=(prefix_any*P("c"))/format_c, + ["C"]=(prefix_any*P("C"))/format_C, + ["r"]=(prefix_any*P("r"))/format_r, + ["h"]=(prefix_any*P("h"))/format_h, + ["H"]=(prefix_any*P("H"))/format_H, + ["u"]=(prefix_any*P("u"))/format_u, + ["U"]=(prefix_any*P("U"))/format_U, + ["p"]=(prefix_any*P("p"))/format_p, + ["b"]=(prefix_any*P("b"))/format_b, + ["t"]=(prefix_tab*P("t"))/format_t, + ["T"]=(prefix_tab*P("T"))/format_T, + ["l"]=(prefix_any*P("l"))/format_l, + ["L"]=(prefix_any*P("L"))/format_L, + ["I"]=(prefix_any*P("I"))/format_I, + ["w"]=(prefix_any*P("w"))/format_w, + ["W"]=(prefix_any*P("W"))/format_W, + ["j"]=(prefix_any*P("j"))/format_j, + ["J"]=(prefix_any*P("J"))/format_J, + ["m"]=(prefix_tab*P("m"))/format_m, + ["M"]=(prefix_tab*P("M"))/format_M, + ["z"]=(prefix_any*P("z"))/format_z, + ["a"]=(prefix_any*P("a"))/format_a, + ["A"]=(prefix_any*P("A"))/format_A, + ["*"]=Cs(((1-P("%"))^1+P("%%")/"%%")^1)/format_rest, + ["?"]=Cs(((1-P("%"))^1 )^1)/format_rest, + ["!"]=Carg(2)*prefix_any*P("!")*C((1-P("!"))^1)*P("!")/format_extension, +} +local direct=Cs ( + P("%")*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*P(-1)/[[local format = string.format return 
function(str) return format("%0",str) end]]
+)
+local function make(t,str)
+ local f
+ local p
+ local p=lpegmatch(direct,str)
+ if p then
+  f=loadstripped(p)()
+ else
+  n=0
+  p=lpegmatch(builder,str,1,t._connector_,t._extensions_)
+  if n>0 then
+   p=format(template,preamble,t._preamble_,arguments[n],p)
+   f=loadstripped(p,t._environment_)()
+  else
+   f=function() return str end
+  end
+ end
+ t[str]=f
+ return f
+end
+local function use(t,fmt,...)
+ return t[fmt](...)
+end
+strings.formatters={}
+if _LUAVERSION<5.2 then
+ function strings.formatters.new(noconcat)
+  local t={ _type_="formatter",_connector_=noconcat and "," or "..",_extensions_={},_preamble_=preamble,_environment_={} }
+  setmetatable(t,{ __index=make,__call=use })
+  return t
+ end
+else
+ function strings.formatters.new(noconcat)
+  local e={}
+  for k,v in next,environment do
+   e[k]=v
+  end
+  local t={ _type_="formatter",_connector_=noconcat and "," or "..",_extensions_={},_preamble_="",_environment_=e }
+  setmetatable(t,{ __index=make,__call=use })
+  return t
+ end
+end
+local formatters=strings.formatters.new()
+string.formatters=formatters
+string.formatter=function(str,...) return formatters[str](...) end
+local function add(t,name,template,preamble)
+ if type(t)=="table" and t._type_=="formatter" then
+  t._extensions_[name]=template or "%s"
+  if type(preamble)=="string" then
+   t._preamble_=preamble.."\n"..t._preamble_
+  elseif type(preamble)=="table" then
+   for k,v in next,preamble do
+    t._environment_[k]=v
+   end
+  end
+ end
+end
+strings.formatters.add=add
+patterns.xmlescape=Cs((P("<")/"&lt;"+P(">")/"&gt;"+P("&")/"&amp;"+P('"')/"&quot;"+P(1))^0)
+patterns.texescape=Cs((C(S("#$%\\{}"))/"\\%1"+P(1))^0)
+patterns.luaescape=Cs(((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0)
+patterns.luaquoted=Cs(Cc('"')*((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0*Cc('"'))
+if _LUAVERSION<5.2 then
+ add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],"local xmlescape = lpeg.patterns.xmlescape")
+ add(formatters,"tex",[[lpegmatch(texescape,%s)]],"local texescape = lpeg.patterns.texescape")
+ add(formatters,"lua",[[lpegmatch(luaescape,%s)]],"local luaescape = lpeg.patterns.luaescape")
+else
+ add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],{ xmlescape=lpeg.patterns.xmlescape })
+ add(formatters,"tex",[[lpegmatch(texescape,%s)]],{ texescape=lpeg.patterns.texescape })
+ add(formatters,"lua",[[lpegmatch(luaescape,%s)]],{ luaescape=lpeg.patterns.luaescape })
+end
+local dquote=patterns.dquote
+local equote=patterns.escaped+dquote/'\\"'+1
+local space=patterns.space
+local cquote=Cc('"')
+local pattern=Cs(dquote*(equote-P(-2))^0*dquote)
++Cs(cquote*(equote-space)^0*space*equote^0*cquote)
+function string.optionalquoted(str)
+ return lpegmatch(pattern,str) or str
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luat-basics-gen']={
+ version=1.100,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local dummyfunction=function()
+end
+local dummyreporter=function(c)
+ return function(...)
+  (texio.reporter or texio.write_nl)(c.." 
: "..string.formatters(...)) + end +end +statistics={ + register=dummyfunction, + starttiming=dummyfunction, + stoptiming=dummyfunction, + elapsedtime=nil, +} +directives={ + register=dummyfunction, + enable=dummyfunction, + disable=dummyfunction, +} +trackers={ + register=dummyfunction, + enable=dummyfunction, + disable=dummyfunction, +} +experiments={ + register=dummyfunction, + enable=dummyfunction, + disable=dummyfunction, +} +storage={ + register=dummyfunction, + shared={}, +} +logs={ + new=dummyreporter, + reporter=dummyreporter, + messenger=dummyreporter, + report=dummyfunction, +} +callbacks={ + register=function(n,f) return callback.register(n,f) end, +} +utilities={ + storage={ + allocate=function(t) return t or {} end, + mark=function(t) return t or {} end, + }, +} +characters=characters or { + data={} +} +texconfig.kpse_init=true +resolvers=resolvers or {} +local remapper={ + otf="opentype fonts", + ttf="truetype fonts", + ttc="truetype fonts", + dfont="truetype fonts", + cid="cid maps", + cidmap="cid maps", + fea="font feature files", + pfa="type1 fonts", + pfb="type1 fonts", + afm="afm", +} +function resolvers.findfile(name,fileformat) + name=string.gsub(name,"\\","/") + if not fileformat or fileformat=="" then + fileformat=file.suffix(name) + if fileformat=="" then + fileformat="tex" + end + end + fileformat=string.lower(fileformat) + fileformat=remapper[fileformat] or fileformat + local found=kpse.find_file(name,fileformat) + if not found or found=="" then + found=kpse.find_file(name,"other text files") + end + return found +end +resolvers.findbinfile=resolvers.findfile +function resolvers.loadbinfile(filename,filetype) + local data=io.loaddata(filename) + return true,data,#data +end +function resolvers.resolve(s) + return s +end +function resolvers.unresolve(s) + return s +end +caches={} +local writable=nil +local readables={} +local usingjit=jit +if not caches.namespace or caches.namespace=="" or caches.namespace=="context" then + caches.namespace='generic' +end +do + local cachepaths=kpse.expand_var('$TEXMFCACHE') or "" + if cachepaths=="" or cachepaths=="$TEXMFCACHE" then + cachepaths=kpse.expand_var('$TEXMFVAR') or "" + end + if cachepaths=="" or cachepaths=="$TEXMFVAR" then + cachepaths=kpse.expand_var('$VARTEXMF') or "" + end + if cachepaths=="" then + local fallbacks={ "TMPDIR","TEMPDIR","TMP","TEMP","HOME","HOMEPATH" } + for i=1,#fallbacks do + cachepaths=os.getenv(fallbacks[i]) or "" + if cachepath~="" and lfs.isdir(cachepath) then + break + end + end + end + if cachepaths=="" then + cachepaths="." 
+ end + cachepaths=string.split(cachepaths,os.type=="windows" and ";" or ":") + for i=1,#cachepaths do + local cachepath=cachepaths[i] + if not lfs.isdir(cachepath) then + lfs.mkdirs(cachepath) + if lfs.isdir(cachepath) then + texio.write(string.format("(created cache path: %s)",cachepath)) + end + end + if file.is_writable(cachepath) then + writable=file.join(cachepath,"luatex-cache") + lfs.mkdir(writable) + writable=file.join(writable,caches.namespace) + lfs.mkdir(writable) + break + end + end + for i=1,#cachepaths do + if file.is_readable(cachepaths[i]) then + readables[#readables+1]=file.join(cachepaths[i],"luatex-cache",caches.namespace) + end + end + if not writable then + texio.write_nl("quiting: fix your writable cache path") + os.exit() + elseif #readables==0 then + texio.write_nl("quiting: fix your readable cache path") + os.exit() + elseif #readables==1 and readables[1]==writable then + texio.write(string.format("(using cache: %s)",writable)) + else + texio.write(string.format("(using write cache: %s)",writable)) + texio.write(string.format("(using read cache: %s)",table.concat(readables," "))) + end +end +function caches.getwritablepath(category,subcategory) + local path=file.join(writable,category) + lfs.mkdir(path) + path=file.join(path,subcategory) + lfs.mkdir(path) + return path +end +function caches.getreadablepaths(category,subcategory) + local t={} + for i=1,#readables do + t[i]=file.join(readables[i],category,subcategory) + end + return t +end +local function makefullname(path,name) + if path and path~="" then + return file.addsuffix(file.join(path,name),"lua"),file.addsuffix(file.join(path,name),usingjit and "lub" or "luc") + end +end +function caches.is_writable(path,name) + local fullname=makefullname(path,name) + return fullname and file.is_writable(fullname) +end +function caches.loaddata(paths,name) + for i=1,#paths do + local data=false + local luaname,lucname=makefullname(paths[i],name) + if lucname and not lfs.isfile(lucname) and type(caches.compile)=="function" then + texio.write(string.format("(compiling luc: %s)",lucname)) + data=loadfile(luaname) + if data then + data=data() + end + if data then + caches.compile(data,luaname,lucname) + return data + end + end + if lucname and lfs.isfile(lucname) then + texio.write(string.format("(load luc: %s)",lucname)) + data=loadfile(lucname) + if data then + data=data() + end + if data then + return data + else + texio.write(string.format("(loading failed: %s)",lucname)) + end + end + if luaname and lfs.isfile(luaname) then + texio.write(string.format("(load lua: %s)",luaname)) + data=loadfile(luaname) + if data then + data=data() + end + if data then + return data + end + end + end +end +function caches.savedata(path,name,data) + local luaname,lucname=makefullname(path,name) + if luaname then + texio.write(string.format("(save: %s)",luaname)) + table.tofile(luaname,data,true) + if lucname and type(caches.compile)=="function" then + os.remove(lucname) + texio.write(string.format("(save: %s)",lucname)) + caches.compile(data,luaname,lucname) + end + end +end +function caches.compile(data,luaname,lucname) + local d=io.loaddata(luaname) + if not d or d=="" then + d=table.serialize(data,true) + end + if d and d~="" then + local f=io.open(lucname,'wb') + if f then + local s=loadstring(d) + if s then + f:write(string.dump(s,true)) + end + f:close() + end + end +end +function table.setmetatableindex(t,f) + if type(t)~="table" then + f=f or t + t={} + end + setmetatable(t,{ __index=f }) + return t +end +arguments={} +if arg then 
+ for i=1,#arg do + local k,v=string.match(arg[i],"^%-%-([^=]+)=?(.-)$") + if k and v then + arguments[k]=v + end + end +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['data-con']={ + version=1.100, + comment="companion to luat-lib.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local format,lower,gsub=string.format,string.lower,string.gsub +local trace_cache=false trackers.register("resolvers.cache",function(v) trace_cache=v end) +local trace_containers=false trackers.register("resolvers.containers",function(v) trace_containers=v end) +local trace_storage=false trackers.register("resolvers.storage",function(v) trace_storage=v end) +containers=containers or {} +local containers=containers +containers.usecache=true +local report_containers=logs.reporter("resolvers","containers") +local allocated={} +local mt={ + __index=function(t,k) + if k=="writable" then + local writable=caches.getwritablepath(t.category,t.subcategory) or { "." } + t.writable=writable + return writable + elseif k=="readables" then + local readables=caches.getreadablepaths(t.category,t.subcategory) or { "." } + t.readables=readables + return readables + end + end, + __storage__=true +} +function containers.define(category,subcategory,version,enabled) + if category and subcategory then + local c=allocated[category] + if not c then + c={} + allocated[category]=c + end + local s=c[subcategory] + if not s then + s={ + category=category, + subcategory=subcategory, + storage={}, + enabled=enabled, + version=version or math.pi, + trace=false, + } + setmetatable(s,mt) + c[subcategory]=s + end + return s + end +end +function containers.is_usable(container,name) + return container.enabled and caches and caches.is_writable(container.writable,name) +end +function containers.is_valid(container,name) + if name and name~="" then + local storage=container.storage[name] + return storage and storage.cache_version==container.version + else + return false + end +end +function containers.read(container,name) + local storage=container.storage + local stored=storage[name] + if not stored and container.enabled and caches and containers.usecache then + stored=caches.loaddata(container.readables,name) + if stored and stored.cache_version==container.version then + if trace_cache or trace_containers then + report_containers("action %a, category %a, name %a","load",container.subcategory,name) + end + else + stored=nil + end + storage[name]=stored + elseif stored then + if trace_cache or trace_containers then + report_containers("action %a, category %a, name %a","reuse",container.subcategory,name) + end + end + return stored +end +function containers.write(container,name,data) + if data then + data.cache_version=container.version + if container.enabled and caches then + local unique,shared=data.unique,data.shared + data.unique,data.shared=nil,nil + caches.savedata(container.writable,name,data) + if trace_cache or trace_containers then + report_containers("action %a, category %a, name %a","save",container.subcategory,name) + end + data.unique,data.shared=unique,shared + end + if trace_cache or trace_containers then + report_containers("action %a, category %a, name %a","store",container.subcategory,name) + end + container.storage[name]=data + end + return data +end +function containers.content(container,name) + return container.storage[name] +end +function 
containers.cleanname(name) + return (gsub(lower(name),"[^%w\128-\255]+","-")) +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['luatex-fonts-nod']={ + version=1.001, + comment="companion to luatex-fonts.lua", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end +if tex.attribute[0]~=0 then + texio.write_nl("log","!") + texio.write_nl("log","! Attribute 0 is reserved for ConTeXt's font feature management and has to be") + texio.write_nl("log","! set to zero. Also, some attributes in the range 1-255 are used for special") + texio.write_nl("log","! purposes so setting them at the TeX end might break the font handler.") + texio.write_nl("log","!") + tex.attribute[0]=0 +end +attributes=attributes or {} +attributes.unsetvalue=-0x7FFFFFFF +local numbers,last={},127 +attributes.private=attributes.private or function(name) + local number=numbers[name] + if not number then + if last<255 then + last=last+1 + end + number=last + numbers[name]=number + end + return number +end +nodes={} +nodes.pool={} +nodes.handlers={} +local nodecodes={} for k,v in next,node.types () do nodecodes[string.gsub(v,"_","")]=k end +local whatcodes={} for k,v in next,node.whatsits() do whatcodes[string.gsub(v,"_","")]=k end +local glyphcodes={ [0]="character","glyph","ligature","ghost","left","right" } +local disccodes={ [0]="discretionary","explicit","automatic","regular","first","second" } +nodes.nodecodes=nodecodes +nodes.whatcodes=whatcodes +nodes.whatsitcodes=whatcodes +nodes.glyphcodes=glyphcodes +nodes.disccodes=disccodes +local free_node=node.free +local remove_node=node.remove +local new_node=node.new +local traverse_id=node.traverse_id +nodes.handlers.protectglyphs=node.protect_glyphs +nodes.handlers.unprotectglyphs=node.unprotect_glyphs +local math_code=nodecodes.math +local end_of_math=node.end_of_math +function node.end_of_math(n) + if n.id==math_code and n.subtype==1 then + return n + else + return end_of_math(n) + end +end +function nodes.remove(head,current,free_too) + local t=current + head,current=remove_node(head,current) + if t then + if free_too then + free_node(t) + t=nil + else + t.next,t.prev=nil,nil + end + end + return head,current,t +end +function nodes.delete(head,current) + return nodes.remove(head,current,true) +end +function nodes.pool.kern(k) + local n=new_node("kern",1) + n.kern=k + return n +end +local getfield=node.getfield +local setfield=node.setfield +nodes.getfield=getfield +nodes.setfield=setfield +nodes.getattr=getfield +nodes.setattr=setfield +nodes.tostring=node.tostring or tostring +nodes.copy=node.copy +nodes.copy_list=node.copy_list +nodes.delete=node.delete +nodes.dimensions=node.dimensions +nodes.end_of_math=node.end_of_math +nodes.flush_list=node.flush_list +nodes.flush_node=node.flush_node +nodes.free=node.free +nodes.insert_after=node.insert_after +nodes.insert_before=node.insert_before +nodes.hpack=node.hpack +nodes.new=node.new +nodes.tail=node.tail +nodes.traverse=node.traverse +nodes.traverse_id=node.traverse_id +nodes.slide=node.slide +nodes.vpack=node.vpack +nodes.first_glyph=node.first_glyph +nodes.first_character=node.first_character +nodes.has_glyph=node.has_glyph or node.first_glyph +nodes.current_attr=node.current_attr +nodes.do_ligature_n=node.do_ligature_n +nodes.has_field=node.has_field 
+nodes.last_node=node.last_node +nodes.usedlist=node.usedlist +nodes.protrusion_skippable=node.protrusion_skippable +nodes.write=node.write +nodes.has_attribute=node.has_attribute +nodes.set_attribute=node.set_attribute +nodes.unset_attribute=node.unset_attribute +nodes.protect_glyphs=node.protect_glyphs +nodes.unprotect_glyphs=node.unprotect_glyphs +nodes.kerning=node.kerning +nodes.ligaturing=node.ligaturing +nodes.mlist_to_hlist=node.mlist_to_hlist +local direct=node.direct +local nuts={} +nodes.nuts=nuts +local tonode=direct.tonode +local tonut=direct.todirect +nodes.tonode=tonode +nodes.tonut=tonut +nuts.tonode=tonode +nuts.tonut=tonut +local getfield=direct.getfield +local setfield=direct.setfield +nuts.getfield=getfield +nuts.setfield=setfield +nuts.getnext=direct.getnext +nuts.getprev=direct.getprev +nuts.getid=direct.getid +nuts.getattr=getfield +nuts.setattr=setfield +nuts.getfont=direct.getfont +nuts.getsubtype=direct.getsubtype +nuts.getchar=direct.getchar +nuts.insert_before=direct.insert_before +nuts.insert_after=direct.insert_after +nuts.delete=direct.delete +nuts.copy=direct.copy +nuts.tail=direct.tail +nuts.flush_list=direct.flush_list +nuts.end_of_math=direct.end_of_math +nuts.traverse=direct.traverse +nuts.traverse_id=direct.traverse_id +nuts.getprop=nuts.getattr +nuts.setprop=nuts.setattr +local new_nut=direct.new +nuts.new=new_nut +nuts.pool={} +function nuts.pool.kern(k) + local n=new_nut("kern",1) + setfield(n,"kern",k) + return n +end +local propertydata=direct.get_properties_table() +nodes.properties={ data=propertydata } +direct.set_properties_mode(true,true) +function direct.set_properties_mode() end +nuts.getprop=function(n,k) + local p=propertydata[n] + if p then + return p[k] + end +end +nuts.setprop=function(n,k,v) + if v then + local p=propertydata[n] + if p then + p[k]=v + else + propertydata[n]={ [k]=v } + end + end +end +nodes.setprop=nodes.setproperty +nodes.getprop=nodes.getproperty + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-ini']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local allocate=utilities.storage.allocate +local report_defining=logs.reporter("fonts","defining") +fonts=fonts or {} +local fonts=fonts +fonts.hashes={ identifiers=allocate() } +fonts.tables=fonts.tables or {} +fonts.helpers=fonts.helpers or {} +fonts.tracers=fonts.tracers or {} +fonts.specifiers=fonts.specifiers or {} +fonts.analyzers={} +fonts.readers={} +fonts.definers={ methods={} } +fonts.loggers={ register=function() end } +fontloader.totable=fontloader.to_table + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-con']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local next,tostring,rawget=next,tostring,rawget +local format,match,lower,gsub=string.format,string.match,string.lower,string.gsub +local utfbyte=utf.byte +local sort,insert,concat,sortedkeys,serialize,fastcopy=table.sort,table.insert,table.concat,table.sortedkeys,table.serialize,table.fastcopy +local derivetable=table.derive +local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end) 
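+-- [editorial note, not part of the upstream fontloader] The one-liner above is
+-- an idiom used throughout this file: a local boolean plus trackers.register,
+-- so the flag can be flipped at runtime by enabling the named tracker. A
+-- minimal sketch, assuming the trackers table in use provides enable/disable
+-- (the generic stub may reduce these to no-ops); guarded so it never runs:
+if false then
+ trackers.enable("fonts.defining") -- the registered callback sets trace_defining=true
+ trackers.disable("fonts.defining") -- and this resets it to false
+end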
+local trace_scaling=false trackers.register("fonts.scaling",function(v) trace_scaling=v end) +local report_defining=logs.reporter("fonts","defining") +local fonts=fonts +local constructors=fonts.constructors or {} +fonts.constructors=constructors +local handlers=fonts.handlers or {} +fonts.handlers=handlers +local allocate=utilities.storage.allocate +local setmetatableindex=table.setmetatableindex +constructors.dontembed=allocate() +constructors.autocleanup=true +constructors.namemode="fullpath" +constructors.version=1.01 +constructors.cache=containers.define("fonts","constructors",constructors.version,false) +constructors.privateoffset=0xF0000 +constructors.cacheintex=true +constructors.keys={ + properties={ + encodingbytes="number", + embedding="number", + cidinfo={}, + format="string", + fontname="string", + fullname="string", + filename="filename", + psname="string", + name="string", + virtualized="boolean", + hasitalics="boolean", + autoitalicamount="basepoints", + nostackmath="boolean", + noglyphnames="boolean", + mode="string", + hasmath="boolean", + mathitalics="boolean", + textitalics="boolean", + finalized="boolean", + }, + parameters={ + mathsize="number", + scriptpercentage="float", + scriptscriptpercentage="float", + units="cardinal", + designsize="scaledpoints", + expansion={ + stretch="integerscale", + shrink="integerscale", + step="integerscale", + auto="boolean", + }, + protrusion={ + auto="boolean", + }, + slantfactor="float", + extendfactor="float", + factor="float", + hfactor="float", + vfactor="float", + size="scaledpoints", + units="scaledpoints", + scaledpoints="scaledpoints", + slantperpoint="scaledpoints", + spacing={ + width="scaledpoints", + stretch="scaledpoints", + shrink="scaledpoints", + extra="scaledpoints", + }, + xheight="scaledpoints", + quad="scaledpoints", + ascender="scaledpoints", + descender="scaledpoints", + synonyms={ + space="spacing.width", + spacestretch="spacing.stretch", + spaceshrink="spacing.shrink", + extraspace="spacing.extra", + x_height="xheight", + space_stretch="spacing.stretch", + space_shrink="spacing.shrink", + extra_space="spacing.extra", + em="quad", + ex="xheight", + slant="slantperpoint", + }, + }, + description={ + width="basepoints", + height="basepoints", + depth="basepoints", + boundingbox={}, + }, + character={ + width="scaledpoints", + height="scaledpoints", + depth="scaledpoints", + italic="scaledpoints", + }, +} +local designsizes=allocate() +constructors.designsizes=designsizes +local loadedfonts=allocate() +constructors.loadedfonts=loadedfonts +local factors={ + pt=65536.0, + bp=65781.8, +} +function constructors.setfactor(f) + constructors.factor=factors[f or 'pt'] or factors.pt +end +constructors.setfactor() +function constructors.scaled(scaledpoints,designsize) + if scaledpoints<0 then + if designsize then + local factor=constructors.factor + if designsize>factor then + return (- scaledpoints/1000)*designsize + else + return (- scaledpoints/1000)*designsize*factor + end + else + return (- scaledpoints/1000)*10*factor + end + else + return scaledpoints + end +end +function constructors.cleanuptable(tfmdata) + if constructors.autocleanup and tfmdata.properties.virtualized then + for k,v in next,tfmdata.characters do + if v.commands then v.commands=nil end + end + end +end +function constructors.calculatescale(tfmdata,scaledpoints) + local parameters=tfmdata.parameters + if scaledpoints<0 then + scaledpoints=(- scaledpoints/1000)*(tfmdata.designsize or parameters.designsize) + end + return 
scaledpoints,scaledpoints/(parameters.units or 1000) +end +local unscaled={ + ScriptPercentScaleDown=true, + ScriptScriptPercentScaleDown=true, + RadicalDegreeBottomRaisePercent=true +} +function constructors.assignmathparameters(target,original) + local mathparameters=original.mathparameters + if mathparameters and next(mathparameters) then + local targetparameters=target.parameters + local targetproperties=target.properties + local targetmathparameters={} + local factor=targetproperties.math_is_scaled and 1 or targetparameters.factor + for name,value in next,mathparameters do + if unscaled[name] then + targetmathparameters[name]=value + else + targetmathparameters[name]=value*factor + end + end + if not targetmathparameters.FractionDelimiterSize then + targetmathparameters.FractionDelimiterSize=1.01*targetparameters.size + end + if not mathparameters.FractionDelimiterDisplayStyleSize then + targetmathparameters.FractionDelimiterDisplayStyleSize=2.40*targetparameters.size + end + target.mathparameters=targetmathparameters + end +end +function constructors.beforecopyingcharacters(target,original) +end +function constructors.aftercopyingcharacters(target,original) +end +constructors.sharefonts=false +constructors.nofsharedfonts=0 +local sharednames={} +function constructors.trytosharefont(target,tfmdata) + if constructors.sharefonts then + local characters=target.characters + local n=1 + local t={ target.psname } + local u=sortedkeys(characters) + for i=1,#u do + local k=u[i] + n=n+1;t[n]=k + n=n+1;t[n]=characters[k].index or k + end + local h=md5.HEX(concat(t," ")) + local s=sharednames[h] + if s then + if trace_defining then + report_defining("font %a uses backend resources of font %a",target.fullname,s) + end + target.fullname=s + constructors.nofsharedfonts=constructors.nofsharedfonts+1 + target.properties.sharedwith=s + else + sharednames[h]=target.fullname + end + end +end +function constructors.enhanceparameters(parameters) + local xheight=parameters.x_height + local quad=parameters.quad + local space=parameters.space + local stretch=parameters.space_stretch + local shrink=parameters.space_shrink + local extra=parameters.extra_space + local slant=parameters.slant + parameters.xheight=xheight + parameters.spacestretch=stretch + parameters.spaceshrink=shrink + parameters.extraspace=extra + parameters.em=quad + parameters.ex=xheight + parameters.slantperpoint=slant + parameters.spacing={ + width=space, + stretch=stretch, + shrink=shrink, + extra=extra, + } +end +function constructors.scale(tfmdata,specification) + local target={} + if tonumber(specification) then + specification={ size=specification } + end + target.specification=specification + local scaledpoints=specification.size + local relativeid=specification.relativeid + local properties=tfmdata.properties or {} + local goodies=tfmdata.goodies or {} + local resources=tfmdata.resources or {} + local descriptions=tfmdata.descriptions or {} + local characters=tfmdata.characters or {} + local changed=tfmdata.changed or {} + local shared=tfmdata.shared or {} + local parameters=tfmdata.parameters or {} + local mathparameters=tfmdata.mathparameters or {} + local targetcharacters={} + local targetdescriptions=derivetable(descriptions) + local targetparameters=derivetable(parameters) + local targetproperties=derivetable(properties) + local targetgoodies=goodies + target.characters=targetcharacters + target.descriptions=targetdescriptions + target.parameters=targetparameters + target.properties=targetproperties + 
target.goodies=targetgoodies + target.shared=shared + target.resources=resources + target.unscaled=tfmdata + local mathsize=tonumber(specification.mathsize) or 0 + local textsize=tonumber(specification.textsize) or scaledpoints + local forcedsize=tonumber(parameters.mathsize ) or 0 + local extrafactor=tonumber(specification.factor ) or 1 + if (mathsize==2 or forcedsize==2) and parameters.scriptpercentage then + scaledpoints=parameters.scriptpercentage*textsize/100 + elseif (mathsize==3 or forcedsize==3) and parameters.scriptscriptpercentage then + scaledpoints=parameters.scriptscriptpercentage*textsize/100 + elseif forcedsize>1000 then + scaledpoints=forcedsize + end + targetparameters.mathsize=mathsize + targetparameters.textsize=textsize + targetparameters.forcedsize=forcedsize + targetparameters.extrafactor=extrafactor + local tounicode=fonts.mappings.tounicode + local defaultwidth=resources.defaultwidth or 0 + local defaultheight=resources.defaultheight or 0 + local defaultdepth=resources.defaultdepth or 0 + local units=parameters.units or 1000 + if target.fonts then + target.fonts=fastcopy(target.fonts) + end + targetproperties.language=properties.language or "dflt" + targetproperties.script=properties.script or "dflt" + targetproperties.mode=properties.mode or "base" + local askedscaledpoints=scaledpoints + local scaledpoints,delta=constructors.calculatescale(tfmdata,scaledpoints,nil,specification) + local hdelta=delta + local vdelta=delta + target.designsize=parameters.designsize + target.units_per_em=units + local direction=properties.direction or tfmdata.direction or 0 + target.direction=direction + properties.direction=direction + target.size=scaledpoints + target.encodingbytes=properties.encodingbytes or 1 + target.embedding=properties.embedding or "subset" + target.tounicode=1 + target.cidinfo=properties.cidinfo + target.format=properties.format + target.cache=constructors.cacheintex and "yes" or "renew" + local fontname=properties.fontname or tfmdata.fontname + local fullname=properties.fullname or tfmdata.fullname + local filename=properties.filename or tfmdata.filename + local psname=properties.psname or tfmdata.psname + local name=properties.name or tfmdata.name + if not psname or psname=="" then + psname=fontname or (fullname and fonts.names.cleanname(fullname)) + end + target.fontname=fontname + target.fullname=fullname + target.filename=filename + target.psname=psname + target.name=name + properties.fontname=fontname + properties.fullname=fullname + properties.filename=filename + properties.psname=psname + properties.name=name + local expansion=parameters.expansion + if expansion then + target.stretch=expansion.stretch + target.shrink=expansion.shrink + target.step=expansion.step + target.auto_expand=expansion.auto + end + local protrusion=parameters.protrusion + if protrusion then + target.auto_protrude=protrusion.auto + end + local extendfactor=parameters.extendfactor or 0 + if extendfactor~=0 and extendfactor~=1 then + hdelta=hdelta*extendfactor + target.extend=extendfactor*1000 + else + target.extend=1000 + end + local slantfactor=parameters.slantfactor or 0 + if slantfactor~=0 then + target.slant=slantfactor*1000 + else + target.slant=0 + end + targetparameters.factor=delta + targetparameters.hfactor=hdelta + targetparameters.vfactor=vdelta + targetparameters.size=scaledpoints + targetparameters.units=units + targetparameters.scaledpoints=askedscaledpoints + local isvirtual=properties.virtualized or tfmdata.type=="virtual" + local hasquality=target.auto_expand or 
target.auto_protrude + local hasitalics=properties.hasitalics + local autoitalicamount=properties.autoitalicamount + local stackmath=not properties.nostackmath + local nonames=properties.noglyphnames + local haskerns=properties.haskerns or properties.mode=="base" + local hasligatures=properties.hasligatures or properties.mode=="base" + if changed and not next(changed) then + changed=false + end + target.type=isvirtual and "virtual" or "real" + target.postprocessors=tfmdata.postprocessors + local targetslant=(parameters.slant or parameters[1] or 0)*factors.pt + local targetspace=(parameters.space or parameters[2] or 0)*hdelta + local targetspace_stretch=(parameters.space_stretch or parameters[3] or 0)*hdelta + local targetspace_shrink=(parameters.space_shrink or parameters[4] or 0)*hdelta + local targetx_height=(parameters.x_height or parameters[5] or 0)*vdelta + local targetquad=(parameters.quad or parameters[6] or 0)*hdelta + local targetextra_space=(parameters.extra_space or parameters[7] or 0)*hdelta + targetparameters.slant=targetslant + targetparameters.space=targetspace + targetparameters.space_stretch=targetspace_stretch + targetparameters.space_shrink=targetspace_shrink + targetparameters.x_height=targetx_height + targetparameters.quad=targetquad + targetparameters.extra_space=targetextra_space + local ascender=parameters.ascender + if ascender then + targetparameters.ascender=delta*ascender + end + local descender=parameters.descender + if descender then + targetparameters.descender=delta*descender + end + constructors.enhanceparameters(targetparameters) + local protrusionfactor=(targetquad~=0 and 1000/targetquad) or 0 + local scaledwidth=defaultwidth*hdelta + local scaledheight=defaultheight*vdelta + local scaleddepth=defaultdepth*vdelta + local hasmath=(properties.hasmath or next(mathparameters)) and true + if hasmath then + constructors.assignmathparameters(target,tfmdata) + properties.hasmath=true + target.nomath=false + target.MathConstants=target.mathparameters + else + properties.hasmath=false + target.nomath=true + target.mathparameters=nil + end + local italickey="italic" + local useitalics=true + if hasmath then + autoitalicamount=false + elseif properties.textitalics then + italickey="italic_correction" + useitalics=false + if properties.delaytextitalics then + autoitalicamount=false + end + end + if trace_defining then + report_defining("defining tfm, name %a, fullname %a, filename %a, hscale %a, vscale %a, math %a, italics %a", + name,fullname,filename,hdelta,vdelta, + hasmath and "enabled" or "disabled",useitalics and "enabled" or "disabled") + end + constructors.beforecopyingcharacters(target,tfmdata) + local sharedkerns={} + for unicode,character in next,characters do + local chr,description,index + if changed then + local c=changed[unicode] + if c then + description=descriptions[c] or descriptions[unicode] or character + character=characters[c] or character + index=description.index or c + else + description=descriptions[unicode] or character + index=description.index or unicode + end + else + description=descriptions[unicode] or character + index=description.index or unicode + end + local width=description.width + local height=description.height + local depth=description.depth + if width then width=hdelta*width else width=scaledwidth end + if height then height=vdelta*height else height=scaledheight end + if depth and depth~=0 then + depth=delta*depth + if nonames then + chr={ + index=index, + height=height, + depth=depth, + width=width, + } + else + chr={ + 
name=description.name, + index=index, + height=height, + depth=depth, + width=width, + } + end + else + if nonames then + chr={ + index=index, + height=height, + width=width, + } + else + chr={ + name=description.name, + index=index, + height=height, + width=width, + } + end + end + local isunicode=description.unicode + if isunicode then + chr.unicode=isunicode + chr.tounicode=tounicode(isunicode) + end + if hasquality then + local ve=character.expansion_factor + if ve then + chr.expansion_factor=ve*1000 + end + local vl=character.left_protruding + if vl then + chr.left_protruding=protrusionfactor*width*vl + end + local vr=character.right_protruding + if vr then + chr.right_protruding=protrusionfactor*width*vr + end + end + if autoitalicamount then + local vi=description.italic + if not vi then + local vi=description.boundingbox[3]-description.width+autoitalicamount + if vi>0 then + chr[italickey]=vi*hdelta + end + elseif vi~=0 then + chr[italickey]=vi*hdelta + end + elseif hasitalics then + local vi=description.italic + if vi and vi~=0 then + chr[italickey]=vi*hdelta + end + end + if hasmath then + local vn=character.next + if vn then + chr.next=vn + else + local vv=character.vert_variants + if vv then + local t={} + for i=1,#vv do + local vvi=vv[i] + t[i]={ + ["start"]=(vvi["start"] or 0)*vdelta, + ["end"]=(vvi["end"] or 0)*vdelta, + ["advance"]=(vvi["advance"] or 0)*vdelta, + ["extender"]=vvi["extender"], + ["glyph"]=vvi["glyph"], + } + end + chr.vert_variants=t + else + local hv=character.horiz_variants + if hv then + local t={} + for i=1,#hv do + local hvi=hv[i] + t[i]={ + ["start"]=(hvi["start"] or 0)*hdelta, + ["end"]=(hvi["end"] or 0)*hdelta, + ["advance"]=(hvi["advance"] or 0)*hdelta, + ["extender"]=hvi["extender"], + ["glyph"]=hvi["glyph"], + } + end + chr.horiz_variants=t + end + end + end + local va=character.top_accent + if va then + chr.top_accent=vdelta*va + end + if stackmath then + local mk=character.mathkerns + if mk then + local kerns={} + local v=mk.top_right if v then local k={} for i=1,#v do local vi=v[i] + k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern } + end kerns.top_right=k end + local v=mk.top_left if v then local k={} for i=1,#v do local vi=v[i] + k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern } + end kerns.top_left=k end + local v=mk.bottom_left if v then local k={} for i=1,#v do local vi=v[i] + k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern } + end kerns.bottom_left=k end + local v=mk.bottom_right if v then local k={} for i=1,#v do local vi=v[i] + k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern } + end kerns.bottom_right=k end + chr.mathkern=kerns + end + end + end + if haskerns then + local vk=character.kerns + if vk then + local s=sharedkerns[vk] + if not s then + s={} + for k,v in next,vk do s[k]=v*hdelta end + sharedkerns[vk]=s + end + chr.kerns=s + end + end + if hasligatures then + local vl=character.ligatures + if vl then + if true then + chr.ligatures=vl + else + local tt={} + for i,l in next,vl do + tt[i]=l + end + chr.ligatures=tt + end + end + end + if isvirtual then + local vc=character.commands + if vc then + local ok=false + for i=1,#vc do + local key=vc[i][1] + if key=="right" or key=="down" then + ok=true + break + end + end + if ok then + local tt={} + for i=1,#vc do + local ivc=vc[i] + local key=ivc[1] + if key=="right" then + tt[i]={ key,ivc[2]*hdelta } + elseif key=="down" then + tt[i]={ key,ivc[2]*vdelta } + elseif key=="rule" then + tt[i]={ key,ivc[2]*vdelta,ivc[3]*hdelta } + else + tt[i]=ivc + end + end + chr.commands=tt 
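+ -- [editorial note, not part of the upstream fontloader] The loop above has
+ -- rescaled the virtual-font command list: "right"/"down" offsets and "rule"
+ -- dimensions are design units multiplied by hdelta/vdelta. For example, at
+ -- 10pt with 1000 units per em, hdelta is about 655.36, so a hypothetical
+ -- { "right", 100 } becomes { "right", 65536 }, i.e. a 1pt horizontal move.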
+ else + chr.commands=vc + end + chr.index=nil + end + end + targetcharacters[unicode]=chr + end + constructors.aftercopyingcharacters(target,tfmdata) + constructors.trytosharefont(target,tfmdata) + return target +end +function constructors.finalize(tfmdata) + if tfmdata.properties and tfmdata.properties.finalized then + return + end + if not tfmdata.characters then + return nil + end + if not tfmdata.goodies then + tfmdata.goodies={} + end + local parameters=tfmdata.parameters + if not parameters then + return nil + end + if not parameters.expansion then + parameters.expansion={ + stretch=tfmdata.stretch or 0, + shrink=tfmdata.shrink or 0, + step=tfmdata.step or 0, + auto=tfmdata.auto_expand or false, + } + end + if not parameters.protrusion then + parameters.protrusion={ + auto=auto_protrude + } + end + if not parameters.size then + parameters.size=tfmdata.size + end + if not parameters.extendfactor then + parameters.extendfactor=tfmdata.extend or 0 + end + if not parameters.slantfactor then + parameters.slantfactor=tfmdata.slant or 0 + end + if not parameters.designsize then + parameters.designsize=tfmdata.designsize or (factors.pt*10) + end + if not parameters.units then + parameters.units=tfmdata.units_per_em or 1000 + end + if not tfmdata.descriptions then + local descriptions={} + setmetatableindex(descriptions,function(t,k) local v={} t[k]=v return v end) + tfmdata.descriptions=descriptions + end + local properties=tfmdata.properties + if not properties then + properties={} + tfmdata.properties=properties + end + if not properties.virtualized then + properties.virtualized=tfmdata.type=="virtual" + end + if not tfmdata.properties then + tfmdata.properties={ + fontname=tfmdata.fontname, + filename=tfmdata.filename, + fullname=tfmdata.fullname, + name=tfmdata.name, + psname=tfmdata.psname, + encodingbytes=tfmdata.encodingbytes or 1, + embedding=tfmdata.embedding or "subset", + tounicode=tfmdata.tounicode or 1, + cidinfo=tfmdata.cidinfo or nil, + format=tfmdata.format or "type1", + direction=tfmdata.direction or 0, + } + end + if not tfmdata.resources then + tfmdata.resources={} + end + if not tfmdata.shared then + tfmdata.shared={} + end + if not properties.hasmath then + properties.hasmath=not tfmdata.nomath + end + tfmdata.MathConstants=nil + tfmdata.postprocessors=nil + tfmdata.fontname=nil + tfmdata.filename=nil + tfmdata.fullname=nil + tfmdata.name=nil + tfmdata.psname=nil + tfmdata.encodingbytes=nil + tfmdata.embedding=nil + tfmdata.tounicode=nil + tfmdata.cidinfo=nil + tfmdata.format=nil + tfmdata.direction=nil + tfmdata.type=nil + tfmdata.nomath=nil + tfmdata.designsize=nil + tfmdata.size=nil + tfmdata.stretch=nil + tfmdata.shrink=nil + tfmdata.step=nil + tfmdata.auto_expand=nil + tfmdata.auto_protrude=nil + tfmdata.extend=nil + tfmdata.slant=nil + tfmdata.units_per_em=nil + tfmdata.cache=nil + properties.finalized=true + return tfmdata +end +local hashmethods={} +constructors.hashmethods=hashmethods +function constructors.hashfeatures(specification) + local features=specification.features + if features then + local t,tn={},0 + for category,list in next,features do + if next(list) then + local hasher=hashmethods[category] + if hasher then + local hash=hasher(list) + if hash then + tn=tn+1 + t[tn]=category..":"..hash + end + end + end + end + if tn>0 then + return concat(t," & ") + end + end + return "unknown" +end +hashmethods.normal=function(list) + local s={} + local n=0 + for k,v in next,list do + if not k then + elseif k=="number" or k=="features" then + else + n=n+1 + 
s[n]=k + end + end + if n>0 then + sort(s) + for i=1,n do + local k=s[i] + s[i]=k..'='..tostring(list[k]) + end + return concat(s,"+") + end +end +function constructors.hashinstance(specification,force) + local hash,size,fallbacks=specification.hash,specification.size,specification.fallbacks + if force or not hash then + hash=constructors.hashfeatures(specification) + specification.hash=hash + end + if size<1000 and designsizes[hash] then + size=math.round(constructors.scaled(size,designsizes[hash])) + specification.size=size + end + if fallbacks then + return hash..' @ '..tostring(size)..' @ '..fallbacks + else + return hash..' @ '..tostring(size) + end +end +function constructors.setname(tfmdata,specification) + if constructors.namemode=="specification" then + local specname=specification.specification + if specname then + tfmdata.properties.name=specname + if trace_defining then + report_otf("overloaded fontname %a",specname) + end + end + end +end +function constructors.checkedfilename(data) + local foundfilename=data.foundfilename + if not foundfilename then + local askedfilename=data.filename or "" + if askedfilename~="" then + askedfilename=resolvers.resolve(askedfilename) + foundfilename=resolvers.findbinfile(askedfilename,"") or "" + if foundfilename=="" then + report_defining("source file %a is not found",askedfilename) + foundfilename=resolvers.findbinfile(file.basename(askedfilename),"") or "" + if foundfilename~="" then + report_defining("using source file %a due to cache mismatch",foundfilename) + end + end + end + data.foundfilename=foundfilename + end + return foundfilename +end +local formats=allocate() +fonts.formats=formats +setmetatableindex(formats,function(t,k) + local l=lower(k) + if rawget(t,k) then + t[k]=l + return l + end + return rawget(t,file.suffix(l)) +end) +local locations={} +local function setindeed(mode,target,group,name,action,position) + local t=target[mode] + if not t then + report_defining("fatal error in setting feature %a, group %a, mode %a",name,group,mode) + os.exit() + elseif position then + insert(t,position,{ name=name,action=action }) + else + for i=1,#t do + local ti=t[i] + if ti.name==name then + ti.action=action + return + end + end + insert(t,{ name=name,action=action }) + end +end +local function set(group,name,target,source) + target=target[group] + if not target then + report_defining("fatal target error in setting feature %a, group %a",name,group) + os.exit() + end + local source=source[group] + if not source then + report_defining("fatal source error in setting feature %a, group %a",name,group) + os.exit() + end + local node=source.node + local base=source.base + local position=source.position + if node then + setindeed("node",target,group,name,node,position) + end + if base then + setindeed("base",target,group,name,base,position) + end +end +local function register(where,specification) + local name=specification.name + if name and name~="" then + local default=specification.default + local description=specification.description + local initializers=specification.initializers + local processors=specification.processors + local manipulators=specification.manipulators + local modechecker=specification.modechecker + if default then + where.defaults[name]=default + end + if description and description~="" then + where.descriptions[name]=description + end + if initializers then + set('initializers',name,where,specification) + end + if processors then + set('processors',name,where,specification) + end + if manipulators then + 
set('manipulators',name,where,specification) + end + if modechecker then + where.modechecker=modechecker + end + end +end +constructors.registerfeature=register +function constructors.getfeatureaction(what,where,mode,name) + what=handlers[what].features + if what then + where=what[where] + if where then + mode=where[mode] + if mode then + for i=1,#mode do + local m=mode[i] + if m.name==name then + return m.action + end + end + end + end + end +end +function constructors.newhandler(what) + local handler=handlers[what] + if not handler then + handler={} + handlers[what]=handler + end + return handler +end +function constructors.newfeatures(what) + local handler=handlers[what] + local features=handler.features + if not features then + local tables=handler.tables + local statistics=handler.statistics + features=allocate { + defaults={}, + descriptions=tables and tables.features or {}, + used=statistics and statistics.usedfeatures or {}, + initializers={ base={},node={} }, + processors={ base={},node={} }, + manipulators={ base={},node={} }, + } + features.register=function(specification) return register(features,specification) end + handler.features=features + end + return features +end +function constructors.checkedfeatures(what,features) + local defaults=handlers[what].features.defaults + if features and next(features) then + features=fastcopy(features) + for key,value in next,defaults do + if features[key]==nil then + features[key]=value + end + end + return features + else + return fastcopy(defaults) + end +end +function constructors.initializefeatures(what,tfmdata,features,trace,report) + if features and next(features) then + local properties=tfmdata.properties or {} + local whathandler=handlers[what] + local whatfeatures=whathandler.features + local whatinitializers=whatfeatures.initializers + local whatmodechecker=whatfeatures.modechecker + local mode=properties.mode or (whatmodechecker and whatmodechecker(tfmdata,features,features.mode)) or features.mode or "base" + properties.mode=mode + features.mode=mode + local done={} + while true do + local redo=false + local initializers=whatfeatures.initializers[mode] + if initializers then + for i=1,#initializers do + local step=initializers[i] + local feature=step.name + local value=features[feature] + if not value then + elseif done[feature] then + else + local action=step.action + if trace then + report("initializing feature %a to %a for mode %a for font %a",feature, + value,mode,tfmdata.properties.fullname) + end + action(tfmdata,value,features) + if mode~=properties.mode or mode~=features.mode then + if whatmodechecker then + properties.mode=whatmodechecker(tfmdata,features,properties.mode) + features.mode=properties.mode + end + if mode~=properties.mode then + mode=properties.mode + redo=true + end + end + done[feature]=true + end + if redo then + break + end + end + if not redo then + break + end + else + break + end + end + properties.mode=mode + return true + else + return false + end +end +function constructors.collectprocessors(what,tfmdata,features,trace,report) + local processes,nofprocesses={},0 + if features and next(features) then + local properties=tfmdata.properties + local whathandler=handlers[what] + local whatfeatures=whathandler.features + local whatprocessors=whatfeatures.processors + local mode=properties.mode + local processors=whatprocessors[mode] + if processors then + for i=1,#processors do + local step=processors[i] + local feature=step.name + if features[feature] then + local action=step.action + if trace then + 
report("installing feature processor %a for mode %a for font %a",feature,mode,tfmdata.properties.fullname) + end + if action then + nofprocesses=nofprocesses+1 + processes[nofprocesses]=action + end + end + end + elseif trace then + report("no feature processors for mode %a for font %a",mode,properties.fullname) + end + end + return processes +end +function constructors.applymanipulators(what,tfmdata,features,trace,report) + if features and next(features) then + local properties=tfmdata.properties + local whathandler=handlers[what] + local whatfeatures=whathandler.features + local whatmanipulators=whatfeatures.manipulators + local mode=properties.mode + local manipulators=whatmanipulators[mode] + if manipulators then + for i=1,#manipulators do + local step=manipulators[i] + local feature=step.name + local value=features[feature] + if value then + local action=step.action + if trace then + report("applying feature manipulator %a for mode %a for font %a",feature,mode,properties.fullname) + end + if action then + action(tfmdata,feature,value) + end + end + end + end + end +end +function constructors.addcoreunicodes(unicodes) + if not unicodes then + unicodes={} + end + unicodes.space=0x0020 + unicodes.hyphen=0x002D + unicodes.zwj=0x200D + unicodes.zwnj=0x200C + return unicodes +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['luatex-font-enc']={ + version=1.001, + comment="companion to luatex-*.tex", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end +local fonts=fonts +fonts.encodings={} +fonts.encodings.agl={} +fonts.encodings.known={} +setmetatable(fonts.encodings.agl,{ __index=function(t,k) + if k=="unicodes" then + texio.write(" ") + local unicodes=dofile(resolvers.findfile("font-age.lua")) + fonts.encodings.agl={ unicodes=unicodes } + return unicodes + else + return nil + end +end }) + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-cid']={ + version=1.001, + comment="companion to font-otf.lua (cidmaps)", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local format,match,lower=string.format,string.match,string.lower +local tonumber=tonumber +local P,S,R,C,V,lpegmatch=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.V,lpeg.match +local fonts,logs,trackers=fonts,logs,trackers +local trace_loading=false trackers.register("otf.loading",function(v) trace_loading=v end) +local report_otf=logs.reporter("fonts","otf loading") +local cid={} +fonts.cid=cid +local cidmap={} +local cidmax=10 +local number=C(R("09","af","AF")^1) +local space=S(" \n\r\t") +local spaces=space^0 +local period=P(".") +local periods=period*period +local name=P("/")*C((1-space)^1) +local unicodes,names={},{} +local function do_one(a,b) + unicodes[tonumber(a)]=tonumber(b,16) +end +local function do_range(a,b,c) + c=tonumber(c,16) + for i=tonumber(a),tonumber(b) do + unicodes[i]=c + c=c+1 + end +end +local function do_name(a,b) + names[tonumber(a)]=b +end +local grammar=P { "start", + start=number*spaces*number*V("series"), + series=(spaces*(V("one")+V("range")+V("named")))^1, + one=(number*spaces*number)/do_one, + range=(number*periods*number*spaces*number)/do_range, + 
named=(number*spaces*name)/do_name
+}
+local function loadcidfile(filename)
+ local data=io.loaddata(filename)
+ if data then
+ unicodes,names={},{}
+ lpegmatch(grammar,data)
+ local supplement,registry,ordering=match(filename,"^(.-)%-(.-)%-(.-)%.(.-)$")
+ return {
+ supplement=supplement,
+ registry=registry,
+ ordering=ordering,
+ filename=filename,
+ unicodes=unicodes,
+ names=names,
+ }
+ end
+end
+cid.loadfile=loadcidfile
+local template="%s-%s-%s.cidmap"
+local function locate(registry,ordering,supplement)
+ local filename=format(template,registry,ordering,supplement)
+ local hashname=lower(filename)
+ local found=cidmap[hashname]
+ if not found then
+ if trace_loading then
+ report_otf("checking cidmap, registry %a, ordering %a, supplement %a, filename %a",registry,ordering,supplement,filename)
+ end
+ local fullname=resolvers.findfile(filename,'cid') or ""
+ if fullname~="" then
+ found=loadcidfile(fullname)
+ if found then
+ if trace_loading then
+ report_otf("using cidmap file %a",filename)
+ end
+ cidmap[hashname]=found
+ found.usedname=file.basename(filename)
+ end
+ end
+ end
+ return found
+end
+function cid.getmap(specification)
+ if not specification then
+ report_otf("invalid cidinfo specification, table expected")
+ return
+ end
+ local registry=specification.registry
+ local ordering=specification.ordering
+ local supplement=specification.supplement
+ local filename=format(registry,ordering,supplement)
+ local lowername=lower(filename)
+ local found=cidmap[lowername]
+ if found then
+ return found
+ end
+ if ordering=="Identity" then
+ local found={
+ supplement=supplement,
+ registry=registry,
+ ordering=ordering,
+ filename=filename,
+ unicodes={},
+ names={},
+ }
+ cidmap[lowername]=found
+ return found
+ end
+ if trace_loading then
+ report_otf("cidmap needed, registry %a, ordering %a, supplement %a",registry,ordering,supplement)
+ end
+ found=locate(registry,ordering,supplement)
+ if not found then
+ local supnum=tonumber(supplement)
+ local cidnum=nil
+ if supnum<cidmax then
+ for s=supnum+1,cidmax do
+ local c=locate(registry,ordering,s)
+ if c then
+ found,cidnum=c,s
+ break
+ end
+ end
+ end
+ if not found and supnum>0 then
+ for s=supnum-1,0,-1 do
+ local c=locate(registry,ordering,s)
+ if c then
+ found,cidnum=c,s
+ break
+ end
+ end
+ end
+ registry=lower(registry)
+ ordering=lower(ordering)
+ if found and cidnum>0 then
+ for s=0,cidnum-1 do
+ local filename=format(template,registry,ordering,s)
+ if not cidmap[filename] then
+ cidmap[filename]=found
+ end
+ end
+ end
+ end
+ return found
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-map']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local tonumber,next,type=tonumber,next,type
+local match,format,find,concat,gsub,lower=string.match,string.format,string.find,table.concat,string.gsub,string.lower
+local P,R,S,C,Ct,Cc,lpegmatch=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cc,lpeg.match
+local utfbyte=utf.byte
+local floor=math.floor
+local formatters=string.formatters
+local trace_loading=false trackers.register("fonts.loading",function(v) trace_loading=v end)
+local trace_mapping=false trackers.register("fonts.mapping",function(v) trace_unimapping=v end)
+local report_fonts=logs.reporter("fonts","loading")
+local fonts=fonts or {}
+local mappings=fonts.mappings or {}
+fonts.mappings=mappings
+local function loadlumtable(filename)
+ local lumname=file.replacesuffix(file.basename(filename),"lum")
+ local
lumfile=resolvers.findfile(lumname,"map") or "" + if lumfile~="" and lfs.isfile(lumfile) then + if trace_loading or trace_mapping then + report_fonts("loading map table %a",lumfile) + end + lumunic=dofile(lumfile) + return lumunic,lumfile + end +end +local hex=R("AF","09") +local hexfour=(hex*hex*hex*hex)/function(s) return tonumber(s,16) end +local hexsix=(hex*hex*hex*hex*hex*hex)/function(s) return tonumber(s,16) end +local dec=(R("09")^1)/tonumber +local period=P(".") +local unicode=P("uni")*(hexfour*(period+P(-1))*Cc(false)+Ct(hexfour^1)*Cc(true)) +local ucode=P("u")*(hexsix*(period+P(-1))*Cc(false)+Ct(hexsix^1)*Cc(true)) +local index=P("index")*dec*Cc(false) +local parser=unicode+ucode+index +local parsers={} +local function makenameparser(str) + if not str or str=="" then + return parser + else + local p=parsers[str] + if not p then + p=P(str)*period*dec*Cc(false) + parsers[str]=p + end + return p + end +end +local f_single=formatters["%04X"] +local f_double=formatters["%04X%04X"] +local function tounicode16(unicode,name) + if unicode<0x10000 then + return f_single(unicode) + elseif unicode<0x1FFFFFFFFF then + return f_double(floor(unicode/1024),unicode%1024+0xDC00) + else + report_fonts("can't convert %a in %a into tounicode",unicode,name) + end +end +local function tounicode16sequence(unicodes,name) + local t={} + for l=1,#unicodes do + local u=unicodes[l] + if u<0x10000 then + t[l]=f_single(u) + elseif unicode<0x1FFFFFFFFF then + t[l]=f_double(floor(u/1024),u%1024+0xDC00) + else + report_fonts ("can't convert %a in %a into tounicode",u,name) + return + end + end + return concat(t) +end +local function tounicode(unicode,name) + if type(unicode)=="table" then + local t={} + for l=1,#unicode do + local u=unicode[l] + if u<0x10000 then + t[l]=f_single(u) + elseif u<0x1FFFFFFFFF then + t[l]=f_double(floor(u/1024),u%1024+0xDC00) + else + report_fonts ("can't convert %a in %a into tounicode",u,name) + return + end + end + return concat(t) + else + if unicode<0x10000 then + return f_single(unicode) + elseif unicode<0x1FFFFFFFFF then + return f_double(floor(unicode/1024),unicode%1024+0xDC00) + else + report_fonts("can't convert %a in %a into tounicode",unicode,name) + end + end +end +local function fromunicode16(str) + if #str==4 then + return tonumber(str,16) + else + local l,r=match(str,"(....)(....)") + return (tonumber(l,16))*0x400+tonumber(r,16)-0xDC00 + end +end +mappings.loadlumtable=loadlumtable +mappings.makenameparser=makenameparser +mappings.tounicode=tounicode +mappings.tounicode16=tounicode16 +mappings.tounicode16sequence=tounicode16sequence +mappings.fromunicode16=fromunicode16 +local ligseparator=P("_") +local varseparator=P(".") +local namesplitter=Ct(C((1-ligseparator-varseparator)^1)*(ligseparator*C((1-ligseparator-varseparator)^1))^0) +local overloads={ + IJ={ name="I_J",unicode={ 0x49,0x4A },mess=0x0132 }, + ij={ name="i_j",unicode={ 0x69,0x6A },mess=0x0133 }, + ff={ name="f_f",unicode={ 0x66,0x66 },mess=0xFB00 }, + fi={ name="f_i",unicode={ 0x66,0x69 },mess=0xFB01 }, + fl={ name="f_l",unicode={ 0x66,0x6C },mess=0xFB02 }, + ffi={ name="f_f_i",unicode={ 0x66,0x66,0x69 },mess=0xFB03 }, + ffl={ name="f_f_l",unicode={ 0x66,0x66,0x6C },mess=0xFB04 }, + fj={ name="f_j",unicode={ 0x66,0x6A } }, + fk={ name="f_k",unicode={ 0x66,0x6B } }, +} +for k,v in next,overloads do + local name=v.name + local mess=v.mess + if name then + overloads[name]=v + end + if mess then + overloads[mess]=v + end +end +mappings.overloads=overloads +function mappings.addtounicode(data,filename) + local 
resources=data.resources + local properties=data.properties + local descriptions=data.descriptions + local unicodes=resources.unicodes + local lookuptypes=resources.lookuptypes + if not unicodes then + return + end + unicodes['space']=unicodes['space'] or 32 + unicodes['hyphen']=unicodes['hyphen'] or 45 + unicodes['zwj']=unicodes['zwj'] or 0x200D + unicodes['zwnj']=unicodes['zwnj'] or 0x200C + local private=fonts.constructors.privateoffset + local unicodevector=fonts.encodings.agl.unicodes + local missing={} + local lumunic,uparser,oparser + local cidinfo,cidnames,cidcodes,usedmap + cidinfo=properties.cidinfo + usedmap=cidinfo and fonts.cid.getmap(cidinfo) + if usedmap then + oparser=usedmap and makenameparser(cidinfo.ordering) + cidnames=usedmap.names + cidcodes=usedmap.unicodes + end + uparser=makenameparser() + local ns,nl=0,0 + for unic,glyph in next,descriptions do + local index=glyph.index + local name=glyph.name + local r=overloads[name] + if r then + glyph.unicode=r.unicode + elseif unic==-1 or unic>=private or (unic>=0xE000 and unic<=0xF8FF) or unic==0xFFFE or unic==0xFFFF then + local unicode=lumunic and lumunic[name] or unicodevector[name] + if unicode then + glyph.unicode=unicode + ns=ns+1 + end + if (not unicode) and usedmap then + local foundindex=lpegmatch(oparser,name) + if foundindex then + unicode=cidcodes[foundindex] + if unicode then + glyph.unicode=unicode + ns=ns+1 + else + local reference=cidnames[foundindex] + if reference then + local foundindex=lpegmatch(oparser,reference) + if foundindex then + unicode=cidcodes[foundindex] + if unicode then + glyph.unicode=unicode + ns=ns+1 + end + end + if not unicode or unicode=="" then + local foundcodes,multiple=lpegmatch(uparser,reference) + if foundcodes then + glyph.unicode=foundcodes + if multiple then + nl=nl+1 + unicode=true + else + ns=ns+1 + unicode=foundcodes + end + end + end + end + end + end + end + if not unicode or unicode=="" then + local split=lpegmatch(namesplitter,name) + local nsplit=split and #split or 0 + local t,n={},0 + unicode=true + for l=1,nsplit do + local base=split[l] + local u=unicodes[base] or unicodevector[base] + if not u then + break + elseif type(u)=="table" then + if u[1]>=private then + unicode=false + break + end + n=n+1 + t[n]=u[1] + else + if u>=private then + unicode=false + break + end + n=n+1 + t[n]=u + end + end + if n==0 then + elseif n==1 then + glyph.unicode=t[1] + else + glyph.unicode=t + end + nl=nl+1 + end + if not unicode or unicode=="" then + local foundcodes,multiple=lpegmatch(uparser,name) + if foundcodes then + glyph.unicode=foundcodes + if multiple then + nl=nl+1 + unicode=true + else + ns=ns+1 + unicode=foundcodes + end + end + end + local r=overloads[unicode] + if r then + unicode=r.unicode + glyph.unicode=unicode + end + if not unicode then + missing[name]=true + end + end + end + if next(missing) then + local guess={} + local function check(gname,code,unicode) + local description=descriptions[code] + local variant=description.name + if variant==gname then + return + end + local unic=unicodes[variant] + if unic==-1 or unic>=private or (unic>=0xE000 and unic<=0xF8FF) or unic==0xFFFE or unic==0xFFFF then + else + return + end + if descriptions[code].unicode then + return + end + local g=guess[variant] + if g then + g[gname]=unicode + else + guess[variant]={ [gname]=unicode } + end + end + for unicode,description in next,descriptions do + local slookups=description.slookups + if slookups then + local gname=description.name + for tag,data in next,slookups do + local 
lookuptype=lookuptypes[tag] + if lookuptype=="alternate" then + for i=1,#data do + check(gname,data[i],unicode) + end + elseif lookuptype=="substitution" then + check(gname,data,unicode) + end + end + end + local mlookups=description.mlookups + if mlookups then + local gname=description.name + for tag,list in next,mlookups do + local lookuptype=lookuptypes[tag] + if lookuptype=="alternate" then + for i=1,#list do + local data=list[i] + for i=1,#data do + check(gname,data[i],unicode) + end + end + elseif lookuptype=="substitution" then + for i=1,#list do + check(gname,list[i],unicode) + end + end + end + end + end + local done=true + while done do + done=false + for k,v in next,guess do + if type(v)~="number" then + for kk,vv in next,v do + if vv==-1 or vv>=private or (vv>=0xE000 and vv<=0xF8FF) or vv==0xFFFE or vv==0xFFFF then + local uu=guess[kk] + if type(uu)=="number" then + guess[k]=uu + done=true + end + else + guess[k]=vv + done=true + end + end + end + end + end + local orphans=0 + local guessed=0 + for k,v in next,guess do + if type(v)=="number" then + descriptions[unicodes[k]].unicode=descriptions[v].unicode or v + guessed=guessed+1 + else + local t=nil + local l=lower(k) + local u=unicodes[l] + if not u then + orphans=orphans+1 + elseif u==-1 or u>=private or (u>=0xE000 and u<=0xF8FF) or u==0xFFFE or u==0xFFFF then + local unicode=descriptions[u].unicode + if unicode then + descriptions[unicodes[k]].unicode=unicode + guessed=guessed+1 + else + orphans=orphans+1 + end + else + orphans=orphans+1 + end + end + end + if trace_loading and orphans>0 or guessed>0 then + report_fonts("%s glyphs with no related unicode, %s guessed, %s orphans",guessed+orphans,guessed,orphans) + end + end + if trace_mapping then + for unic,glyph in table.sortedhash(descriptions) do + local name=glyph.name + local index=glyph.index + local unicode=glyph.unicode + if unicode then + if type(unicode)=="table" then + local unicodes={} + for i=1,#unicode do + unicodes[i]=formatters("%U",unicode[i]) + end + report_fonts("internal slot %U, name %a, unicode %U, tounicode % t",index,name,unic,unicodes) + else + report_fonts("internal slot %U, name %a, unicode %U, tounicode %U",index,name,unic,unicode) + end + else + report_fonts("internal slot %U, name %a, unicode %U",index,name,unic) + end + end + end + if trace_loading and (ns>0 or nl>0) then + report_fonts("%s tounicode entries added, ligatures %s",nl+ns,ns) + end +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['luatex-fonts-syn']={ + version=1.001, + comment="companion to luatex-*.tex", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end +local fonts=fonts +fonts.names=fonts.names or {} +fonts.names.version=1.001 +fonts.names.basename="luatex-fonts-names" +fonts.names.new_to_old={} +fonts.names.old_to_new={} +fonts.names.cache=containers.define("fonts","data",fonts.names.version,true) +local data,loaded=nil,false +local fileformats={ "lua","tex","other text files" } +function fonts.names.reportmissingbase() + texio.write("") + fonts.names.reportmissingbase=nil +end +function fonts.names.reportmissingname() + texio.write("") + fonts.names.reportmissingname=nil +end +function fonts.names.resolve(name,sub) + if not loaded then + local basename=fonts.names.basename + if basename and 
basename~="" then + data=containers.read(fonts.names.cache,basename) + if not data then + basename=file.addsuffix(basename,"lua") + for i=1,#fileformats do + local format=fileformats[i] + local foundname=resolvers.findfile(basename,format) or "" + if foundname~="" then + data=dofile(foundname) + texio.write("") + break + end + end + end + end + loaded=true + end + if type(data)=="table" and data.version==fonts.names.version then + local condensed=string.gsub(string.lower(name),"[^%a%d]","") + local found=data.mappings and data.mappings[condensed] + if found then + local fontname,filename,subfont=found[1],found[2],found[3] + if subfont then + return filename,fontname + else + return filename,false + end + elseif fonts.names.reportmissingname then + fonts.names.reportmissingname() + return name,false + end + elseif fonts.names.reportmissingbase then + fonts.names.reportmissingbase() + end +end +fonts.names.resolvespec=fonts.names.resolve +function fonts.names.getfilename(askedname,suffix) + return "" +end +function fonts.names.ignoredfile(filename) + return false +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-tfm']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local next=next +local match=string.match +local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end) +local trace_features=false trackers.register("tfm.features",function(v) trace_features=v end) +local report_defining=logs.reporter("fonts","defining") +local report_tfm=logs.reporter("fonts","tfm loading") +local findbinfile=resolvers.findbinfile +local fonts=fonts +local handlers=fonts.handlers +local readers=fonts.readers +local constructors=fonts.constructors +local encodings=fonts.encodings +local tfm=constructors.newhandler("tfm") +local tfmfeatures=constructors.newfeatures("tfm") +local registertfmfeature=tfmfeatures.register +constructors.resolvevirtualtoo=false +fonts.formats.tfm="type1" +function tfm.setfeatures(tfmdata,features) + local okay=constructors.initializefeatures("tfm",tfmdata,features,trace_features,report_tfm) + if okay then + return constructors.collectprocessors("tfm",tfmdata,features,trace_features,report_tfm) + else + return {} + end +end +local function read_from_tfm(specification) + local filename=specification.filename + local size=specification.size + if trace_defining then + report_defining("loading tfm file %a at size %s",filename,size) + end + local tfmdata=font.read_tfm(filename,size) + if tfmdata then + local features=specification.features and specification.features.normal or {} + local resources=tfmdata.resources or {} + local properties=tfmdata.properties or {} + local parameters=tfmdata.parameters or {} + local shared=tfmdata.shared or {} + properties.name=tfmdata.name + properties.fontname=tfmdata.fontname + properties.psname=tfmdata.psname + properties.filename=specification.filename + properties.format=fonts.formats.tfm + parameters.size=size + shared.rawdata={} + shared.features=features + shared.processes=next(features) and tfm.setfeatures(tfmdata,features) or nil + tfmdata.properties=properties + tfmdata.resources=resources + tfmdata.parameters=parameters + tfmdata.shared=shared + parameters.slant=parameters.slant or parameters[1] or 0 + parameters.space=parameters.space or parameters[2] or 0 + 
parameters.space_stretch=parameters.space_stretch or parameters[3] or 0 + parameters.space_shrink=parameters.space_shrink or parameters[4] or 0 + parameters.x_height=parameters.x_height or parameters[5] or 0 + parameters.quad=parameters.quad or parameters[6] or 0 + parameters.extra_space=parameters.extra_space or parameters[7] or 0 + constructors.enhanceparameters(parameters) + if constructors.resolvevirtualtoo then + fonts.loggers.register(tfmdata,file.suffix(filename),specification) + local vfname=findbinfile(specification.name,'ovf') + if vfname and vfname~="" then + local vfdata=font.read_vf(vfname,size) + if vfdata then + local chars=tfmdata.characters + for k,v in next,vfdata.characters do + chars[k].commands=v.commands + end + properties.virtualized=true + tfmdata.fonts=vfdata.fonts + end + end + end + local allfeatures=tfmdata.shared.features or specification.features.normal + constructors.applymanipulators("tfm",tfmdata,allfeatures.normal,trace_features,report_tfm) + if not features.encoding then + local encoding,filename=match(properties.filename,"^(.-)%-(.*)$") + if filename and encoding and encodings.known and encodings.known[encoding] then + features.encoding=encoding + end + end + properties.haskerns=true + properties.haslogatures=true + resources.unicodes={} + resources.lookuptags={} + return tfmdata + end +end +local function check_tfm(specification,fullname) + local foundname=findbinfile(fullname,'tfm') or "" + if foundname=="" then + foundname=findbinfile(fullname,'ofm') or "" + end + if foundname=="" then + foundname=fonts.names.getfilename(fullname,"tfm") or "" + end + if foundname~="" then + specification.filename=foundname + specification.format="ofm" + return read_from_tfm(specification) + elseif trace_defining then + report_defining("loading tfm with name %a fails",specification.name) + end +end +readers.check_tfm=check_tfm +function readers.tfm(specification) + local fullname=specification.filename or "" + if fullname=="" then + local forced=specification.forced or "" + if forced~="" then + fullname=specification.name.."."..forced + else + fullname=specification.name + end + end + return check_tfm(specification,fullname) +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-afm']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local fonts,logs,trackers,containers,resolvers=fonts,logs,trackers,containers,resolvers +local next,type,tonumber=next,type,tonumber +local format,match,gmatch,lower,gsub,strip=string.format,string.match,string.gmatch,string.lower,string.gsub,string.strip +local abs=math.abs +local P,S,C,R,lpegmatch,patterns=lpeg.P,lpeg.S,lpeg.C,lpeg.R,lpeg.match,lpeg.patterns +local derivetable=table.derive +local trace_features=false trackers.register("afm.features",function(v) trace_features=v end) +local trace_indexing=false trackers.register("afm.indexing",function(v) trace_indexing=v end) +local trace_loading=false trackers.register("afm.loading",function(v) trace_loading=v end) +local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end) +local report_afm=logs.reporter("fonts","afm loading") +local setmetatableindex=table.setmetatableindex +local findbinfile=resolvers.findbinfile +local definers=fonts.definers +local readers=fonts.readers +local 
constructors=fonts.constructors +local afm=constructors.newhandler("afm") +local pfb=constructors.newhandler("pfb") +local afmfeatures=constructors.newfeatures("afm") +local registerafmfeature=afmfeatures.register +afm.version=1.500 +afm.cache=containers.define("fonts","afm",afm.version,true) +afm.autoprefixed=true +afm.helpdata={} +afm.syncspace=true +afm.addligatures=true +afm.addtexligatures=true +afm.addkerns=true +local overloads=fonts.mappings.overloads +local applyruntimefixes=fonts.treatments and fonts.treatments.applyfixes +local function setmode(tfmdata,value) + if value then + tfmdata.properties.mode=lower(value) + end +end +registerafmfeature { + name="mode", + description="mode", + initializers={ + base=setmode, + node=setmode, + } +} +local comment=P("Comment") +local spacing=patterns.spacer +local lineend=patterns.newline +local words=C((1-lineend)^1) +local number=C((R("09")+S("."))^1)/tonumber*spacing^0 +local data=lpeg.Carg(1) +local pattern=( + comment*spacing*( + data*( + ("CODINGSCHEME"*spacing*words )/function(fd,a) end+("DESIGNSIZE"*spacing*number*words )/function(fd,a) fd[ 1]=a end+("CHECKSUM"*spacing*number*words )/function(fd,a) fd[ 2]=a end+("SPACE"*spacing*number*"plus"*number*"minus"*number)/function(fd,a,b,c) fd[ 3],fd[ 4],fd[ 5]=a,b,c end+("QUAD"*spacing*number )/function(fd,a) fd[ 6]=a end+("EXTRASPACE"*spacing*number )/function(fd,a) fd[ 7]=a end+("NUM"*spacing*number*number*number )/function(fd,a,b,c) fd[ 8],fd[ 9],fd[10]=a,b,c end+("DENOM"*spacing*number*number )/function(fd,a,b ) fd[11],fd[12]=a,b end+("SUP"*spacing*number*number*number )/function(fd,a,b,c) fd[13],fd[14],fd[15]=a,b,c end+("SUB"*spacing*number*number )/function(fd,a,b) fd[16],fd[17]=a,b end+("SUPDROP"*spacing*number )/function(fd,a) fd[18]=a end+("SUBDROP"*spacing*number )/function(fd,a) fd[19]=a end+("DELIM"*spacing*number*number )/function(fd,a,b) fd[20],fd[21]=a,b end+("AXISHEIGHT"*spacing*number )/function(fd,a) fd[22]=a end + )+(1-lineend)^0 + )+(1-comment)^1 +)^0 +local function scan_comment(str) + local fd={} + lpegmatch(pattern,str,1,fd) + return fd +end +local keys={} +function keys.FontName (data,line) data.metadata.fontname=strip (line) + data.metadata.fullname=strip (line) end +function keys.ItalicAngle (data,line) data.metadata.italicangle=tonumber (line) end +function keys.IsFixedPitch(data,line) data.metadata.isfixedpitch=toboolean(line,true) end +function keys.CharWidth (data,line) data.metadata.charwidth=tonumber (line) end +function keys.XHeight (data,line) data.metadata.xheight=tonumber (line) end +function keys.Descender (data,line) data.metadata.descender=tonumber (line) end +function keys.Ascender (data,line) data.metadata.ascender=tonumber (line) end +function keys.Comment (data,line) + line=lower(line) + local designsize=match(line,"designsize[^%d]*(%d+)") + if designsize then data.metadata.designsize=tonumber(designsize) end +end +local function get_charmetrics(data,charmetrics,vector) + local characters=data.characters + local chr,ind={},0 + for k,v in gmatch(charmetrics,"([%a]+) +(.-) *;") do + if k=='C' then + v=tonumber(v) + if v<0 then + ind=ind+1 + else + ind=v + end + chr={ + index=ind + } + elseif k=='WX' then + chr.width=tonumber(v) + elseif k=='N' then + characters[v]=chr + elseif k=='B' then + local llx,lly,urx,ury=match(v,"^ *(.-) +(.-) +(.-) +(.-)$") + chr.boundingbox={ tonumber(llx),tonumber(lly),tonumber(urx),tonumber(ury) } + elseif k=='L' then + local plus,becomes=match(v,"^(.-) +(.-)$") + local ligatures=chr.ligatures + if ligatures then + 
ligatures[plus]=becomes + else + chr.ligatures={ [plus]=becomes } + end + end + end +end +local function get_kernpairs(data,kernpairs) + local characters=data.characters + for one,two,value in gmatch(kernpairs,"KPX +(.-) +(.-) +(.-)\n") do + local chr=characters[one] + if chr then + local kerns=chr.kerns + if kerns then + kerns[two]=tonumber(value) + else + chr.kerns={ [two]=tonumber(value) } + end + end + end +end +local function get_variables(data,fontmetrics) + for key,rest in gmatch(fontmetrics,"(%a+) *(.-)[\n\r]") do + local keyhandler=keys[key] + if keyhandler then + keyhandler(data,rest) + end + end +end +local function get_indexes(data,pfbname) + data.resources.filename=resolvers.unresolve(pfbname) + local pfbblob=fontloader.open(pfbname) + if pfbblob then + local characters=data.characters + local pfbdata=fontloader.to_table(pfbblob) + if pfbdata then + local glyphs=pfbdata.glyphs + if glyphs then + if trace_loading then + report_afm("getting index data from %a",pfbname) + end + for index,glyph in next,glyphs do + local name=glyph.name + if name then + local char=characters[name] + if char then + if trace_indexing then + report_afm("glyph %a has index %a",name,index) + end + char.index=index + end + end + end + elseif trace_loading then + report_afm("no glyph data in pfb file %a",pfbname) + end + elseif trace_loading then + report_afm("no data in pfb file %a",pfbname) + end + fontloader.close(pfbblob) + elseif trace_loading then + report_afm("invalid pfb file %a",pfbname) + end +end +local function readafm(filename) + local ok,afmblob,size=resolvers.loadbinfile(filename) + if ok and afmblob then + local data={ + resources={ + filename=resolvers.unresolve(filename), + version=afm.version, + creator="context mkiv", + }, + properties={ + hasitalics=false, + }, + goodies={}, + metadata={ + filename=file.removesuffix(file.basename(filename)) + }, + characters={ + }, + descriptions={ + }, + } + afmblob=gsub(afmblob,"StartCharMetrics(.-)EndCharMetrics",function(charmetrics) + if trace_loading then + report_afm("loading char metrics") + end + get_charmetrics(data,charmetrics,vector) + return "" + end) + afmblob=gsub(afmblob,"StartKernPairs(.-)EndKernPairs",function(kernpairs) + if trace_loading then + report_afm("loading kern pairs") + end + get_kernpairs(data,kernpairs) + return "" + end) + afmblob=gsub(afmblob,"StartFontMetrics%s+([%d%.]+)(.-)EndFontMetrics",function(version,fontmetrics) + if trace_loading then + report_afm("loading variables") + end + data.afmversion=version + get_variables(data,fontmetrics) + data.fontdimens=scan_comment(fontmetrics) + return "" + end) + return data + else + if trace_loading then + report_afm("no valid afm file %a",filename) + end + return nil + end +end +local addkerns,addligatures,addtexligatures,unify,normalize,fixnames +function afm.load(filename) + filename=resolvers.findfile(filename,'afm') or "" + if filename~="" and not fonts.names.ignoredfile(filename) then + local name=file.removesuffix(file.basename(filename)) + local data=containers.read(afm.cache,name) + local attr=lfs.attributes(filename) + local size,time=attr.size or 0,attr.modification or 0 + local pfbfile=file.replacesuffix(name,"pfb") + local pfbname=resolvers.findfile(pfbfile,"pfb") or "" + if pfbname=="" then + pfbname=resolvers.findfile(file.basename(pfbfile),"pfb") or "" + end + local pfbsize,pfbtime=0,0 + if pfbname~="" then + local attr=lfs.attributes(pfbname) + pfbsize=attr.size or 0 + pfbtime=attr.modification or 0 + end + if not data or data.size~=size or data.time~=time or 
data.pfbsize~=pfbsize or data.pfbtime~=pfbtime then + report_afm("reading %a",filename) + data=readafm(filename) + if data then + if pfbname~="" then + get_indexes(data,pfbname) + elseif trace_loading then + report_afm("no pfb file for %a",filename) + end + report_afm("unifying %a",filename) + unify(data,filename) + if afm.addligatures then + report_afm("add ligatures") + addligatures(data) + end + if afm.addtexligatures then + report_afm("add tex ligatures") + addtexligatures(data) + end + if afm.addkerns then + report_afm("add extra kerns") + addkerns(data) + end + normalize(data) + fixnames(data) + report_afm("add tounicode data") + fonts.mappings.addtounicode(data,filename) + data.size=size + data.time=time + data.pfbsize=pfbsize + data.pfbtime=pfbtime + report_afm("saving %a in cache",name) + data.resources.unicodes=nil + data=containers.write(afm.cache,name,data) + data=containers.read(afm.cache,name) + end + if applyruntimefixes and data then + applyruntimefixes(filename,data) + end + end + return data + else + return nil + end +end +local uparser=fonts.mappings.makenameparser() +unify=function(data,filename) + local unicodevector=fonts.encodings.agl.unicodes + local unicodes,names={},{} + local private=constructors.privateoffset + local descriptions=data.descriptions + for name,blob in next,data.characters do + local code=unicodevector[name] + if not code then + code=lpegmatch(uparser,name) + if not code then + code=private + private=private+1 + report_afm("assigning private slot %U for unknown glyph name %a",code,name) + end + end + local index=blob.index + unicodes[name]=code + names[name]=index + blob.name=name + descriptions[code]={ + boundingbox=blob.boundingbox, + width=blob.width, + kerns=blob.kerns, + index=index, + name=name, + } + end + for unicode,description in next,descriptions do + local kerns=description.kerns + if kerns then + local krn={} + for name,kern in next,kerns do + local unicode=unicodes[name] + if unicode then + krn[unicode]=kern + else + end + end + description.kerns=krn + end + end + data.characters=nil + local resources=data.resources + local filename=resources.filename or file.removesuffix(file.basename(filename)) + resources.filename=resolvers.unresolve(filename) + resources.unicodes=unicodes + resources.marks={} + resources.private=private +end +normalize=function(data) +end +fixnames=function(data) + for k,v in next,data.descriptions do + local n=v.name + local r=overloads[n] + if r then + local name=r.name + if trace_indexing then + report_afm("renaming characters %a to %a",n,name) + end + v.name=name + v.unicode=r.unicode + end + end +end +local addthem=function(rawdata,ligatures) + if ligatures then + local descriptions=rawdata.descriptions + local resources=rawdata.resources + local unicodes=resources.unicodes + for ligname,ligdata in next,ligatures do + local one=descriptions[unicodes[ligname]] + if one then + for _,pair in next,ligdata do + local two,three=unicodes[pair[1]],unicodes[pair[2]] + if two and three then + local ol=one.ligatures + if ol then + if not ol[two] then + ol[two]=three + end + else + one.ligatures={ [two]=three } + end + end + end + end + end + end +end +addligatures=function(rawdata) addthem(rawdata,afm.helpdata.ligatures ) end +addtexligatures=function(rawdata) addthem(rawdata,afm.helpdata.texligatures) end +addkerns=function(rawdata) + local descriptions=rawdata.descriptions + local resources=rawdata.resources + local unicodes=resources.unicodes + local function do_it_left(what) + if what then + for unicode,description 
in next,descriptions do + local kerns=description.kerns + if kerns then + local extrakerns + for complex,simple in next,what do + complex=unicodes[complex] + simple=unicodes[simple] + if complex and simple then + local ks=kerns[simple] + if ks and not kerns[complex] then + if extrakerns then + extrakerns[complex]=ks + else + extrakerns={ [complex]=ks } + end + end + end + end + if extrakerns then + description.extrakerns=extrakerns + end + end + end + end + end + local function do_it_copy(what) + if what then + for complex,simple in next,what do + complex=unicodes[complex] + simple=unicodes[simple] + if complex and simple then + local complexdescription=descriptions[complex] + if complexdescription then + local simpledescription=descriptions[complex] + if simpledescription then + local extrakerns + local kerns=simpledescription.kerns + if kerns then + for unicode,kern in next,kerns do + if extrakerns then + extrakerns[unicode]=kern + else + extrakerns={ [unicode]=kern } + end + end + end + local extrakerns=simpledescription.extrakerns + if extrakerns then + for unicode,kern in next,extrakerns do + if extrakerns then + extrakerns[unicode]=kern + else + extrakerns={ [unicode]=kern } + end + end + end + if extrakerns then + complexdescription.extrakerns=extrakerns + end + end + end + end + end + end + end + do_it_left(afm.helpdata.leftkerned) + do_it_left(afm.helpdata.bothkerned) + do_it_copy(afm.helpdata.bothkerned) + do_it_copy(afm.helpdata.rightkerned) +end +local function adddimensions(data) + if data then + for unicode,description in next,data.descriptions do + local bb=description.boundingbox + if bb then + local ht,dp=bb[4],-bb[2] + if ht==0 or ht<0 then + else + description.height=ht + end + if dp==0 or dp<0 then + else + description.depth=dp + end + end + end + end +end +local function copytotfm(data) + if data and data.descriptions then + local metadata=data.metadata + local resources=data.resources + local properties=derivetable(data.properties) + local descriptions=derivetable(data.descriptions) + local goodies=derivetable(data.goodies) + local characters={} + local parameters={} + local unicodes=resources.unicodes + for unicode,description in next,data.descriptions do + characters[unicode]={} + end + local filename=constructors.checkedfilename(resources) + local fontname=metadata.fontname or metadata.fullname + local fullname=metadata.fullname or metadata.fontname + local endash=0x0020 + local emdash=0x2014 + local spacer="space" + local spaceunits=500 + local monospaced=metadata.isfixedpitch + local charwidth=metadata.charwidth + local italicangle=metadata.italicangle + local charxheight=metadata.xheight and metadata.xheight>0 and metadata.xheight + properties.monospaced=monospaced + parameters.italicangle=italicangle + parameters.charwidth=charwidth + parameters.charxheight=charxheight + if properties.monospaced then + if descriptions[endash] then + spaceunits,spacer=descriptions[endash].width,"space" + end + if not spaceunits and descriptions[emdash] then + spaceunits,spacer=descriptions[emdash].width,"emdash" + end + if not spaceunits and charwidth then + spaceunits,spacer=charwidth,"charwidth" + end + else + if descriptions[endash] then + spaceunits,spacer=descriptions[endash].width,"space" + end + if not spaceunits and charwidth then + spaceunits,spacer=charwidth,"charwidth" + end + end + spaceunits=tonumber(spaceunits) + if spaceunits<200 then + end + parameters.slant=0 + parameters.space=spaceunits + parameters.space_stretch=500 + parameters.space_shrink=333 + 
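+ -- The constants around here are fallback TeX font dimensions in afm design
+ -- units (1000 per em, see parameters.units further down): slant 0, space
+ -- stretch 500, shrink 333, x-height 400, quad 1000. When afm.syncspace is
+ -- set they get overridden below with values derived from the measured space
+ -- width, roughly stretch=spaceunits/2 and shrink=spaceunits/3 (a 500 unit
+ -- space gives 250 and about 167); monospaced fonts get zero stretch/shrink.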
parameters.x_height=400 + parameters.quad=1000 + if italicangle and italicangle~=0 then + parameters.italicangle=italicangle + parameters.italicfactor=math.cos(math.rad(90+italicangle)) + parameters.slant=- math.tan(italicangle*math.pi/180) + end + if monospaced then + parameters.space_stretch=0 + parameters.space_shrink=0 + elseif afm.syncspace then + parameters.space_stretch=spaceunits/2 + parameters.space_shrink=spaceunits/3 + end + parameters.extra_space=parameters.space_shrink + if charxheight then + parameters.x_height=charxheight + else + local x=0x0078 + if x then + local x=descriptions[x] + if x then + parameters.x_height=x.height + end + end + end + local fd=data.fontdimens + if fd and fd[8] and fd[9] and fd[10] then + for k,v in next,fd do + parameters[k]=v + end + end + parameters.designsize=(metadata.designsize or 10)*65536 + parameters.ascender=abs(metadata.ascender or 0) + parameters.descender=abs(metadata.descender or 0) + parameters.units=1000 + properties.spacer=spacer + properties.encodingbytes=2 + properties.format=fonts.formats[filename] or "type1" + properties.filename=filename + properties.fontname=fontname + properties.fullname=fullname + properties.psname=fullname + properties.name=filename or fullname or fontname + if next(characters) then + return { + characters=characters, + descriptions=descriptions, + parameters=parameters, + resources=resources, + properties=properties, + goodies=goodies, + } + end + end + return nil +end +function afm.setfeatures(tfmdata,features) + local okay=constructors.initializefeatures("afm",tfmdata,features,trace_features,report_afm) + if okay then + return constructors.collectprocessors("afm",tfmdata,features,trace_features,report_afm) + else + return {} + end +end +local function addtables(data) + local resources=data.resources + local lookuptags=resources.lookuptags + local unicodes=resources.unicodes + if not lookuptags then + lookuptags={} + resources.lookuptags=lookuptags + end + setmetatableindex(lookuptags,function(t,k) + local v=type(k)=="number" and ("lookup "..k) or k + t[k]=v + return v + end) + if not unicodes then + unicodes={} + resources.unicodes=unicodes + setmetatableindex(unicodes,function(t,k) + setmetatableindex(unicodes,nil) + for u,d in next,data.descriptions do + local n=d.name + if n then + t[n]=u + end + end + return rawget(t,k) + end) + end + constructors.addcoreunicodes(unicodes) +end +local function afmtotfm(specification) + local afmname=specification.filename or specification.name + if specification.forced=="afm" or specification.format=="afm" then + if trace_loading then + report_afm("forcing afm format for %a",afmname) + end + else + local tfmname=findbinfile(afmname,"ofm") or "" + if tfmname~="" then + if trace_loading then + report_afm("fallback from afm to tfm for %a",afmname) + end + return + end + end + if afmname~="" then + local features=constructors.checkedfeatures("afm",specification.features.normal) + specification.features.normal=features + constructors.hashinstance(specification,true) + specification=definers.resolve(specification) + local cache_id=specification.hash + local tfmdata=containers.read(constructors.cache,cache_id) + if not tfmdata then + local rawdata=afm.load(afmname) + if rawdata and next(rawdata) then + addtables(rawdata) + adddimensions(rawdata) + tfmdata=copytotfm(rawdata) + if tfmdata and next(tfmdata) then + local shared=tfmdata.shared + if not shared then + shared={} + tfmdata.shared=shared + end + shared.rawdata=rawdata + shared.features=features + 
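+ -- tfmdata.shared is the hand-over point to the generic constructor code:
+ -- rawdata keeps the cached afm table, features the checked feature set
+ -- (something along the lines of { liga=true, kern=true }, purely as an
+ -- illustration; the real keys come from the font request), and processes,
+ -- assigned on the next line, the processors collected by afm.setfeatures.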
shared.processes=afm.setfeatures(tfmdata,features) + end + elseif trace_loading then + report_afm("no (valid) afm file found with name %a",afmname) + end + tfmdata=containers.write(constructors.cache,cache_id,tfmdata) + end + return tfmdata + end +end +local function read_from_afm(specification) + local tfmdata=afmtotfm(specification) + if tfmdata then + tfmdata.properties.name=specification.name + tfmdata=constructors.scale(tfmdata,specification) + local allfeatures=tfmdata.shared.features or specification.features.normal + constructors.applymanipulators("afm",tfmdata,allfeatures,trace_features,report_afm) + fonts.loggers.register(tfmdata,'afm',specification) + end + return tfmdata +end +local function prepareligatures(tfmdata,ligatures,value) + if value then + local descriptions=tfmdata.descriptions + local hasligatures=false + for unicode,character in next,tfmdata.characters do + local description=descriptions[unicode] + local dligatures=description.ligatures + if dligatures then + local cligatures=character.ligatures + if not cligatures then + cligatures={} + character.ligatures=cligatures + end + for unicode,ligature in next,dligatures do + cligatures[unicode]={ + char=ligature, + type=0 + } + end + hasligatures=true + end + end + tfmdata.properties.hasligatures=hasligatures + end +end +local function preparekerns(tfmdata,kerns,value) + if value then + local rawdata=tfmdata.shared.rawdata + local resources=rawdata.resources + local unicodes=resources.unicodes + local descriptions=tfmdata.descriptions + local haskerns=false + for u,chr in next,tfmdata.characters do + local d=descriptions[u] + local newkerns=d[kerns] + if newkerns then + local kerns=chr.kerns + if not kerns then + kerns={} + chr.kerns=kerns + end + for k,v in next,newkerns do + local uk=unicodes[k] + if uk then + kerns[uk]=v + end + end + haskerns=true + end + end + tfmdata.properties.haskerns=haskerns + end +end +local list={ + [0x0027]=0x2019, +} +local function texreplacements(tfmdata,value) + local descriptions=tfmdata.descriptions + local characters=tfmdata.characters + for k,v in next,list do + characters [k]=characters [v] + descriptions[k]=descriptions[v] + end +end +local function ligatures (tfmdata,value) prepareligatures(tfmdata,'ligatures',value) end +local function texligatures(tfmdata,value) prepareligatures(tfmdata,'texligatures',value) end +local function kerns (tfmdata,value) preparekerns (tfmdata,'kerns',value) end +local function extrakerns (tfmdata,value) preparekerns (tfmdata,'extrakerns',value) end +registerafmfeature { + name="liga", + description="traditional ligatures", + initializers={ + base=ligatures, + node=ligatures, + } +} +registerafmfeature { + name="kern", + description="intercharacter kerning", + initializers={ + base=kerns, + node=kerns, + } +} +registerafmfeature { + name="extrakerns", + description="additional intercharacter kerning", + initializers={ + base=extrakerns, + node=extrakerns, + } +} +registerafmfeature { + name='tlig', + description='tex ligatures', + initializers={ + base=texligatures, + node=texligatures, + } +} +registerafmfeature { + name='trep', + description='tex replacements', + initializers={ + base=texreplacements, + node=texreplacements, + } +} +local check_tfm=readers.check_tfm +fonts.formats.afm="type1" +fonts.formats.pfb="type1" +local function check_afm(specification,fullname) + local foundname=findbinfile(fullname,'afm') or "" + if foundname=="" then + foundname=fonts.names.getfilename(fullname,"afm") or "" + end + if foundname=="" and afm.autoprefixed 
then + local encoding,shortname=match(fullname,"^(.-)%-(.*)$") + if encoding and shortname and fonts.encodings.known[encoding] then + shortname=findbinfile(shortname,'afm') or "" + if shortname~="" then + foundname=shortname + if trace_defining then + report_afm("stripping encoding prefix from filename %a",afmname) + end + end + end + end + if foundname~="" then + specification.filename=foundname + specification.format="afm" + return read_from_afm(specification) + end +end +function readers.afm(specification,method) + local fullname,tfmdata=specification.filename or "",nil + if fullname=="" then + local forced=specification.forced or "" + if forced~="" then + tfmdata=check_afm(specification,specification.name.."."..forced) + end + if not tfmdata then + method=method or definers.method or "afm or tfm" + if method=="tfm" then + tfmdata=check_tfm(specification,specification.name) + elseif method=="afm" then + tfmdata=check_afm(specification,specification.name) + elseif method=="tfm or afm" then + tfmdata=check_tfm(specification,specification.name) or check_afm(specification,specification.name) + else + tfmdata=check_afm(specification,specification.name) or check_tfm(specification,specification.name) + end + end + else + tfmdata=check_afm(specification,fullname) + end + return tfmdata +end +function readers.pfb(specification,method) + local original=specification.specification + if trace_defining then + report_afm("using afm reader for %a",original) + end + specification.specification=gsub(original,"%.pfb",".afm") + specification.forced="afm" + return readers.afm(specification,method) +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-afk']={ + version=1.001, + comment="companion to font-afm.lua", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files", + dataonly=true, +} +local allocate=utilities.storage.allocate +fonts.handlers.afm.helpdata={ + ligatures=allocate { + ['f']={ + { 'f','ff' }, + { 'i','fi' }, + { 'l','fl' }, + }, + ['ff']={ + { 'i','ffi' } + }, + ['fi']={ + { 'i','fii' } + }, + ['fl']={ + { 'i','fli' } + }, + ['s']={ + { 't','st' } + }, + ['i']={ + { 'j','ij' } + }, + }, + texligatures=allocate { + ['quoteleft']={ + { 'quoteleft','quotedblleft' } + }, + ['quoteright']={ + { 'quoteright','quotedblright' } + }, + ['hyphen']={ + { 'hyphen','endash' } + }, + ['endash']={ + { 'hyphen','emdash' } + } + }, + leftkerned=allocate { + AEligature="A",aeligature="a", + OEligature="O",oeligature="o", + IJligature="I",ijligature="i", + AE="A",ae="a", + OE="O",oe="o", + IJ="I",ij="i", + Ssharp="S",ssharp="s", + }, + rightkerned=allocate { + AEligature="E",aeligature="e", + OEligature="E",oeligature="e", + IJligature="J",ijligature="j", + AE="E",ae="e", + OE="E",oe="e", + IJ="J",ij="j", + Ssharp="S",ssharp="s", + }, + bothkerned=allocate { + Acircumflex="A",acircumflex="a", + Ccircumflex="C",ccircumflex="c", + Ecircumflex="E",ecircumflex="e", + Gcircumflex="G",gcircumflex="g", + Hcircumflex="H",hcircumflex="h", + Icircumflex="I",icircumflex="i", + Jcircumflex="J",jcircumflex="j", + Ocircumflex="O",ocircumflex="o", + Scircumflex="S",scircumflex="s", + Ucircumflex="U",ucircumflex="u", + Wcircumflex="W",wcircumflex="w", + Ycircumflex="Y",ycircumflex="y", + Agrave="A",agrave="a", + Egrave="E",egrave="e", + Igrave="I",igrave="i", + Ograve="O",ograve="o", + Ugrave="U",ugrave="u", + Ygrave="Y",ygrave="y", + 
Atilde="A",atilde="a", + Itilde="I",itilde="i", + Otilde="O",otilde="o", + Utilde="U",utilde="u", + Ntilde="N",ntilde="n", + Adiaeresis="A",adiaeresis="a",Adieresis="A",adieresis="a", + Ediaeresis="E",ediaeresis="e",Edieresis="E",edieresis="e", + Idiaeresis="I",idiaeresis="i",Idieresis="I",idieresis="i", + Odiaeresis="O",odiaeresis="o",Odieresis="O",odieresis="o", + Udiaeresis="U",udiaeresis="u",Udieresis="U",udieresis="u", + Ydiaeresis="Y",ydiaeresis="y",Ydieresis="Y",ydieresis="y", + Aacute="A",aacute="a", + Cacute="C",cacute="c", + Eacute="E",eacute="e", + Iacute="I",iacute="i", + Lacute="L",lacute="l", + Nacute="N",nacute="n", + Oacute="O",oacute="o", + Racute="R",racute="r", + Sacute="S",sacute="s", + Uacute="U",uacute="u", + Yacute="Y",yacute="y", + Zacute="Z",zacute="z", + Dstroke="D",dstroke="d", + Hstroke="H",hstroke="h", + Tstroke="T",tstroke="t", + Cdotaccent="C",cdotaccent="c", + Edotaccent="E",edotaccent="e", + Gdotaccent="G",gdotaccent="g", + Idotaccent="I",idotaccent="i", + Zdotaccent="Z",zdotaccent="z", + Amacron="A",amacron="a", + Emacron="E",emacron="e", + Imacron="I",imacron="i", + Omacron="O",omacron="o", + Umacron="U",umacron="u", + Ccedilla="C",ccedilla="c", + Kcedilla="K",kcedilla="k", + Lcedilla="L",lcedilla="l", + Ncedilla="N",ncedilla="n", + Rcedilla="R",rcedilla="r", + Scedilla="S",scedilla="s", + Tcedilla="T",tcedilla="t", + Ohungarumlaut="O",ohungarumlaut="o", + Uhungarumlaut="U",uhungarumlaut="u", + Aogonek="A",aogonek="a", + Eogonek="E",eogonek="e", + Iogonek="I",iogonek="i", + Uogonek="U",uogonek="u", + Aring="A",aring="a", + Uring="U",uring="u", + Abreve="A",abreve="a", + Ebreve="E",ebreve="e", + Gbreve="G",gbreve="g", + Ibreve="I",ibreve="i", + Obreve="O",obreve="o", + Ubreve="U",ubreve="u", + Ccaron="C",ccaron="c", + Dcaron="D",dcaron="d", + Ecaron="E",ecaron="e", + Lcaron="L",lcaron="l", + Ncaron="N",ncaron="n", + Rcaron="R",rcaron="r", + Scaron="S",scaron="s", + Tcaron="T",tcaron="t", + Zcaron="Z",zcaron="z", + dotlessI="I",dotlessi="i", + dotlessJ="J",dotlessj="j", + AEligature="AE",aeligature="ae",AE="AE",ae="ae", + OEligature="OE",oeligature="oe",OE="OE",oe="oe", + IJligature="IJ",ijligature="ij",IJ="IJ",ij="ij", + Lstroke="L",lstroke="l",Lslash="L",lslash="l", + Ostroke="O",ostroke="o",Oslash="O",oslash="o", + Ssharp="SS",ssharp="ss", + Aumlaut="A",aumlaut="a", + Eumlaut="E",eumlaut="e", + Iumlaut="I",iumlaut="i", + Oumlaut="O",oumlaut="o", + Uumlaut="U",uumlaut="u", + } +} + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['luatex-fonts-tfm']={ + version=1.001, + comment="companion to luatex-*.tex", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end +local fonts=fonts +local tfm={} +fonts.handlers.tfm=tfm +fonts.formats.tfm="type1" +function fonts.readers.tfm(specification) + local fullname=specification.filename or "" + if fullname=="" then + local forced=specification.forced or "" + if forced~="" then + fullname=specification.name.."."..forced + else + fullname=specification.name + end + end + local foundname=resolvers.findbinfile(fullname,'tfm') or "" + if foundname=="" then + foundname=resolvers.findbinfile(fullname,'ofm') or "" + end + if foundname~="" then + specification.filename=foundname + specification.format="ofm" + return 
font.read_tfm(specification.filename,specification.size) + end +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-oti']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local lower=string.lower +local fonts=fonts +local constructors=fonts.constructors +local otf=constructors.newhandler("otf") +local otffeatures=constructors.newfeatures("otf") +local otftables=otf.tables +local registerotffeature=otffeatures.register +local allocate=utilities.storage.allocate +registerotffeature { + name="features", + description="initialization of feature handler", + default=true, +} +local function setmode(tfmdata,value) + if value then + tfmdata.properties.mode=lower(value) + end +end +local function setlanguage(tfmdata,value) + if value then + local cleanvalue=lower(value) + local languages=otftables and otftables.languages + local properties=tfmdata.properties + if not languages then + properties.language=cleanvalue + elseif languages[value] then + properties.language=cleanvalue + else + properties.language="dflt" + end + end +end +local function setscript(tfmdata,value) + if value then + local cleanvalue=lower(value) + local scripts=otftables and otftables.scripts + local properties=tfmdata.properties + if not scripts then + properties.script=cleanvalue + elseif scripts[value] then + properties.script=cleanvalue + else + properties.script="dflt" + end + end +end +registerotffeature { + name="mode", + description="mode", + initializers={ + base=setmode, + node=setmode, + } +} +registerotffeature { + name="language", + description="language", + initializers={ + base=setlanguage, + node=setlanguage, + } +} +registerotffeature { + name="script", + description="script", + initializers={ + base=setscript, + node=setscript, + } +} + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-otf']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local utfbyte=utf.byte +local format,gmatch,gsub,find,match,lower,strip=string.format,string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip +local type,next,tonumber,tostring=type,next,tonumber,tostring +local abs=math.abs +local insert=table.insert +local lpegmatch=lpeg.match +local reversed,concat,remove,sortedkeys=table.reversed,table.concat,table.remove,table.sortedkeys +local ioflush=io.flush +local fastcopy,tohash,derivetable=table.fastcopy,table.tohash,table.derive +local formatters=string.formatters +local P,R,S,C,Ct,lpegmatch=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Ct,lpeg.match +local setmetatableindex=table.setmetatableindex +local allocate=utilities.storage.allocate +local registertracker=trackers.register +local registerdirective=directives.register +local starttiming=statistics.starttiming +local stoptiming=statistics.stoptiming +local elapsedtime=statistics.elapsedtime +local findbinfile=resolvers.findbinfile +local trace_private=false registertracker("otf.private",function(v) trace_private=v end) +local trace_loading=false registertracker("otf.loading",function(v) trace_loading=v end) +local trace_features=false registertracker("otf.features",function(v) 
trace_features=v end) +local trace_dynamics=false registertracker("otf.dynamics",function(v) trace_dynamics=v end) +local trace_sequences=false registertracker("otf.sequences",function(v) trace_sequences=v end) +local trace_markwidth=false registertracker("otf.markwidth",function(v) trace_markwidth=v end) +local trace_defining=false registertracker("fonts.defining",function(v) trace_defining=v end) +local compact_lookups=true registertracker("otf.compactlookups",function(v) compact_lookups=v end) +local purge_names=true registertracker("otf.purgenames",function(v) purge_names=v end) +local report_otf=logs.reporter("fonts","otf loading") +local fonts=fonts +local otf=fonts.handlers.otf +otf.glists={ "gsub","gpos" } +otf.version=2.802 +otf.cache=containers.define("fonts","otf",otf.version,true) +local fontdata=fonts.hashes.identifiers +local chardata=characters and characters.data +local definers=fonts.definers +local readers=fonts.readers +local constructors=fonts.constructors +local otffeatures=constructors.newfeatures("otf") +local registerotffeature=otffeatures.register +local enhancers=allocate() +otf.enhancers=enhancers +local patches={} +enhancers.patches=patches +local forceload=false +local cleanup=0 +local packdata=true +local syncspace=true +local forcenotdef=false +local includesubfonts=false +local overloadkerns=false +local applyruntimefixes=fonts.treatments and fonts.treatments.applyfixes +local wildcard="*" +local default="dflt" +local fontloaderfields=fontloader.fields +local mainfields=nil +local glyphfields=nil +local formats=fonts.formats +formats.otf="opentype" +formats.ttf="truetype" +formats.ttc="truetype" +formats.dfont="truetype" +registerdirective("fonts.otf.loader.cleanup",function(v) cleanup=tonumber(v) or (v and 1) or 0 end) +registerdirective("fonts.otf.loader.force",function(v) forceload=v end) +registerdirective("fonts.otf.loader.pack",function(v) packdata=v end) +registerdirective("fonts.otf.loader.syncspace",function(v) syncspace=v end) +registerdirective("fonts.otf.loader.forcenotdef",function(v) forcenotdef=v end) +registerdirective("fonts.otf.loader.overloadkerns",function(v) overloadkerns=v end) +function otf.fileformat(filename) + local leader=lower(io.loadchunk(filename,4)) + local suffix=lower(file.suffix(filename)) + if leader=="otto" then + return formats.otf,suffix=="otf" + elseif leader=="ttcf" then + return formats.ttc,suffix=="ttc" + elseif suffix=="ttc" then + return formats.ttc,true + elseif suffix=="dfont" then + return formats.dfont,true + else + return formats.ttf,suffix=="ttf" + end +end +local function otf_format(filename) + local format,okay=otf.fileformat(filename) + if not okay then + report_otf("font %a is actually an %a file",filename,format) + end + return format +end +local function load_featurefile(raw,featurefile) + if featurefile and featurefile~="" then + if trace_loading then + report_otf("using featurefile %a",featurefile) + end + fontloader.apply_featurefile(raw,featurefile) + end +end +local function showfeatureorder(rawdata,filename) + local sequences=rawdata.resources.sequences + if sequences and #sequences>0 then + if trace_loading then + report_otf("font %a has %s sequences",filename,#sequences) + report_otf(" ") + end + for nos=1,#sequences do + local sequence=sequences[nos] + local typ=sequence.type or "no-type" + local name=sequence.name or "no-name" + local subtables=sequence.subtables or { "no-subtables" } + local features=sequence.features + if trace_loading then + report_otf("%3i %-15s %-20s [% 
t]",nos,name,typ,subtables) + end + if features then + for feature,scripts in next,features do + local tt={} + if type(scripts)=="table" then + for script,languages in next,scripts do + local ttt={} + for language,_ in next,languages do + ttt[#ttt+1]=language + end + tt[#tt+1]=formatters["[%s: % t]"](script,ttt) + end + if trace_loading then + report_otf(" %s: % t",feature,tt) + end + else + if trace_loading then + report_otf(" %s: %S",feature,scripts) + end + end + end + end + end + if trace_loading then + report_otf("\n") + end + elseif trace_loading then + report_otf("font %a has no sequences",filename) + end +end +local valid_fields=table.tohash { + "ascent", + "cidinfo", + "copyright", + "descent", + "design_range_bottom", + "design_range_top", + "design_size", + "encodingchanged", + "extrema_bound", + "familyname", + "fontname", + "fontstyle_id", + "fontstyle_name", + "fullname", + "hasvmetrics", + "horiz_base", + "issans", + "isserif", + "italicangle", + "macstyle", + "onlybitmaps", + "origname", + "os2_version", + "pfminfo", + "serifcheck", + "sfd_version", + "strokedfont", + "strokewidth", + "table_version", + "ttf_tables", + "uni_interp", + "uniqueid", + "units_per_em", + "upos", + "use_typo_metrics", + "uwidth", + "validation_state", + "version", + "vert_base", + "weight", + "weight_width_slope_only", +} +local ordered_enhancers={ + "prepare tables", + "prepare glyphs", + "prepare lookups", + "analyze glyphs", + "analyze math", + "reorganize lookups", + "reorganize mark classes", + "reorganize anchor classes", + "reorganize glyph kerns", + "reorganize glyph lookups", + "reorganize glyph anchors", + "merge kern classes", + "reorganize features", + "reorganize subtables", + "check glyphs", + "check metadata", + "check extra features", + "prepare tounicode", + "check encoding", + "add duplicates", + "cleanup tables", + "compact lookups", + "purge names", +} +local actions=allocate() +local before=allocate() +local after=allocate() +patches.before=before +patches.after=after +local function enhance(name,data,filename,raw) + local enhancer=actions[name] + if enhancer then + if trace_loading then + report_otf("apply enhancement %a to file %a",name,filename) + ioflush() + end + enhancer(data,filename,raw) + else + end +end +function enhancers.apply(data,filename,raw) + local basename=file.basename(lower(filename)) + if trace_loading then + report_otf("%s enhancing file %a","start",filename) + end + ioflush() + for e=1,#ordered_enhancers do + local enhancer=ordered_enhancers[e] + local b=before[enhancer] + if b then + for pattern,action in next,b do + if find(basename,pattern) then + action(data,filename,raw) + end + end + end + enhance(enhancer,data,filename,raw) + local a=after[enhancer] + if a then + for pattern,action in next,a do + if find(basename,pattern) then + action(data,filename,raw) + end + end + end + ioflush() + end + if trace_loading then + report_otf("%s enhancing file %a","stop",filename) + end + ioflush() +end +function patches.register(what,where,pattern,action) + local pw=patches[what] + if pw then + local ww=pw[where] + if ww then + ww[pattern]=action + else + pw[where]={ [pattern]=action} + end + end +end +function patches.report(fmt,...) 
+ if trace_loading then + report_otf("patching: %s",formatters[fmt](...)) + end +end +function enhancers.register(what,action) + actions[what]=action +end +function otf.load(filename,sub,featurefile) + local base=file.basename(file.removesuffix(filename)) + local name=file.removesuffix(base) + local attr=lfs.attributes(filename) + local size=attr and attr.size or 0 + local time=attr and attr.modification or 0 + if featurefile then + name=name.."@"..file.removesuffix(file.basename(featurefile)) + end + if sub=="" then + sub=false + end + local hash=name + if sub then + hash=hash.."-"..sub + end + hash=containers.cleanname(hash) + local featurefiles + if featurefile then + featurefiles={} + for s in gmatch(featurefile,"[^,]+") do + local name=resolvers.findfile(file.addsuffix(s,'fea'),'fea') or "" + if name=="" then + report_otf("loading error, no featurefile %a",s) + else + local attr=lfs.attributes(name) + featurefiles[#featurefiles+1]={ + name=name, + size=attr and attr.size or 0, + time=attr and attr.modification or 0, + } + end + end + if #featurefiles==0 then + featurefiles=nil + end + end + local data=containers.read(otf.cache,hash) + local reload=not data or data.size~=size or data.time~=time + if forceload then + report_otf("forced reload of %a due to hard coded flag",filename) + reload=true + end + if not reload then + local featuredata=data.featuredata + if featurefiles then + if not featuredata or #featuredata~=#featurefiles then + reload=true + else + for i=1,#featurefiles do + local fi,fd=featurefiles[i],featuredata[i] + if fi.name~=fd.name or fi.size~=fd.size or fi.time~=fd.time then + reload=true + break + end + end + end + elseif featuredata then + reload=true + end + if reload then + report_otf("loading: forced reload due to changed featurefile specification %a",featurefile) + end + end + if reload then + report_otf("loading %a, hash %a",filename,hash) + local fontdata,messages + if sub then + fontdata,messages=fontloader.open(filename,sub) + else + fontdata,messages=fontloader.open(filename) + end + if fontdata then + mainfields=mainfields or (fontloaderfields and fontloaderfields(fontdata)) + end + if trace_loading and messages and #messages>0 then + if type(messages)=="string" then + report_otf("warning: %s",messages) + else + for m=1,#messages do + report_otf("warning: %S",messages[m]) + end + end + else + report_otf("loading done") + end + if fontdata then + if featurefiles then + for i=1,#featurefiles do + load_featurefile(fontdata,featurefiles[i].name) + end + end + local unicodes={ + } + local splitter=lpeg.splitter(" ",unicodes) + data={ + size=size, + time=time, + format=otf_format(filename), + featuredata=featurefiles, + resources={ + filename=resolvers.unresolve(filename), + version=otf.version, + creator="context mkiv", + unicodes=unicodes, + indices={ + }, + duplicates={ + }, + variants={ + }, + lookuptypes={}, + }, + warnings={}, + metadata={ + }, + properties={ + }, + descriptions={}, + goodies={}, + helpers={ + tounicodelist=splitter, + tounicodetable=Ct(splitter), + }, + } + starttiming(data) + report_otf("file size: %s",size) + enhancers.apply(data,filename,fontdata) + local packtime={} + if packdata then + if cleanup>0 then + collectgarbage("collect") + end + starttiming(packtime) + enhance("pack",data,filename,nil) + stoptiming(packtime) + end + report_otf("saving %a in cache",filename) + data=containers.write(otf.cache,hash,data) + if cleanup>1 then + collectgarbage("collect") + end + stoptiming(data) + if elapsedtime then + report_otf("preprocessing 
and caching time %s, packtime %s", + elapsedtime(data),packdata and elapsedtime(packtime) or 0) + end + fontloader.close(fontdata) + if cleanup>3 then + collectgarbage("collect") + end + data=containers.read(otf.cache,hash) + if cleanup>2 then + collectgarbage("collect") + end + else + data=nil + report_otf("loading failed due to read error") + end + end + if data then + if trace_defining then + report_otf("loading from cache using hash %a",hash) + end + enhance("unpack",data,filename,nil,false) + local resources=data.resources + local lookuptags=resources.lookuptags + local unicodes=resources.unicodes + if not lookuptags then + lookuptags={} + resources.lookuptags=lookuptags + end + setmetatableindex(lookuptags,function(t,k) + local v=type(k)=="number" and ("lookup "..k) or k + t[k]=v + return v + end) + if not unicodes then + unicodes={} + resources.unicodes=unicodes + setmetatableindex(unicodes,function(t,k) + setmetatableindex(unicodes,nil) + for u,d in next,data.descriptions do + local n=d.name + if n then + t[n]=u + else + end + end + return rawget(t,k) + end) + end + constructors.addcoreunicodes(unicodes) + if applyruntimefixes then + applyruntimefixes(filename,data) + end + enhance("add dimensions",data,filename,nil,false) + if trace_sequences then + showfeatureorder(data,filename) + end + end + return data +end +local mt={ + __index=function(t,k) + if k=="height" then + local ht=t.boundingbox[4] + return ht<0 and 0 or ht + elseif k=="depth" then + local dp=-t.boundingbox[2] + return dp<0 and 0 or dp + elseif k=="width" then + return 0 + elseif k=="name" then + return forcenotdef and ".notdef" + end + end +} +actions["prepare tables"]=function(data,filename,raw) + data.properties.hasitalics=false +end +actions["add dimensions"]=function(data,filename) + if data then + local descriptions=data.descriptions + local resources=data.resources + local defaultwidth=resources.defaultwidth or 0 + local defaultheight=resources.defaultheight or 0 + local defaultdepth=resources.defaultdepth or 0 + local basename=trace_markwidth and file.basename(filename) + for _,d in next,descriptions do + local bb,wd=d.boundingbox,d.width + if not wd then + d.width=defaultwidth + elseif trace_markwidth and wd~=0 and d.class=="mark" then + report_otf("mark %a with width %b found in %a",d.name or "",wd,basename) + end + if bb then + local ht,dp=bb[4],-bb[2] + if ht==0 or ht<0 then + else + d.height=ht + end + if dp==0 or dp<0 then + else + d.depth=dp + end + end + end + end +end +local function somecopy(old) + if old then + local new={} + if type(old)=="table" then + for k,v in next,old do + if k=="glyphs" then + elseif type(v)=="table" then + new[k]=somecopy(v) + else + new[k]=v + end + end + else + for i=1,#mainfields do + local k=mainfields[i] + local v=old[k] + if k=="glyphs" then + elseif type(v)=="table" then + new[k]=somecopy(v) + else + new[k]=v + end + end + end + return new + else + return {} + end +end +actions["prepare glyphs"]=function(data,filename,raw) + local rawglyphs=raw.glyphs + local rawsubfonts=raw.subfonts + local rawcidinfo=raw.cidinfo + local criterium=constructors.privateoffset + local private=criterium + local resources=data.resources + local metadata=data.metadata + local properties=data.properties + local descriptions=data.descriptions + local unicodes=resources.unicodes + local indices=resources.indices + local duplicates=resources.duplicates + local variants=resources.variants + if rawsubfonts then + metadata.subfonts=includesubfonts and {} + properties.cidinfo=rawcidinfo + if 
rawcidinfo.registry then + local cidmap=fonts.cid.getmap(rawcidinfo) + if cidmap then + rawcidinfo.usedname=cidmap.usedname + local nofnames,nofunicodes=0,0 + local cidunicodes,cidnames=cidmap.unicodes,cidmap.names + for cidindex=1,#rawsubfonts do + local subfont=rawsubfonts[cidindex] + local cidglyphs=subfont.glyphs + if includesubfonts then + metadata.subfonts[cidindex]=somecopy(subfont) + end + for index=0,subfont.glyphcnt-1 do + local glyph=cidglyphs[index] + if glyph then + local unicode=glyph.unicode + if unicode>=0x00E000 and unicode<=0x00F8FF then + unicode=-1 + elseif unicode>=0x0F0000 and unicode<=0x0FFFFD then + unicode=-1 + elseif unicode>=0x100000 and unicode<=0x10FFFD then + unicode=-1 + end + local name=glyph.name or cidnames[index] + if not unicode or unicode==-1 then + unicode=cidunicodes[index] + end + if unicode and descriptions[unicode] then + if trace_private then + report_otf("preventing glyph %a at index %H to overload unicode %U",name or "noname",index,unicode) + end + unicode=-1 + end + if not unicode or unicode==-1 then + if not name then + name=format("u%06X.ctx",private) + end + unicode=private + unicodes[name]=private + if trace_private then + report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private) + end + private=private+1 + nofnames=nofnames+1 + else + if not name then + name=format("u%06X.ctx",unicode) + end + unicodes[name]=unicode + nofunicodes=nofunicodes+1 + end + indices[index]=unicode + local description={ + boundingbox=glyph.boundingbox, + name=glyph.name or name or "unknown", + cidindex=cidindex, + index=index, + glyph=glyph, + } + descriptions[unicode]=description + else + end + end + end + if trace_loading then + report_otf("cid font remapped, %s unicode points, %s symbolic names, %s glyphs",nofunicodes,nofnames,nofunicodes+nofnames) + end + elseif trace_loading then + report_otf("unable to remap cid font, missing cid file for %a",filename) + end + elseif trace_loading then + report_otf("font %a has no glyphs",filename) + end + else + for index=0,raw.glyphcnt-1 do + local glyph=rawglyphs[index] + if glyph then + local unicode=glyph.unicode + local name=glyph.name + if not unicode or unicode==-1 then + unicode=private + unicodes[name]=private + if trace_private then + report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private) + end + private=private+1 + else + if unicode>criterium then + local taken=descriptions[unicode] + if taken then + if unicode>=private then + private=unicode+1 + else + private=private+1 + end + descriptions[private]=taken + unicodes[taken.name]=private + indices[taken.index]=private + if trace_private then + report_otf("slot %U is moved to %U due to private in font",unicode) + end + else + if unicode>=private then + private=unicode+1 + end + end + end + unicodes[name]=unicode + end + indices[index]=unicode + descriptions[unicode]={ + boundingbox=glyph.boundingbox, + name=name, + index=index, + glyph=glyph, + } + local altuni=glyph.altuni + if altuni then + for i=1,#altuni do + local a=altuni[i] + local u=a.unicode + local v=a.variant + if v then + local vv=variants[v] + if vv then + vv[u]=unicode + else + vv={ [u]=unicode } + variants[v]=vv + end + end + end + end + else + report_otf("potential problem: glyph %U is used but empty",index) + end + end + end + resources.private=private +end +actions["check encoding"]=function(data,filename,raw) + local descriptions=data.descriptions + local resources=data.resources + local properties=data.properties + local 
unicodes=resources.unicodes + local indices=resources.indices + local duplicates=resources.duplicates + local mapdata=raw.map or {} + local unicodetoindex=mapdata and mapdata.map or {} + local indextounicode=mapdata and mapdata.backmap or {} + local encname=lower(data.enc_name or mapdata.enc_name or "") + local criterium=0xFFFF + local privateoffset=constructors.privateoffset + if find(encname,"unicode") then + if trace_loading then + report_otf("checking embedded unicode map %a",encname) + end + local reported={} + for maybeunicode,index in next,unicodetoindex do + if descriptions[maybeunicode] then + else + local unicode=indices[index] + if not unicode then + elseif maybeunicode==unicode then + elseif unicode>privateoffset then + else + local d=descriptions[unicode] + if d then + local c=d.copies + if c then + c[maybeunicode]=true + else + d.copies={ [maybeunicode]=true } + end + elseif index and not reported[index] then + report_otf("missing index %i",index) + reported[index]=true + end + end + end + end + for unicode,data in next,descriptions do + local d=data.copies + if d then + duplicates[unicode]=sortedkeys(d) + data.copies=nil + end + end + elseif properties.cidinfo then + report_otf("warning: no unicode map, used cidmap %a",properties.cidinfo.usedname) + else + report_otf("warning: non unicode map %a, only using glyph unicode data",encname or "whatever") + end + if mapdata then + mapdata.map={} + mapdata.backmap={} + end +end +actions["add duplicates"]=function(data,filename,raw) + local descriptions=data.descriptions + local resources=data.resources + local properties=data.properties + local unicodes=resources.unicodes + local indices=resources.indices + local duplicates=resources.duplicates + for unicode,d in next,duplicates do + local nofduplicates=#d + if nofduplicates>4 then + if trace_loading then + report_otf("ignoring excessive duplicates of %U (n=%s)",unicode,nofduplicates) + end + else + for i=1,nofduplicates do + local u=d[i] + if not descriptions[u] then + local description=descriptions[unicode] + local n=0 + for _,description in next,descriptions do + if kerns then + local kerns=description.kerns + for _,k in next,kerns do + local ku=k[unicode] + if ku then + k[u]=ku + n=n+1 + end + end + end + end + if u>0 then + local duplicate=table.copy(description) + duplicate.comment=format("copy of U+%05X",unicode) + descriptions[u]=duplicate + if trace_loading then + report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n) + end + end + end + end + end + end +end +actions["analyze glyphs"]=function(data,filename,raw) + local descriptions=data.descriptions + local resources=data.resources + local metadata=data.metadata + local properties=data.properties + local hasitalics=false + local widths={} + local marks={} + for unicode,description in next,descriptions do + local glyph=description.glyph + local italic=glyph.italic_correction + if not italic then + elseif italic==0 then + else + description.italic=italic + hasitalics=true + end + local width=glyph.width + widths[width]=(widths[width] or 0)+1 + local class=glyph.class + if class then + if class=="mark" then + marks[unicode]=true + end + description.class=class + end + end + properties.hasitalics=hasitalics + resources.marks=marks + local wd,most=0,1 + for k,v in next,widths do + if v>most then + wd,most=k,v + end + end + if most>1000 then + if trace_loading then + report_otf("most common width: %s (%s times), sharing (cjk font)",wd,most) + end + for unicode,description in next,descriptions 
do + if description.width==wd then + else + description.width=description.glyph.width + end + end + resources.defaultwidth=wd + else + for unicode,description in next,descriptions do + description.width=description.glyph.width + end + end +end +actions["reorganize mark classes"]=function(data,filename,raw) + local mark_classes=raw.mark_classes + if mark_classes then + local resources=data.resources + local unicodes=resources.unicodes + local markclasses={} + resources.markclasses=markclasses + for name,class in next,mark_classes do + local t={} + for s in gmatch(class,"[^ ]+") do + t[unicodes[s]]=true + end + markclasses[name]=t + end + end +end +actions["reorganize features"]=function(data,filename,raw) + local features={} + data.resources.features=features + for k,what in next,otf.glists do + local dw=raw[what] + if dw then + local f={} + features[what]=f + for i=1,#dw do + local d=dw[i] + local dfeatures=d.features + if dfeatures then + for i=1,#dfeatures do + local df=dfeatures[i] + local tag=strip(lower(df.tag)) + local ft=f[tag] + if not ft then + ft={} + f[tag]=ft + end + local dscripts=df.scripts + for i=1,#dscripts do + local d=dscripts[i] + local languages=d.langs + local script=strip(lower(d.script)) + local fts=ft[script] if not fts then fts={} ft[script]=fts end + for i=1,#languages do + fts[strip(lower(languages[i]))]=true + end + end + end + end + end + end + end +end +actions["reorganize anchor classes"]=function(data,filename,raw) + local resources=data.resources + local anchor_to_lookup={} + local lookup_to_anchor={} + resources.anchor_to_lookup=anchor_to_lookup + resources.lookup_to_anchor=lookup_to_anchor + local classes=raw.anchor_classes + if classes then + for c=1,#classes do + local class=classes[c] + local anchor=class.name + local lookups=class.lookup + if type(lookups)~="table" then + lookups={ lookups } + end + local a=anchor_to_lookup[anchor] + if not a then + a={} + anchor_to_lookup[anchor]=a + end + for l=1,#lookups do + local lookup=lookups[l] + local l=lookup_to_anchor[lookup] + if l then + l[anchor]=true + else + l={ [anchor]=true } + lookup_to_anchor[lookup]=l + end + a[lookup]=true + end + end + end +end +actions["prepare tounicode"]=function(data,filename,raw) + fonts.mappings.addtounicode(data,filename) +end +local g_directions={ + gsub_contextchain=1, + gpos_contextchain=1, + gsub_reversecontextchain=-1, + gpos_reversecontextchain=-1, +} +actions["reorganize subtables"]=function(data,filename,raw) + local resources=data.resources + local sequences={} + local lookups={} + local chainedfeatures={} + resources.sequences=sequences + resources.lookups=lookups + for _,what in next,otf.glists do + local dw=raw[what] + if dw then + for k=1,#dw do + local gk=dw[k] + local features=gk.features + local typ=gk.type + local chain=g_directions[typ] or 0 + local subtables=gk.subtables + if subtables then + local t={} + for s=1,#subtables do + t[s]=subtables[s].name + end + subtables=t + end + local flags,markclass=gk.flags,nil + if flags then + local t={ + (flags.ignorecombiningmarks and "mark") or false, + (flags.ignoreligatures and "ligature") or false, + (flags.ignorebaseglyphs and "base") or false, + flags.r2l or false, + } + markclass=flags.mark_class + if markclass then + markclass=resources.markclasses[markclass] + end + flags=t + end + local name=gk.name + if not name then + report_otf("skipping weird lookup number %s",k) + elseif features then + local f={} + local o={} + for i=1,#features do + local df=features[i] + local tag=strip(lower(df.tag)) + local 
ft=f[tag] + if not ft then + ft={} + f[tag]=ft + o[#o+1]=tag + end + local dscripts=df.scripts + for i=1,#dscripts do + local d=dscripts[i] + local languages=d.langs + local script=strip(lower(d.script)) + local fts=ft[script] if not fts then fts={} ft[script]=fts end + for i=1,#languages do + fts[strip(lower(languages[i]))]=true + end + end + end + sequences[#sequences+1]={ + type=typ, + chain=chain, + flags=flags, + name=name, + subtables=subtables, + markclass=markclass, + features=f, + order=o, + } + else + lookups[name]={ + type=typ, + chain=chain, + flags=flags, + subtables=subtables, + markclass=markclass, + } + end + end + end + end +end +actions["prepare lookups"]=function(data,filename,raw) + local lookups=raw.lookups + if lookups then + data.lookups=lookups + end +end +local function t_uncover(splitter,cache,covers) + local result={} + for n=1,#covers do + local cover=covers[n] + local uncovered=cache[cover] + if not uncovered then + uncovered=lpegmatch(splitter,cover) + cache[cover]=uncovered + end + result[n]=uncovered + end + return result +end +local function s_uncover(splitter,cache,cover) + if cover=="" then + return nil + else + local uncovered=cache[cover] + if not uncovered then + uncovered=lpegmatch(splitter,cover) + cache[cover]=uncovered + end + return { uncovered } + end +end +local function t_hashed(t,cache) + if t then + local ht={} + for i=1,#t do + local ti=t[i] + local tih=cache[ti] + if not tih then + local tn=#ti + if tn==1 then + tih={ [ti[1]]=true } + else + tih={} + for i=1,tn do + tih[ti[i]]=true + end + end + cache[ti]=tih + end + ht[i]=tih + end + return ht + else + return nil + end +end +local function s_hashed(t,cache) + if t then + local tf=t[1] + local nf=#tf + if nf==1 then + return { [tf[1]]=true } + else + local ht={} + for i=1,nf do + ht[i]={ [tf[i]]=true } + end + return ht + end + else + return nil + end +end +local function r_uncover(splitter,cache,cover,replacements) + if cover=="" then + return nil + else + local uncovered=cover[1] + local replaced=cache[replacements] + if not replaced then + replaced=lpegmatch(splitter,replacements) + cache[replacements]=replaced + end + local nu,nr=#uncovered,#replaced + local r={} + if nu==nr then + for i=1,nu do + r[uncovered[i]]=replaced[i] + end + end + return r + end +end +actions["reorganize lookups"]=function(data,filename,raw) + if data.lookups then + local splitter=data.helpers.tounicodetable + local t_u_cache={} + local s_u_cache=t_u_cache + local t_h_cache={} + local s_h_cache=t_h_cache + local r_u_cache={} + for _,lookup in next,data.lookups do + local rules=lookup.rules + if rules then + local format=lookup.format + if format=="class" then + local before_class=lookup.before_class + if before_class then + before_class=t_uncover(splitter,t_u_cache,reversed(before_class)) + end + local current_class=lookup.current_class + if current_class then + current_class=t_uncover(splitter,t_u_cache,current_class) + end + local after_class=lookup.after_class + if after_class then + after_class=t_uncover(splitter,t_u_cache,after_class) + end + for i=1,#rules do + local rule=rules[i] + local class=rule.class + local before=class.before + if before then + for i=1,#before do + before[i]=before_class[before[i]] or {} + end + rule.before=t_hashed(before,t_h_cache) + end + local current=class.current + local lookups=rule.lookups + if current then + for i=1,#current do + current[i]=current_class[current[i]] or {} + if lookups and not lookups[i] then + lookups[i]="" + end + end + 
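+ -- Class based contextual rules are recast here in coverage terms: the
+ -- numeric class references in before/current/after are replaced by the
+ -- glyph lists of those classes, missing per-position lookups are padded
+ -- with "", and the lists are hashed into sets, so rule.current ends up
+ -- shaped like { { [0x0066]=true, ... } } (illustrative shape only);
+ -- lookup.format is then switched to "coverage" below.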
rule.current=t_hashed(current,t_h_cache) + end + local after=class.after + if after then + for i=1,#after do + after[i]=after_class[after[i]] or {} + end + rule.after=t_hashed(after,t_h_cache) + end + rule.class=nil + end + lookup.before_class=nil + lookup.current_class=nil + lookup.after_class=nil + lookup.format="coverage" + elseif format=="coverage" then + for i=1,#rules do + local rule=rules[i] + local coverage=rule.coverage + if coverage then + local before=coverage.before + if before then + before=t_uncover(splitter,t_u_cache,reversed(before)) + rule.before=t_hashed(before,t_h_cache) + end + local current=coverage.current + if current then + current=t_uncover(splitter,t_u_cache,current) + local lookups=rule.lookups + if lookups then + for i=1,#current do + if not lookups[i] then + lookups[i]="" + end + end + end + rule.current=t_hashed(current,t_h_cache) + end + local after=coverage.after + if after then + after=t_uncover(splitter,t_u_cache,after) + rule.after=t_hashed(after,t_h_cache) + end + rule.coverage=nil + end + end + elseif format=="reversecoverage" then + for i=1,#rules do + local rule=rules[i] + local reversecoverage=rule.reversecoverage + if reversecoverage then + local before=reversecoverage.before + if before then + before=t_uncover(splitter,t_u_cache,reversed(before)) + rule.before=t_hashed(before,t_h_cache) + end + local current=reversecoverage.current + if current then + current=t_uncover(splitter,t_u_cache,current) + rule.current=t_hashed(current,t_h_cache) + end + local after=reversecoverage.after + if after then + after=t_uncover(splitter,t_u_cache,after) + rule.after=t_hashed(after,t_h_cache) + end + local replacements=reversecoverage.replacements + if replacements then + rule.replacements=r_uncover(splitter,r_u_cache,current,replacements) + end + rule.reversecoverage=nil + end + end + elseif format=="glyphs" then + for i=1,#rules do + local rule=rules[i] + local glyphs=rule.glyphs + if glyphs then + local fore=glyphs.fore + if fore and fore~="" then + fore=s_uncover(splitter,s_u_cache,fore) + rule.after=s_hashed(fore,s_h_cache) + end + local back=glyphs.back + if back then + back=s_uncover(splitter,s_u_cache,back) + rule.before=s_hashed(back,s_h_cache) + end + local names=glyphs.names + if names then + names=s_uncover(splitter,s_u_cache,names) + rule.current=s_hashed(names,s_h_cache) + end + rule.glyphs=nil + local lookups=rule.lookups + if lookups then + for i=1,#names do + if not lookups[i] then + lookups[i]="" + end + end + end + end + end + end + end + end + end +end +local function check_variants(unicode,the_variants,splitter,unicodes) + local variants=the_variants.variants + if variants then + local glyphs=lpegmatch(splitter,variants) + local done={ [unicode]=true } + local n=0 + for i=1,#glyphs do + local g=glyphs[i] + if done[g] then + if i>1 then + report_otf("skipping cyclic reference %U in math variant %U",g,unicode) + end + else + if n==0 then + n=1 + variants={ g } + else + n=n+1 + variants[n]=g + end + done[g]=true + end + end + if n==0 then + variants=nil + end + end + local parts=the_variants.parts + if parts then + local p=#parts + if p>0 then + for i=1,p do + local pi=parts[i] + pi.glyph=unicodes[pi.component] or 0 + pi.component=nil + end + else + parts=nil + end + end + local italic_correction=the_variants.italic_correction + if italic_correction and italic_correction==0 then + italic_correction=nil + end + return variants,parts,italic_correction +end +actions["analyze math"]=function(data,filename,raw) + if raw.math then + 
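+ -- For opentype math fonts the per-glyph math data (mathkern tables, the
+ -- horizontal and vertical variants and parts, top_accent and the italic
+ -- correction) is copied from the raw glyph into description.math; the
+ -- variants first pass through check_variants above, which drops cyclic
+ -- references and resolves part components to unicodes.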
data.metadata.math=raw.math + local unicodes=data.resources.unicodes + local splitter=data.helpers.tounicodetable + for unicode,description in next,data.descriptions do + local glyph=description.glyph + local mathkerns=glyph.mathkern + local horiz_variants=glyph.horiz_variants + local vert_variants=glyph.vert_variants + local top_accent=glyph.top_accent + if mathkerns or horiz_variants or vert_variants or top_accent then + local math={} + if top_accent then + math.top_accent=top_accent + end + if mathkerns then + for k,v in next,mathkerns do + if not next(v) then + mathkerns[k]=nil + else + for k,v in next,v do + if v==0 then + k[v]=nil + end + end + end + end + math.kerns=mathkerns + end + if horiz_variants then + math.horiz_variants,math.horiz_parts,math.horiz_italic_correction=check_variants(unicode,horiz_variants,splitter,unicodes) + end + if vert_variants then + math.vert_variants,math.vert_parts,math.vert_italic_correction=check_variants(unicode,vert_variants,splitter,unicodes) + end + local italic_correction=description.italic + if italic_correction and italic_correction~=0 then + math.italic_correction=italic_correction + end + description.math=math + end + end + end +end +actions["reorganize glyph kerns"]=function(data,filename,raw) + local descriptions=data.descriptions + local resources=data.resources + local unicodes=resources.unicodes + for unicode,description in next,descriptions do + local kerns=description.glyph.kerns + if kerns then + local newkerns={} + for k,kern in next,kerns do + local name=kern.char + local offset=kern.off + local lookup=kern.lookup + if name and offset and lookup then + local unicode=unicodes[name] + if unicode then + if type(lookup)=="table" then + for l=1,#lookup do + local lookup=lookup[l] + local lookupkerns=newkerns[lookup] + if lookupkerns then + lookupkerns[unicode]=offset + else + newkerns[lookup]={ [unicode]=offset } + end + end + else + local lookupkerns=newkerns[lookup] + if lookupkerns then + lookupkerns[unicode]=offset + else + newkerns[lookup]={ [unicode]=offset } + end + end + elseif trace_loading then + report_otf("problems with unicode %a of kern %a of glyph %U",name,k,unicode) + end + end + end + description.kerns=newkerns + end + end +end +actions["merge kern classes"]=function(data,filename,raw) + local gposlist=raw.gpos + if gposlist then + local descriptions=data.descriptions + local resources=data.resources + local unicodes=resources.unicodes + local splitter=data.helpers.tounicodetable + local ignored=0 + local blocked=0 + for gp=1,#gposlist do + local gpos=gposlist[gp] + local subtables=gpos.subtables + if subtables then + local first_done={} + local split={} + for s=1,#subtables do + local subtable=subtables[s] + local kernclass=subtable.kernclass + local lookup=subtable.lookup or subtable.name + if kernclass then + if #kernclass>0 then + kernclass=kernclass[1] + lookup=type(kernclass.lookup)=="string" and kernclass.lookup or lookup + report_otf("fixing kernclass table of lookup %a",lookup) + end + local firsts=kernclass.firsts + local seconds=kernclass.seconds + local offsets=kernclass.offsets + for n,s in next,firsts do + split[s]=split[s] or lpegmatch(splitter,s) + end + local maxseconds=0 + for n,s in next,seconds do + if n>maxseconds then + maxseconds=n + end + split[s]=split[s] or lpegmatch(splitter,s) + end + for fk=1,#firsts do + local fv=firsts[fk] + local splt=split[fv] + if splt then + local extrakerns={} + local baseoffset=(fk-1)*maxseconds + for sk=2,maxseconds do + local sv=seconds[sk] + local splt=split[sv] + 
if splt then + local offset=offsets[baseoffset+sk] + if offset then + for i=1,#splt do + extrakerns[splt[i]]=offset + end + end + end + end + for i=1,#splt do + local first_unicode=splt[i] + if first_done[first_unicode] then + report_otf("lookup %a: ignoring further kerns of %C",lookup,first_unicode) + blocked=blocked+1 + else + first_done[first_unicode]=true + local description=descriptions[first_unicode] + if description then + local kerns=description.kerns + if not kerns then + kerns={} + description.kerns=kerns + end + local lookupkerns=kerns[lookup] + if not lookupkerns then + lookupkerns={} + kerns[lookup]=lookupkerns + end + if overloadkerns then + for second_unicode,kern in next,extrakerns do + lookupkerns[second_unicode]=kern + end + else + for second_unicode,kern in next,extrakerns do + local k=lookupkerns[second_unicode] + if not k then + lookupkerns[second_unicode]=kern + elseif k~=kern then + if trace_loading then + report_otf("lookup %a: ignoring overload of kern between %C and %C, rejecting %a, keeping %a",lookup,first_unicode,second_unicode,k,kern) + end + ignored=ignored+1 + end + end + end + elseif trace_loading then + report_otf("no glyph data for %U",first_unicode) + end + end + end + end + end + subtable.kernclass={} + end + end + end + end + if ignored>0 then + report_otf("%s kern overloads ignored",ignored) + end + if blocked>0 then + report_otf("%s succesive kerns blocked",blocked) + end + end +end +actions["check glyphs"]=function(data,filename,raw) + for unicode,description in next,data.descriptions do + description.glyph=nil + end +end +local valid=(R("\x00\x7E")-S("(){}[]<>%/ \n\r\f\v"))^0*P(-1) +local function valid_ps_name(str) + return str and str~="" and #str<64 and lpegmatch(valid,str) and true or false +end +actions["check metadata"]=function(data,filename,raw) + local metadata=data.metadata + for _,k in next,mainfields do + if valid_fields[k] then + local v=raw[k] + if not metadata[k] then + metadata[k]=v + end + end + end + local ttftables=metadata.ttf_tables + if ttftables then + for i=1,#ttftables do + ttftables[i].data="deleted" + end + end + if metadata.validation_state and table.contains(metadata.validation_state,"bad_ps_fontname") then + local function valid(what) + local names=raw.names + for i=1,#names do + local list=names[i] + local names=list.names + if names then + local name=names[what] + if name and valid_ps_name(name) then + return name + end + end + end + end + local function check(what) + local oldname=metadata[what] + if valid_ps_name(oldname) then + report_otf("ignoring warning %a because %s %a is proper ASCII","bad_ps_fontname",what,oldname) + else + local newname=valid(what) + if not newname then + newname=formatters["bad-%s-%s"](what,file.nameonly(filename)) + end + local warning=formatters["overloading %s from invalid ASCII name %a to %a"](what,oldname,newname) + data.warnings[#data.warnings+1]=warning + report_otf(warning) + metadata[what]=newname + end + end + check("fontname") + check("fullname") + end +end +actions["cleanup tables"]=function(data,filename,raw) + local duplicates=data.resources.duplicates + if duplicates then + for k,v in next,duplicates do + if #v==1 then + duplicates[k]=v[1] + end + end + end + data.resources.indices=nil + data.resources.unicodes=nil + data.helpers=nil +end +actions["reorganize glyph lookups"]=function(data,filename,raw) + local resources=data.resources + local unicodes=resources.unicodes + local descriptions=data.descriptions + local splitter=data.helpers.tounicodelist + local 
lookuptypes=resources.lookuptypes + for unicode,description in next,descriptions do + local lookups=description.glyph.lookups + if lookups then + for tag,lookuplist in next,lookups do + for l=1,#lookuplist do + local lookup=lookuplist[l] + local specification=lookup.specification + local lookuptype=lookup.type + local lt=lookuptypes[tag] + if not lt then + lookuptypes[tag]=lookuptype + elseif lt~=lookuptype then + report_otf("conflicting lookuptypes, %a points to %a and %a",tag,lt,lookuptype) + end + if lookuptype=="ligature" then + lookuplist[l]={ lpegmatch(splitter,specification.components) } + elseif lookuptype=="alternate" then + lookuplist[l]={ lpegmatch(splitter,specification.components) } + elseif lookuptype=="substitution" then + lookuplist[l]=unicodes[specification.variant] + elseif lookuptype=="multiple" then + lookuplist[l]={ lpegmatch(splitter,specification.components) } + elseif lookuptype=="position" then + lookuplist[l]={ + specification.x or 0, + specification.y or 0, + specification.h or 0, + specification.v or 0 + } + elseif lookuptype=="pair" then + local one=specification.offsets[1] + local two=specification.offsets[2] + local paired=unicodes[specification.paired] + if one then + if two then + lookuplist[l]={ paired,{ one.x or 0,one.y or 0,one.h or 0,one.v or 0 },{ two.x or 0,two.y or 0,two.h or 0,two.v or 0 } } + else + lookuplist[l]={ paired,{ one.x or 0,one.y or 0,one.h or 0,one.v or 0 } } + end + else + if two then + lookuplist[l]={ paired,{},{ two.x or 0,two.y or 0,two.h or 0,two.v or 0} } + else + lookuplist[l]={ paired } + end + end + end + end + end + local slookups,mlookups + for tag,lookuplist in next,lookups do + if #lookuplist==1 then + if slookups then + slookups[tag]=lookuplist[1] + else + slookups={ [tag]=lookuplist[1] } + end + else + if mlookups then + mlookups[tag]=lookuplist + else + mlookups={ [tag]=lookuplist } + end + end + end + if slookups then + description.slookups=slookups + end + if mlookups then + description.mlookups=mlookups + end + end + end +end +actions["reorganize glyph anchors"]=function(data,filename,raw) + local descriptions=data.descriptions + for unicode,description in next,descriptions do + local anchors=description.glyph.anchors + if anchors then + for class,data in next,anchors do + if class=="baselig" then + for tag,specification in next,data do + for i=1,#specification do + local si=specification[i] + specification[i]={ si.x or 0,si.y or 0 } + end + end + else + for tag,specification in next,data do + data[tag]={ specification.x or 0,specification.y or 0 } + end + end + end + description.anchors=anchors + end + end +end +local bogusname=(P("uni")+P("u"))*R("AF","09")^4+(P("index")+P("glyph")+S("Ii")*P("dentity")*P(".")^0)*R("09")^1 +local uselessname=(1-bogusname)^0*bogusname +actions["purge names"]=function(data,filename,raw) + if purge_names then + local n=0 + for u,d in next,data.descriptions do + if lpegmatch(uselessname,d.name) then + n=n+1 + d.name=nil + end + end + if n>0 then + report_otf("%s bogus names removed",n) + end + end +end +actions["compact lookups"]=function(data,filename,raw) + if not compact_lookups then + report_otf("not compacting") + return + end + local last=0 + local tags=table.setmetatableindex({}, + function(t,k) + last=last+1 + t[k]=last + return last + end + ) + local descriptions=data.descriptions + local resources=data.resources + for u,d in next,descriptions do + local slookups=d.slookups + if type(slookups)=="table" then + local s={} + for k,v in next,slookups do + s[tags[k]]=v + end + 
d.slookups=s + end + local mlookups=d.mlookups + if type(mlookups)=="table" then + local m={} + for k,v in next,mlookups do + m[tags[k]]=v + end + d.mlookups=m + end + local kerns=d.kerns + if type(kerns)=="table" then + local t={} + for k,v in next,kerns do + t[tags[k]]=v + end + d.kerns=t + end + end + local lookups=data.lookups + if lookups then + local l={} + for k,v in next,lookups do + local rules=v.rules + if rules then + for i=1,#rules do + local l=rules[i].lookups + if type(l)=="table" then + for i=1,#l do + l[i]=tags[l[i]] + end + end + end + end + l[tags[k]]=v + end + data.lookups=l + end + local lookups=resources.lookups + if lookups then + local l={} + for k,v in next,lookups do + local s=v.subtables + if type(s)=="table" then + for i=1,#s do + s[i]=tags[s[i]] + end + end + l[tags[k]]=v + end + resources.lookups=l + end + local sequences=resources.sequences + if sequences then + for i=1,#sequences do + local s=sequences[i] + local n=s.name + if n then + s.name=tags[n] + end + local t=s.subtables + if type(t)=="table" then + for i=1,#t do + t[i]=tags[t[i]] + end + end + end + end + local lookuptypes=resources.lookuptypes + if lookuptypes then + local l={} + for k,v in next,lookuptypes do + l[tags[k]]=v + end + resources.lookuptypes=l + end + local anchor_to_lookup=resources.anchor_to_lookup + if anchor_to_lookup then + for anchor,lookups in next,anchor_to_lookup do + local l={} + for lookup,value in next,lookups do + l[tags[lookup]]=value + end + anchor_to_lookup[anchor]=l + end + end + local lookup_to_anchor=resources.lookup_to_anchor + if lookup_to_anchor then + local l={} + for lookup,value in next,lookup_to_anchor do + l[tags[lookup]]=value + end + resources.lookup_to_anchor=l + end + tags=table.swapped(tags) + report_otf("%s lookup tags compacted",#tags) + resources.lookuptags=tags +end +function otf.setfeatures(tfmdata,features) + local okay=constructors.initializefeatures("otf",tfmdata,features,trace_features,report_otf) + if okay then + return constructors.collectprocessors("otf",tfmdata,features,trace_features,report_otf) + else + return {} + end +end +local function copytotfm(data,cache_id) + if data then + local metadata=data.metadata + local warnings=data.warnings + local resources=data.resources + local properties=derivetable(data.properties) + local descriptions=derivetable(data.descriptions) + local goodies=derivetable(data.goodies) + local characters={} + local parameters={} + local mathparameters={} + local pfminfo=metadata.pfminfo or {} + local resources=data.resources + local unicodes=resources.unicodes + local spaceunits=500 + local spacer="space" + local designsize=metadata.designsize or metadata.design_size or 100 + local mathspecs=metadata.math + if designsize==0 then + designsize=100 + end + if mathspecs then + for name,value in next,mathspecs do + mathparameters[name]=value + end + end + for unicode,_ in next,data.descriptions do + characters[unicode]={} + end + if mathspecs then + for unicode,character in next,characters do + local d=descriptions[unicode] + local m=d.math + if m then + local variants=m.horiz_variants + local parts=m.horiz_parts + if variants then + local c=character + for i=1,#variants do + local un=variants[i] + c.next=un + c=characters[un] + end + c.horiz_variants=parts + elseif parts then + character.horiz_variants=parts + end + local variants=m.vert_variants + local parts=m.vert_parts + if variants then + local c=character + for i=1,#variants do + local un=variants[i] + c.next=un + c=characters[un] + end + c.vert_variants=parts + 
elseif parts then + character.vert_variants=parts + end + local italic_correction=m.vert_italic_correction + if italic_correction then + character.vert_italic_correction=italic_correction + end + local top_accent=m.top_accent + if top_accent then + character.top_accent=top_accent + end + local kerns=m.kerns + if kerns then + character.mathkerns=kerns + end + end + end + end + local filename=constructors.checkedfilename(resources) + local fontname=metadata.fontname + local fullname=metadata.fullname or fontname + local psname=fontname or fullname + local units=metadata.units_per_em or 1000 + if units==0 then + units=1000 + metadata.units_per_em=1000 + report_otf("changing %a units to %a",0,units) + end + local monospaced=metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose.proportion=="Monospaced") + local charwidth=pfminfo.avgwidth + local charxheight=pfminfo.os2_xheight and pfminfo.os2_xheight>0 and pfminfo.os2_xheight + local italicangle=metadata.italicangle + properties.monospaced=monospaced + parameters.italicangle=italicangle + parameters.charwidth=charwidth + parameters.charxheight=charxheight + local space=0x0020 + local emdash=0x2014 + if monospaced then + if descriptions[space] then + spaceunits,spacer=descriptions[space].width,"space" + end + if not spaceunits and descriptions[emdash] then + spaceunits,spacer=descriptions[emdash].width,"emdash" + end + if not spaceunits and charwidth then + spaceunits,spacer=charwidth,"charwidth" + end + else + if descriptions[space] then + spaceunits,spacer=descriptions[space].width,"space" + end + if not spaceunits and descriptions[emdash] then + spaceunits,spacer=descriptions[emdash].width/2,"emdash/2" + end + if not spaceunits and charwidth then + spaceunits,spacer=charwidth,"charwidth" + end + end + spaceunits=tonumber(spaceunits) or 500 + parameters.slant=0 + parameters.space=spaceunits + parameters.space_stretch=units/2 + parameters.space_shrink=1*units/3 + parameters.x_height=2*units/5 + parameters.quad=units + if spaceunits<2*units/5 then + end + if italicangle and italicangle~=0 then + parameters.italicangle=italicangle + parameters.italicfactor=math.cos(math.rad(90+italicangle)) + parameters.slant=- math.tan(italicangle*math.pi/180) + end + if monospaced then + parameters.space_stretch=0 + parameters.space_shrink=0 + elseif syncspace then + parameters.space_stretch=spaceunits/2 + parameters.space_shrink=spaceunits/3 + end + parameters.extra_space=parameters.space_shrink + if charxheight then + parameters.x_height=charxheight + else + local x=0x0078 + if x then + local x=descriptions[x] + if x then + parameters.x_height=x.height + end + end + end + parameters.designsize=(designsize/10)*65536 + parameters.ascender=abs(metadata.ascent or 0) + parameters.descender=abs(metadata.descent or 0) + parameters.units=units + properties.space=spacer + properties.encodingbytes=2 + properties.format=data.format or otf_format(filename) or formats.otf + properties.noglyphnames=true + properties.filename=filename + properties.fontname=fontname + properties.fullname=fullname + properties.psname=psname + properties.name=filename or fullname + if warnings and #warnings>0 then + report_otf("warnings for font: %s",filename) + report_otf() + for i=1,#warnings do + report_otf(" %s",warnings[i]) + end + report_otf() + end + return { + characters=characters, + descriptions=descriptions, + parameters=parameters, + mathparameters=mathparameters, + resources=resources, + properties=properties, + goodies=goodies, + warnings=warnings, + } + end +end +local 
function otftotfm(specification) + local cache_id=specification.hash + local tfmdata=containers.read(constructors.cache,cache_id) + if not tfmdata then + local name=specification.name + local sub=specification.sub + local filename=specification.filename + local features=specification.features.normal + local rawdata=otf.load(filename,sub,features and features.featurefile) + if rawdata and next(rawdata) then + local descriptions=rawdata.descriptions + local duplicates=rawdata.resources.duplicates + if duplicates then + local nofduplicates,nofduplicated=0,0 + for parent,list in next,duplicates do + if type(list)=="table" then + local n=#list + for i=1,n do + local unicode=list[i] + if not descriptions[unicode] then + descriptions[unicode]=descriptions[parent] + nofduplicated=nofduplicated+1 + end + end + nofduplicates=nofduplicates+n + else + if not descriptions[list] then + descriptions[list]=descriptions[parent] + nofduplicated=nofduplicated+1 + end + nofduplicates=nofduplicates+1 + end + end + if trace_otf and nofduplicated~=nofduplicates then + report_otf("%i extra duplicates copied out of %i",nofduplicated,nofduplicates) + end + end + rawdata.lookuphash={} + tfmdata=copytotfm(rawdata,cache_id) + if tfmdata and next(tfmdata) then + local features=constructors.checkedfeatures("otf",features) + local shared=tfmdata.shared + if not shared then + shared={} + tfmdata.shared=shared + end + shared.rawdata=rawdata + shared.dynamics={} + tfmdata.changed={} + shared.features=features + shared.processes=otf.setfeatures(tfmdata,features) + end + end + containers.write(constructors.cache,cache_id,tfmdata) + end + return tfmdata +end +local function read_from_otf(specification) + local tfmdata=otftotfm(specification) + if tfmdata then + tfmdata.properties.name=specification.name + tfmdata.properties.sub=specification.sub + tfmdata=constructors.scale(tfmdata,specification) + local allfeatures=tfmdata.shared.features or specification.features.normal + constructors.applymanipulators("otf",tfmdata,allfeatures,trace_features,report_otf) + constructors.setname(tfmdata,specification) + fonts.loggers.register(tfmdata,file.suffix(specification.filename),specification) + end + return tfmdata +end +local function checkmathsize(tfmdata,mathsize) + local mathdata=tfmdata.shared.rawdata.metadata.math + local mathsize=tonumber(mathsize) + if mathdata then + local parameters=tfmdata.parameters + parameters.scriptpercentage=mathdata.ScriptPercentScaleDown + parameters.scriptscriptpercentage=mathdata.ScriptScriptPercentScaleDown + parameters.mathsize=mathsize + end +end +registerotffeature { + name="mathsize", + description="apply mathsize specified in the font", + initializers={ + base=checkmathsize, + node=checkmathsize, + } +} +function otf.collectlookups(rawdata,kind,script,language) + local sequences=rawdata.resources.sequences + if sequences then + local featuremap,featurelist={},{} + for s=1,#sequences do + local sequence=sequences[s] + local features=sequence.features + features=features and features[kind] + features=features and (features[script] or features[default] or features[wildcard]) + features=features and (features[language] or features[default] or features[wildcard]) + if features then + local subtables=sequence.subtables + if subtables then + for s=1,#subtables do + local ss=subtables[s] + if not featuremap[s] then + featuremap[ss]=true + featurelist[#featurelist+1]=ss + end + end + end + end + end + if #featurelist>0 then + return featuremap,featurelist + end + end + return nil,nil +end +local 
function check_otf(forced,specification,suffix) + local name=specification.name + if forced then + name=specification.forcedname + end + local fullname=findbinfile(name,suffix) or "" + if fullname=="" then + fullname=fonts.names.getfilename(name,suffix) or "" + end + if fullname~="" and not fonts.names.ignoredfile(fullname) then + specification.filename=fullname + return read_from_otf(specification) + end +end +local function opentypereader(specification,suffix) + local forced=specification.forced or "" + if formats[forced] then + return check_otf(true,specification,forced) + else + return check_otf(false,specification,suffix) + end +end +readers.opentype=opentypereader +function readers.otf (specification) return opentypereader(specification,"otf") end +function readers.ttf (specification) return opentypereader(specification,"ttf") end +function readers.ttc (specification) return opentypereader(specification,"ttf") end +function readers.dfont(specification) return opentypereader(specification,"ttf") end +function otf.scriptandlanguage(tfmdata,attr) + local properties=tfmdata.properties + return properties.script or "dflt",properties.language or "dflt" +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-otb']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local concat=table.concat +local format,gmatch,gsub,find,match,lower,strip=string.format,string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip +local type,next,tonumber,tostring,rawget=type,next,tonumber,tostring,rawget +local lpegmatch=lpeg.match +local utfchar=utf.char +local trace_baseinit=false trackers.register("otf.baseinit",function(v) trace_baseinit=v end) +local trace_singles=false trackers.register("otf.singles",function(v) trace_singles=v end) +local trace_multiples=false trackers.register("otf.multiples",function(v) trace_multiples=v end) +local trace_alternatives=false trackers.register("otf.alternatives",function(v) trace_alternatives=v end) +local trace_ligatures=false trackers.register("otf.ligatures",function(v) trace_ligatures=v end) +local trace_ligatures_detail=false trackers.register("otf.ligatures.detail",function(v) trace_ligatures_detail=v end) +local trace_kerns=false trackers.register("otf.kerns",function(v) trace_kerns=v end) +local trace_preparing=false trackers.register("otf.preparing",function(v) trace_preparing=v end) +local report_prepare=logs.reporter("fonts","otf prepare") +local fonts=fonts +local otf=fonts.handlers.otf +local otffeatures=otf.features +local registerotffeature=otffeatures.register +otf.defaultbasealternate="none" +local wildcard="*" +local default="dflt" +local formatters=string.formatters +local f_unicode=formatters["%U"] +local f_uniname=formatters["%U (%s)"] +local f_unilist=formatters["% t (% t)"] +local function gref(descriptions,n) + if type(n)=="number" then + local name=descriptions[n].name + if name then + return f_uniname(n,name) + else + return f_unicode(n) + end + elseif n then + local num,nam,j={},{},0 + for i=1,#n do + local ni=n[i] + if tonumber(ni) then + j=j+1 + local di=descriptions[ni] + num[j]=f_unicode(ni) + nam[j]=di and di.name or "-" + end + end + return f_unilist(num,nam) + else + return "" + end +end +local function cref(feature,lookuptags,lookupname) + if lookupname then + return 
formatters["feature %a, lookup %a"](feature,lookuptags[lookupname]) + else + return formatters["feature %a"](feature) + end +end +local function report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,comment) + report_prepare("%s: base alternate %s => %s (%S => %S)", + cref(feature,lookuptags,lookupname), + gref(descriptions,unicode), + replacement and gref(descriptions,replacement), + value, + comment) +end +local function report_substitution(feature,lookuptags,lookupname,descriptions,unicode,substitution) + report_prepare("%s: base substitution %s => %S", + cref(feature,lookuptags,lookupname), + gref(descriptions,unicode), + gref(descriptions,substitution)) +end +local function report_ligature(feature,lookuptags,lookupname,descriptions,unicode,ligature) + report_prepare("%s: base ligature %s => %S", + cref(feature,lookuptags,lookupname), + gref(descriptions,ligature), + gref(descriptions,unicode)) +end +local function report_kern(feature,lookuptags,lookupname,descriptions,unicode,otherunicode,value) + report_prepare("%s: base kern %s + %s => %S", + cref(feature,lookuptags,lookupname), + gref(descriptions,unicode), + gref(descriptions,otherunicode), + value) +end +local basemethods={} +local basemethod="" +local function applybasemethod(what,...) + local m=basemethods[basemethod][what] + if m then + return m(...) + end +end +local basehash,basehashes,applied={},1,{} +local function registerbasehash(tfmdata) + local properties=tfmdata.properties + local hash=concat(applied," ") + local base=basehash[hash] + if not base then + basehashes=basehashes+1 + base=basehashes + basehash[hash]=base + end + properties.basehash=base + properties.fullname=properties.fullname.."-"..base + applied={} +end +local function registerbasefeature(feature,value) + applied[#applied+1]=feature.."="..tostring(value) +end +local trace=false +local function finalize_ligatures(tfmdata,ligatures) + local nofligatures=#ligatures + if nofligatures>0 then + local characters=tfmdata.characters + local descriptions=tfmdata.descriptions + local resources=tfmdata.resources + local unicodes=resources.unicodes + local private=resources.private + local alldone=false + while not alldone do + local done=0 + for i=1,nofligatures do + local ligature=ligatures[i] + if ligature then + local unicode,lookupdata=ligature[1],ligature[2] + if trace_ligatures_detail then + report_prepare("building % a into %a",lookupdata,unicode) + end + local size=#lookupdata + local firstcode=lookupdata[1] + local firstdata=characters[firstcode] + local okay=false + if firstdata then + local firstname="ctx_"..firstcode + for i=1,size-1 do + local firstdata=characters[firstcode] + if not firstdata then + firstcode=private + if trace_ligatures_detail then + report_prepare("defining %a as %a",firstname,firstcode) + end + unicodes[firstname]=firstcode + firstdata={ intermediate=true,ligatures={} } + characters[firstcode]=firstdata + descriptions[firstcode]={ name=firstname } + private=private+1 + end + local target + local secondcode=lookupdata[i+1] + local secondname=firstname.."_"..secondcode + if i==size-1 then + target=unicode + if not rawget(unicodes,secondname) then + unicodes[secondname]=unicode + end + okay=true + else + target=rawget(unicodes,secondname) + if not target then + break + end + end + if trace_ligatures_detail then + report_prepare("codes (%a,%a) + (%a,%a) -> %a",firstname,firstcode,secondname,secondcode,target) + end + local firstligs=firstdata.ligatures + if firstligs then + firstligs[secondcode]={ 
char=target } + else + firstdata.ligatures={ [secondcode]={ char=target } } + end + firstcode=target + firstname=secondname + end + elseif trace_ligatures_detail then + report_prepare("no glyph (%a,%a) for building %a",firstname,firstcode,target) + end + if okay then + ligatures[i]=false + done=done+1 + end + end + end + alldone=done==0 + end + if trace_ligatures_detail then + for k,v in table.sortedhash(characters) do + if v.ligatures then + table.print(v,k) + end + end + end + resources.private=private + return true + end +end +local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist) + local characters=tfmdata.characters + local descriptions=tfmdata.descriptions + local resources=tfmdata.resources + local properties=tfmdata.properties + local changed=tfmdata.changed + local lookuphash=resources.lookuphash + local lookuptypes=resources.lookuptypes + local lookuptags=resources.lookuptags + local ligatures={} + local alternate=tonumber(value) or true and 1 + local defaultalt=otf.defaultbasealternate + local trace_singles=trace_baseinit and trace_singles + local trace_alternatives=trace_baseinit and trace_alternatives + local trace_ligatures=trace_baseinit and trace_ligatures + local actions={ + substitution=function(lookupdata,lookuptags,lookupname,description,unicode) + if trace_singles then + report_substitution(feature,lookuptags,lookupname,descriptions,unicode,lookupdata) + end + changed[unicode]=lookupdata + end, + alternate=function(lookupdata,lookuptags,lookupname,description,unicode) + local replacement=lookupdata[alternate] + if replacement then + changed[unicode]=replacement + if trace_alternatives then + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"normal") + end + elseif defaultalt=="first" then + replacement=lookupdata[1] + changed[unicode]=replacement + if trace_alternatives then + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt) + end + elseif defaultalt=="last" then + replacement=lookupdata[#data] + if trace_alternatives then + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt) + end + else + if trace_alternatives then + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"unknown") + end + end + end, + ligature=function(lookupdata,lookuptags,lookupname,description,unicode) + if trace_ligatures then + report_ligature(feature,lookuptags,lookupname,descriptions,unicode,lookupdata) + end + ligatures[#ligatures+1]={ unicode,lookupdata } + end, + } + for unicode,character in next,characters do + local description=descriptions[unicode] + local lookups=description.slookups + if lookups then + for l=1,#lookuplist do + local lookupname=lookuplist[l] + local lookupdata=lookups[lookupname] + if lookupdata then + local lookuptype=lookuptypes[lookupname] + local action=actions[lookuptype] + if action then + action(lookupdata,lookuptags,lookupname,description,unicode) + end + end + end + end + local lookups=description.mlookups + if lookups then + for l=1,#lookuplist do + local lookupname=lookuplist[l] + local lookuplist=lookups[lookupname] + if lookuplist then + local lookuptype=lookuptypes[lookupname] + local action=actions[lookuptype] + if action then + for i=1,#lookuplist do + action(lookuplist[i],lookuptags,lookupname,description,unicode) + end + end + end + end + end + end + properties.hasligatures=finalize_ligatures(tfmdata,ligatures) +end +local function 
preparepositionings(tfmdata,feature,value,validlookups,lookuplist) + local characters=tfmdata.characters + local descriptions=tfmdata.descriptions + local resources=tfmdata.resources + local properties=tfmdata.properties + local lookuptags=resources.lookuptags + local sharedkerns={} + local traceindeed=trace_baseinit and trace_kerns + local haskerns=false + for unicode,character in next,characters do + local description=descriptions[unicode] + local rawkerns=description.kerns + if rawkerns then + local s=sharedkerns[rawkerns] + if s==false then + elseif s then + character.kerns=s + else + local newkerns=character.kerns + local done=false + for l=1,#lookuplist do + local lookup=lookuplist[l] + local kerns=rawkerns[lookup] + if kerns then + for otherunicode,value in next,kerns do + if value==0 then + elseif not newkerns then + newkerns={ [otherunicode]=value } + done=true + if traceindeed then + report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,value) + end + elseif not newkerns[otherunicode] then + newkerns[otherunicode]=value + done=true + if traceindeed then + report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,value) + end + end + end + end + end + if done then + sharedkerns[rawkerns]=newkerns + character.kerns=newkerns + haskerns=true + else + sharedkerns[rawkerns]=false + end + end + end + end + properties.haskerns=haskerns +end +basemethods.independent={ + preparesubstitutions=preparesubstitutions, + preparepositionings=preparepositionings, +} +local function makefake(tfmdata,name,present) + local resources=tfmdata.resources + local private=resources.private + local character={ intermediate=true,ligatures={} } + resources.unicodes[name]=private + tfmdata.characters[private]=character + tfmdata.descriptions[private]={ name=name } + resources.private=private+1 + present[name]=private + return character +end +local function make_1(present,tree,name) + for k,v in next,tree do + if k=="ligature" then + present[name]=v + else + make_1(present,v,name.."_"..k) + end + end +end +local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,done,lookuptags,lookupname) + for k,v in next,tree do + if k=="ligature" then + local character=characters[preceding] + if not character then + if trace_baseinit then + report_prepare("weird ligature in lookup %a, current %C, preceding %C",lookuptags[lookupname],v,preceding) + end + character=makefake(tfmdata,name,present) + end + local ligatures=character.ligatures + if ligatures then + ligatures[unicode]={ char=v } + else + character.ligatures={ [unicode]={ char=v } } + end + if done then + local d=done[lookupname] + if not d then + done[lookupname]={ "dummy",v } + else + d[#d+1]=v + end + end + else + local code=present[name] or unicode + local name=name.."_"..k + make_2(present,tfmdata,characters,v,name,code,k,done,lookuptags,lookupname) + end + end +end +local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist) + local characters=tfmdata.characters + local descriptions=tfmdata.descriptions + local resources=tfmdata.resources + local changed=tfmdata.changed + local lookuphash=resources.lookuphash + local lookuptypes=resources.lookuptypes + local lookuptags=resources.lookuptags + local ligatures={} + local alternate=tonumber(value) or true and 1 + local defaultalt=otf.defaultbasealternate + local trace_singles=trace_baseinit and trace_singles + local trace_alternatives=trace_baseinit and trace_alternatives + local trace_ligatures=trace_baseinit and trace_ligatures + for 
l=1,#lookuplist do + local lookupname=lookuplist[l] + local lookupdata=lookuphash[lookupname] + local lookuptype=lookuptypes[lookupname] + for unicode,data in next,lookupdata do + if lookuptype=="substitution" then + if trace_singles then + report_substitution(feature,lookuptags,lookupname,descriptions,unicode,data) + end + changed[unicode]=data + elseif lookuptype=="alternate" then + local replacement=data[alternate] + if replacement then + changed[unicode]=replacement + if trace_alternatives then + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"normal") + end + elseif defaultalt=="first" then + replacement=data[1] + changed[unicode]=replacement + if trace_alternatives then + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt) + end + elseif defaultalt=="last" then + replacement=data[#data] + if trace_alternatives then + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt) + end + else + if trace_alternatives then + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"unknown") + end + end + elseif lookuptype=="ligature" then + ligatures[#ligatures+1]={ unicode,data,lookupname } + if trace_ligatures then + report_ligature(feature,lookuptags,lookupname,descriptions,unicode,data) + end + end + end + end + local nofligatures=#ligatures + if nofligatures>0 then + local characters=tfmdata.characters + local present={} + local done=trace_baseinit and trace_ligatures and {} + for i=1,nofligatures do + local ligature=ligatures[i] + local unicode,tree=ligature[1],ligature[2] + make_1(present,tree,"ctx_"..unicode) + end + for i=1,nofligatures do + local ligature=ligatures[i] + local unicode,tree,lookupname=ligature[1],ligature[2],ligature[3] + make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,lookuptags,lookupname) + end + end +end +local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist) + local characters=tfmdata.characters + local descriptions=tfmdata.descriptions + local resources=tfmdata.resources + local properties=tfmdata.properties + local lookuphash=resources.lookuphash + local lookuptags=resources.lookuptags + local traceindeed=trace_baseinit and trace_kerns + for l=1,#lookuplist do + local lookupname=lookuplist[l] + local lookupdata=lookuphash[lookupname] + for unicode,data in next,lookupdata do + local character=characters[unicode] + local kerns=character.kerns + if not kerns then + kerns={} + character.kerns=kerns + end + if traceindeed then + for otherunicode,kern in next,data do + if not kerns[otherunicode] and kern~=0 then + kerns[otherunicode]=kern + report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,kern) + end + end + else + for otherunicode,kern in next,data do + if not kerns[otherunicode] and kern~=0 then + kerns[otherunicode]=kern + end + end + end + end + end +end +local function initializehashes(tfmdata) + nodeinitializers.features(tfmdata) +end +basemethods.shared={ + initializehashes=initializehashes, + preparesubstitutions=preparesubstitutions, + preparepositionings=preparepositionings, +} +basemethod="independent" +local function featuresinitializer(tfmdata,value) + if true then + local starttime=trace_preparing and os.clock() + local features=tfmdata.shared.features + local fullname=tfmdata.properties.fullname or "?" 
+ if features then + applybasemethod("initializehashes",tfmdata) + local collectlookups=otf.collectlookups + local rawdata=tfmdata.shared.rawdata + local properties=tfmdata.properties + local script=properties.script + local language=properties.language + local basesubstitutions=rawdata.resources.features.gsub + local basepositionings=rawdata.resources.features.gpos + if basesubstitutions or basepositionings then + local sequences=tfmdata.resources.sequences + for s=1,#sequences do + local sequence=sequences[s] + local sfeatures=sequence.features + if sfeatures then + local order=sequence.order + if order then + for i=1,#order do + local feature=order[i] + local value=features[feature] + if value then + local validlookups,lookuplist=collectlookups(rawdata,feature,script,language) + if not validlookups then + elseif basesubstitutions and basesubstitutions[feature] then + if trace_preparing then + report_prepare("filtering base %s feature %a for %a with value %a","sub",feature,fullname,value) + end + applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist) + registerbasefeature(feature,value) + elseif basepositionings and basepositionings[feature] then + if trace_preparing then + report_prepare("filtering base %a feature %a for %a with value %a","pos",feature,fullname,value) + end + applybasemethod("preparepositionings",tfmdata,feature,value,validlookups,lookuplist) + registerbasefeature(feature,value) + end + end + end + end + end + end + end + registerbasehash(tfmdata) + end + if trace_preparing then + report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,fullname) + end + end +end +registerotffeature { + name="features", + description="features", + default=true, + initializers={ + base=featuresinitializer, + } +} +directives.register("fonts.otf.loader.basemethod",function(v) + if basemethods[v] then + basemethod=v + end +end) + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['node-inj']={ + version=1.001, + comment="companion to node-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files", +} +local next=next +local utfchar=utf.char +local trace_injections=false trackers.register("nodes.injections",function(v) trace_injections=v end) +local report_injections=logs.reporter("nodes","injections") +local attributes,nodes,node=attributes,nodes,node +fonts=fonts +local fontdata=fonts.hashes.identifiers +nodes.injections=nodes.injections or {} +local injections=nodes.injections +local nodecodes=nodes.nodecodes +local glyph_code=nodecodes.glyph +local kern_code=nodecodes.kern +local nuts=nodes.nuts +local nodepool=nuts.pool +local newkern=nodepool.kern +local tonode=nuts.tonode +local tonut=nuts.tonut +local getfield=nuts.getfield +local getnext=nuts.getnext +local getprev=nuts.getprev +local getid=nuts.getid +local getattr=nuts.getattr +local getfont=nuts.getfont +local getsubtype=nuts.getsubtype +local getchar=nuts.getchar +local setfield=nuts.setfield +local setattr=nuts.setattr +local traverse_id=nuts.traverse_id +local insert_node_before=nuts.insert_before +local insert_node_after=nuts.insert_after +local a_kernpair=attributes.private('kernpair') +local a_ligacomp=attributes.private('ligacomp') +local a_markbase=attributes.private('markbase') +local a_markmark=attributes.private('markmark') +local a_markdone=attributes.private('markdone') +local 
a_cursbase=attributes.private('cursbase') +local a_curscurs=attributes.private('curscurs') +local a_cursdone=attributes.private('cursdone') +local unsetvalue=attributes.unsetvalue +function injections.installnewkern(nk) + newkern=nk or newkern +end +local cursives={} +local marks={} +local kerns={} +function injections.reset(n) +end +function injections.setligaindex(n,index) + setattr(n,a_ligacomp,index) +end +function injections.getligaindex(n,default) + return getattr(n,a_ligacomp) or default +end +function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext) + local dx,dy=factor*(exit[1]-entry[1]),factor*(exit[2]-entry[2]) + local ws,wn=tfmstart.width,tfmnext.width + local bound=#cursives+1 + setattr(start,a_cursbase,bound) + setattr(nxt,a_curscurs,bound) + cursives[bound]={ rlmode,dx,dy,ws,wn } + return dx,dy,bound +end +function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr) + local x,y,w,h=factor*spec[1],factor*spec[2],factor*spec[3],factor*spec[4] + if x~=0 or w~=0 or y~=0 or h~=0 then + local bound=getattr(current,a_kernpair) + if bound then + local kb=kerns[bound] + kb[2],kb[3],kb[4],kb[5]=(kb[2] or 0)+x,(kb[3] or 0)+y,(kb[4] or 0)+w,(kb[5] or 0)+h + else + bound=#kerns+1 + setattr(current,a_kernpair,bound) + kerns[bound]={ rlmode,x,y,w,h,r2lflag,tfmchr.width } + end + return x,y,w,h,bound + end + return x,y,w,h +end +function injections.setkern(current,factor,rlmode,x,tfmchr) + local dx=factor*x + if dx~=0 then + local bound=#kerns+1 + setattr(current,a_kernpair,bound) + kerns[bound]={ rlmode,dx } + return dx,bound + else + return 0,0 + end +end +function injections.setmark(start,base,factor,rlmode,ba,ma) + local dx,dy=factor*(ba[1]-ma[1]),factor*(ba[2]-ma[2]) + local bound=getattr(base,a_markbase) + local index=1 + if bound then + local mb=marks[bound] + if mb then + index=#mb+1 + mb[index]={ dx,dy,rlmode } + setattr(start,a_markmark,bound) + setattr(start,a_markdone,index) + return dx,dy,bound + else + report_injections("possible problem, %U is base mark without data (id %a)",getchar(base),bound) + end + end + index=index or 1 + bound=#marks+1 + setattr(base,a_markbase,bound) + setattr(start,a_markmark,bound) + setattr(start,a_markdone,index) + marks[bound]={ [index]={ dx,dy,rlmode } } + return dx,dy,bound +end +local function dir(n) + return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset" +end +local function trace(head) + report_injections("begin run") + for n in traverse_id(glyph_code,head) do + if getsubtype(n)<256 then + local kp=getattr(n,a_kernpair) + local mb=getattr(n,a_markbase) + local mm=getattr(n,a_markmark) + local md=getattr(n,a_markdone) + local cb=getattr(n,a_cursbase) + local cc=getattr(n,a_curscurs) + local char=getchar(n) + report_injections("font %s, char %U, glyph %c",getfont(n),char,char) + if kp then + local k=kerns[kp] + if k[3] then + report_injections(" pairkern: dir %a, x %p, y %p, w %p, h %p",dir(k[1]),k[2],k[3],k[4],k[5]) + else + report_injections(" kern: dir %a, dx %p",dir(k[1]),k[2]) + end + end + if mb then + report_injections(" markbase: bound %a",mb) + end + if mm then + local m=marks[mm] + if mb then + local m=m[mb] + if m then + report_injections(" markmark: bound %a, index %a, dx %p, dy %p",mm,md,m[1],m[2]) + else + report_injections(" markmark: bound %a, missing index",mm) + end + else + m=m[1] + report_injections(" markmark: bound %a, dx %p, dy %p",mm,m and m[1],m and m[2]) + end + end + if cb then + report_injections(" cursbase: bound %a",cb) + end + if cc then + local c=cursives[cc] 
+ report_injections(" curscurs: bound %a, dir %a, dx %p, dy %p",cc,dir(c[1]),c[2],c[3]) + end + end + end + report_injections("end run") +end +local function show_result(head) + local current=head + local skipping=false + while current do + local id=getid(current) + if id==glyph_code then + report_injections("char: %C, width %p, xoffset %p, yoffset %p", + getchar(current),getfield(current,"width"),getfield(current,"xoffset"),getfield(current,"yoffset")) + skipping=false + elseif id==kern_code then + report_injections("kern: %p",getfield(current,"kern")) + skipping=false + elseif not skipping then + report_injections() + skipping=true + end + current=getnext(current) + end +end +function injections.handler(head,where,keep) + head=tonut(head) + local has_marks,has_cursives,has_kerns=next(marks),next(cursives),next(kerns) + if has_marks or has_cursives then + if trace_injections then + trace(head) + end + local done,ky,rl,valid,cx,wx,mk,nofvalid=false,{},{},{},{},{},{},0 + if has_kerns then + local nf,tm=nil,nil + for n in traverse_id(glyph_code,head) do + if getsubtype(n)<256 then + nofvalid=nofvalid+1 + valid[nofvalid]=n + local f=getfont(n) + if f~=nf then + nf=f + tm=fontdata[nf].resources.marks + end + if tm then + mk[n]=tm[getchar(n)] + end + local k=getattr(n,a_kernpair) + if k then + local kk=kerns[k] + if kk then + local x,y,w,h=kk[2] or 0,kk[3] or 0,kk[4] or 0,kk[5] or 0 + local dy=y-h + if dy~=0 then + ky[n]=dy + end + if w~=0 or x~=0 then + wx[n]=kk + end + rl[n]=kk[1] + end + end + end + end + else + local nf,tm=nil,nil + for n in traverse_id(glyph_code,head) do + if getsubtype(n)<256 then + nofvalid=nofvalid+1 + valid[nofvalid]=n + local f=getfont(n) + if f~=nf then + nf=f + tm=fontdata[nf].resources.marks + end + if tm then + mk[n]=tm[getchar(n)] + end + end + end + end + if nofvalid>0 then + local cx={} + if has_kerns and next(ky) then + for n,k in next,ky do + setfield(n,"yoffset",k) + end + end + if has_cursives then + local p_cursbase,p=nil,nil + local t,d,maxt={},{},0 + for i=1,nofvalid do + local n=valid[i] + if not mk[n] then + local n_cursbase=getattr(n,a_cursbase) + if p_cursbase then + local n_curscurs=getattr(n,a_curscurs) + if p_cursbase==n_curscurs then + local c=cursives[n_curscurs] + if c then + local rlmode,dx,dy,ws,wn=c[1],c[2],c[3],c[4],c[5] + if rlmode>=0 then + dx=dx-ws + else + dx=dx+wn + end + if dx~=0 then + cx[n]=dx + rl[n]=rlmode + end + dy=-dy + maxt=maxt+1 + t[maxt]=p + d[maxt]=dy + else + maxt=0 + end + end + elseif maxt>0 then + local ny=getfield(n,"yoffset") + for i=maxt,1,-1 do + ny=ny+d[i] + local ti=t[i] + setfield(ti,"yoffset",getfield(ti,"yoffset")+ny) + end + maxt=0 + end + if not n_cursbase and maxt>0 then + local ny=getfield(n,"yoffset") + for i=maxt,1,-1 do + ny=ny+d[i] + local ti=t[i] + setfield(ti,"yoffset",ny) + end + maxt=0 + end + p_cursbase,p=n_cursbase,n + end + end + if maxt>0 then + local ny=getfield(n,"yoffset") + for i=maxt,1,-1 do + ny=ny+d[i] + local ti=t[i] + setfield(ti,"yoffset",ny) + end + maxt=0 + end + if not keep then + cursives={} + end + end + if has_marks then + for i=1,nofvalid do + local p=valid[i] + local p_markbase=getattr(p,a_markbase) + if p_markbase then + local mrks=marks[p_markbase] + local nofmarks=#mrks + for n in traverse_id(glyph_code,getnext(p)) do + local n_markmark=getattr(n,a_markmark) + if p_markbase==n_markmark then + local index=getattr(n,a_markdone) or 1 + local d=mrks[index] + if d then + local rlmode=d[3] + local k=wx[p] + local px=getfield(p,"xoffset") + local ox=0 + if k then + local x=k[2] + 
local w=k[4] + if w then + if rlmode and rlmode>=0 then + ox=px-getfield(p,"width")+d[1]-(w-x) + else + ox=px-d[1]-x + end + else + if rlmode and rlmode>=0 then + ox=px-getfield(p,"width")+d[1] + else + ox=px-d[1]-x + end + end + else + local wp=getfield(p,"width") + local wn=getfield(n,"width") + if rlmode and rlmode>=0 then + ox=px-wp+d[1] + else + ox=px-d[1] + end + if wn~=0 then + insert_node_before(head,n,newkern(-wn/2)) + insert_node_after(head,n,newkern(-wn/2)) + end + end + setfield(n,"xoffset",ox) + local py=getfield(p,"yoffset") + local oy=0 + if mk[p] then + oy=py+d[2] + else + oy=getfield(n,"yoffset")+py+d[2] + end + setfield(n,"yoffset",oy) + if nofmarks==1 then + break + else + nofmarks=nofmarks-1 + end + end + elseif not n_markmark then + break + else + end + end + end + end + if not keep then + marks={} + end + end + if next(wx) then + for n,k in next,wx do + local x=k[2] + local w=k[4] + if w then + local rl=k[1] + local wx=w-x + if rl<0 then + if wx~=0 then + insert_node_before(head,n,newkern(wx)) + end + if x~=0 then + insert_node_after (head,n,newkern(x)) + end + else + if x~=0 then + insert_node_before(head,n,newkern(x)) + end + if wx~=0 then + insert_node_after (head,n,newkern(wx)) + end + end + elseif x~=0 then + insert_node_before(head,n,newkern(x)) + end + end + end + if next(cx) then + for n,k in next,cx do + if k~=0 then + local rln=rl[n] + if rln and rln<0 then + insert_node_before(head,n,newkern(-k)) + else + insert_node_before(head,n,newkern(k)) + end + end + end + end + if not keep then + kerns={} + end + return tonode(head),true + elseif not keep then + kerns,cursives,marks={},{},{} + end + elseif has_kerns then + if trace_injections then + trace(head) + end + for n in traverse_id(glyph_code,head) do + if getsubtype(n)<256 then + local k=getattr(n,a_kernpair) + if k then + local kk=kerns[k] + if kk then + local rl,x,y,w=kk[1],kk[2] or 0,kk[3],kk[4] + if y and y~=0 then + setfield(n,"yoffset",y) + end + if w then + local wx=w-x + if rl<0 then + if wx~=0 then + insert_node_before(head,n,newkern(wx)) + end + if x~=0 then + insert_node_after (head,n,newkern(x)) + end + else + if x~=0 then + insert_node_before(head,n,newkern(x)) + end + if wx~=0 then + insert_node_after(head,n,newkern(wx)) + end + end + else + if x~=0 then + insert_node_before(head,n,newkern(x)) + end + end + end + end + end + end + if not keep then + kerns={} + end + return tonode(head),true + else + end + return tonode(head),false +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-otx']={ + version=1.001, + comment="companion to font-otf.lua (analysing)", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local type=type +if not trackers then trackers={ register=function() end } end +local fonts,nodes,node=fonts,nodes,node +local allocate=utilities.storage.allocate +local otf=fonts.handlers.otf +local analyzers=fonts.analyzers +local initializers=allocate() +local methods=allocate() +analyzers.initializers=initializers +analyzers.methods=methods +analyzers.useunicodemarks=false +local a_state=attributes.private('state') +local nuts=nodes.nuts +local tonut=nuts.tonut +local getfield=nuts.getfield +local getnext=nuts.getnext +local getprev=nuts.getprev +local getid=nuts.getid +local getprop=nuts.getprop +local setprop=nuts.setprop +local getfont=nuts.getfont +local getsubtype=nuts.getsubtype +local 
getchar=nuts.getchar +local traverse_id=nuts.traverse_id +local traverse_node_list=nuts.traverse +local end_of_math=nuts.end_of_math +local nodecodes=nodes.nodecodes +local glyph_code=nodecodes.glyph +local disc_code=nodecodes.disc +local math_code=nodecodes.math +local fontdata=fonts.hashes.identifiers +local categories=characters and characters.categories or {} +local otffeatures=fonts.constructors.newfeatures("otf") +local registerotffeature=otffeatures.register +local s_init=1 local s_rphf=7 +local s_medi=2 local s_half=8 +local s_fina=3 local s_pref=9 +local s_isol=4 local s_blwf=10 +local s_mark=5 local s_pstf=11 +local s_rest=6 +local states={ + init=s_init, + medi=s_medi, + fina=s_fina, + isol=s_isol, + mark=s_mark, + rest=s_rest, + rphf=s_rphf, + half=s_half, + pref=s_pref, + blwf=s_blwf, + pstf=s_pstf, +} +local features={ + init=s_init, + medi=s_medi, + fina=s_fina, + isol=s_isol, + rphf=s_rphf, + half=s_half, + pref=s_pref, + blwf=s_blwf, + pstf=s_pstf, +} +analyzers.states=states +analyzers.features=features +function analyzers.setstate(head,font) + local useunicodemarks=analyzers.useunicodemarks + local tfmdata=fontdata[font] + local descriptions=tfmdata.descriptions + local first,last,current,n,done=nil,nil,head,0,false + current=tonut(current) + while current do + local id=getid(current) + if id==glyph_code and getfont(current)==font then + done=true + local char=getchar(current) + local d=descriptions[char] + if d then + if d.class=="mark" or (useunicodemarks and categories[char]=="mn") then + done=true + setprop(current,a_state,s_mark) + elseif n==0 then + first,last,n=current,current,1 + setprop(current,a_state,s_init) + else + last,n=current,n+1 + setprop(current,a_state,s_medi) + end + else + if first and first==last then + setprop(last,a_state,s_isol) + elseif last then + setprop(last,a_state,s_fina) + end + first,last,n=nil,nil,0 + end + elseif id==disc_code then + setprop(current,a_state,s_medi) + last=current + else + if first and first==last then + setprop(last,a_state,s_isol) + elseif last then + setprop(last,a_state,s_fina) + end + first,last,n=nil,nil,0 + if id==math_code then + current=end_of_math(current) + end + end + current=getnext(current) + end + if first and first==last then + setprop(last,a_state,s_isol) + elseif last then + setprop(last,a_state,s_fina) + end + return head,done +end +local function analyzeinitializer(tfmdata,value) + local script,language=otf.scriptandlanguage(tfmdata) + local action=initializers[script] + if not action then + elseif type(action)=="function" then + return action(tfmdata,value) + else + local action=action[language] + if action then + return action(tfmdata,value) + end + end +end +local function analyzeprocessor(head,font,attr) + local tfmdata=fontdata[font] + local script,language=otf.scriptandlanguage(tfmdata,attr) + local action=methods[script] + if not action then + elseif type(action)=="function" then + return action(head,font,attr) + else + action=action[language] + if action then + return action(head,font,attr) + end + end + return head,false +end +registerotffeature { + name="analyze", + description="analysis of character classes", + default=true, + initializers={ + node=analyzeinitializer, + }, + processors={ + position=1, + node=analyzeprocessor, + } +} +methods.latn=analyzers.setstate +local tatweel=0x0640 +local zwnj=0x200C +local zwj=0x200D +local isolated={ + [0x0600]=true,[0x0601]=true,[0x0602]=true,[0x0603]=true, + [0x0604]=true, + [0x0608]=true,[0x060B]=true,[0x0621]=true,[0x0674]=true, + [0x06DD]=true, 
+ [0x0856]=true,[0x0858]=true,[0x0857]=true, + [0x07FA]=true, + [zwnj]=true, + [0x08AD]=true, +} +local final={ + [0x0622]=true,[0x0623]=true,[0x0624]=true,[0x0625]=true, + [0x0627]=true,[0x0629]=true,[0x062F]=true,[0x0630]=true, + [0x0631]=true,[0x0632]=true,[0x0648]=true,[0x0671]=true, + [0x0672]=true,[0x0673]=true,[0x0675]=true,[0x0676]=true, + [0x0677]=true,[0x0688]=true,[0x0689]=true,[0x068A]=true, + [0x068B]=true,[0x068C]=true,[0x068D]=true,[0x068E]=true, + [0x068F]=true,[0x0690]=true,[0x0691]=true,[0x0692]=true, + [0x0693]=true,[0x0694]=true,[0x0695]=true,[0x0696]=true, + [0x0697]=true,[0x0698]=true,[0x0699]=true,[0x06C0]=true, + [0x06C3]=true,[0x06C4]=true,[0x06C5]=true,[0x06C6]=true, + [0x06C7]=true,[0x06C8]=true,[0x06C9]=true,[0x06CA]=true, + [0x06CB]=true,[0x06CD]=true,[0x06CF]=true,[0x06D2]=true, + [0x06D3]=true,[0x06D5]=true,[0x06EE]=true,[0x06EF]=true, + [0x0759]=true,[0x075A]=true,[0x075B]=true,[0x076B]=true, + [0x076C]=true,[0x0771]=true,[0x0773]=true,[0x0774]=true, + [0x0778]=true,[0x0779]=true, + [0x08AA]=true,[0x08AB]=true,[0x08AC]=true, + [0xFEF5]=true,[0xFEF7]=true,[0xFEF9]=true,[0xFEFB]=true, + [0x0710]=true,[0x0715]=true,[0x0716]=true,[0x0717]=true, + [0x0718]=true,[0x0719]=true,[0x0728]=true,[0x072A]=true, + [0x072C]=true,[0x071E]=true, + [0x072F]=true,[0x074D]=true, + [0x0840]=true,[0x0849]=true,[0x0854]=true,[0x0846]=true, + [0x084F]=true, + [0x08AE]=true,[0x08B1]=true,[0x08B2]=true, +} +local medial={ + [0x0626]=true,[0x0628]=true,[0x062A]=true,[0x062B]=true, + [0x062C]=true,[0x062D]=true,[0x062E]=true,[0x0633]=true, + [0x0634]=true,[0x0635]=true,[0x0636]=true,[0x0637]=true, + [0x0638]=true,[0x0639]=true,[0x063A]=true,[0x063B]=true, + [0x063C]=true,[0x063D]=true,[0x063E]=true,[0x063F]=true, + [0x0641]=true,[0x0642]=true,[0x0643]=true, + [0x0644]=true,[0x0645]=true,[0x0646]=true,[0x0647]=true, + [0x0649]=true,[0x064A]=true,[0x066E]=true,[0x066F]=true, + [0x0678]=true,[0x0679]=true,[0x067A]=true,[0x067B]=true, + [0x067C]=true,[0x067D]=true,[0x067E]=true,[0x067F]=true, + [0x0680]=true,[0x0681]=true,[0x0682]=true,[0x0683]=true, + [0x0684]=true,[0x0685]=true,[0x0686]=true,[0x0687]=true, + [0x069A]=true,[0x069B]=true,[0x069C]=true,[0x069D]=true, + [0x069E]=true,[0x069F]=true,[0x06A0]=true,[0x06A1]=true, + [0x06A2]=true,[0x06A3]=true,[0x06A4]=true,[0x06A5]=true, + [0x06A6]=true,[0x06A7]=true,[0x06A8]=true,[0x06A9]=true, + [0x06AA]=true,[0x06AB]=true,[0x06AC]=true,[0x06AD]=true, + [0x06AE]=true,[0x06AF]=true,[0x06B0]=true,[0x06B1]=true, + [0x06B2]=true,[0x06B3]=true,[0x06B4]=true,[0x06B5]=true, + [0x06B6]=true,[0x06B7]=true,[0x06B8]=true,[0x06B9]=true, + [0x06BA]=true,[0x06BB]=true,[0x06BC]=true,[0x06BD]=true, + [0x06BE]=true,[0x06BF]=true,[0x06C1]=true,[0x06C2]=true, + [0x06CC]=true,[0x06CE]=true,[0x06D0]=true,[0x06D1]=true, + [0x06FA]=true,[0x06FB]=true,[0x06FC]=true,[0x06FF]=true, + [0x0750]=true,[0x0751]=true,[0x0752]=true,[0x0753]=true, + [0x0754]=true,[0x0755]=true,[0x0756]=true,[0x0757]=true, + [0x0758]=true,[0x075C]=true,[0x075D]=true,[0x075E]=true, + [0x075F]=true,[0x0760]=true,[0x0761]=true,[0x0762]=true, + [0x0763]=true,[0x0764]=true,[0x0765]=true,[0x0766]=true, + [0x0767]=true,[0x0768]=true,[0x0769]=true,[0x076A]=true, + [0x076D]=true,[0x076E]=true,[0x076F]=true,[0x0770]=true, + [0x0772]=true,[0x0775]=true,[0x0776]=true,[0x0777]=true, + [0x077A]=true,[0x077B]=true,[0x077C]=true,[0x077D]=true, + [0x077E]=true,[0x077F]=true, + [0x08A0]=true,[0x08A2]=true,[0x08A4]=true,[0x08A5]=true, + [0x08A6]=true,[0x0620]=true,[0x08A8]=true,[0x08A9]=true, + 
[0x08A7]=true,[0x08A3]=true, + [0x0712]=true,[0x0713]=true,[0x0714]=true,[0x071A]=true, + [0x071B]=true,[0x071C]=true,[0x071D]=true,[0x071F]=true, + [0x0720]=true,[0x0721]=true,[0x0722]=true,[0x0723]=true, + [0x0724]=true,[0x0725]=true,[0x0726]=true,[0x0727]=true, + [0x0729]=true,[0x072B]=true,[0x072D]=true,[0x072E]=true, + [0x074E]=true,[0x074F]=true, + [0x0841]=true,[0x0842]=true,[0x0843]=true,[0x0844]=true, + [0x0845]=true,[0x0847]=true,[0x0848]=true,[0x0855]=true, + [0x0851]=true,[0x084E]=true,[0x084D]=true,[0x084A]=true, + [0x084B]=true,[0x084C]=true,[0x0850]=true,[0x0852]=true, + [0x0853]=true, + [0x07D7]=true,[0x07E8]=true,[0x07D9]=true,[0x07EA]=true, + [0x07CA]=true,[0x07DB]=true,[0x07CC]=true,[0x07DD]=true, + [0x07CE]=true,[0x07DF]=true,[0x07D4]=true,[0x07E5]=true, + [0x07E9]=true,[0x07E7]=true,[0x07E3]=true,[0x07E2]=true, + [0x07E0]=true,[0x07E1]=true,[0x07DE]=true,[0x07DC]=true, + [0x07D1]=true,[0x07DA]=true,[0x07D8]=true,[0x07D6]=true, + [0x07D2]=true,[0x07D0]=true,[0x07CF]=true,[0x07CD]=true, + [0x07CB]=true,[0x07D3]=true,[0x07E4]=true,[0x07D5]=true, + [0x07E6]=true, + [tatweel]=true,[zwj]=true, + [0x08A1]=true,[0x08AF]=true,[0x08B0]=true, +} +local arab_warned={} +local function warning(current,what) + local char=getchar(current) + if not arab_warned[char] then + log.report("analyze","arab: character %C has no %a class",char,what) + arab_warned[char]=true + end +end +local function finish(first,last) + if last then + if first==last then + local fc=getchar(first) + if medial[fc] or final[fc] then + setprop(first,a_state,s_isol) + else + warning(first,"isol") + setprop(first,a_state,s_error) + end + else + local lc=getchar(last) + if medial[lc] or final[lc] then + setprop(last,a_state,s_fina) + else + warning(last,"fina") + setprop(last,a_state,s_error) + end + end + first,last=nil,nil + elseif first then + local fc=getchar(first) + if medial[fc] or final[fc] then + setprop(first,a_state,s_isol) + else + warning(first,"isol") + setprop(first,a_state,s_error) + end + first=nil + end + return first,last +end +function methods.arab(head,font,attr) + local useunicodemarks=analyzers.useunicodemarks + local tfmdata=fontdata[font] + local marks=tfmdata.resources.marks + local first,last,current,done=nil,nil,head,false + current=tonut(current) + while current do + local id=getid(current) + if id==glyph_code and getfont(current)==font and getsubtype(current)<256 and not getprop(current,a_state) then + done=true + local char=getchar(current) + if marks[char] or (useunicodemarks and categories[char]=="mn") then + setprop(current,a_state,s_mark) + elseif isolated[char] then + first,last=finish(first,last) + setprop(current,a_state,s_isol) + first,last=nil,nil + elseif not first then + if medial[char] then + setprop(current,a_state,s_init) + first,last=first or current,current + elseif final[char] then + setprop(current,a_state,s_isol) + first,last=nil,nil + else + first,last=finish(first,last) + end + elseif medial[char] then + first,last=first or current,current + setprop(current,a_state,s_medi) + elseif final[char] then + if getprop(last,a_state)~=s_init then + setprop(last,a_state,s_medi) + end + setprop(current,a_state,s_fina) + first,last=nil,nil + elseif char>=0x0600 and char<=0x06FF then + setprop(current,a_state,s_rest) + first,last=finish(first,last) + else + first,last=finish(first,last) + end + else + if first or last then + first,last=finish(first,last) + end + if id==math_code then + current=end_of_math(current) + end + end + current=getnext(current) + end + if first or last 
then + finish(first,last) + end + return head,done +end +methods.syrc=methods.arab +methods.mand=methods.arab +methods.nko=methods.arab +directives.register("otf.analyze.useunicodemarks",function(v) + analyzers.useunicodemarks=v +end) + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-otn']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files", +} +local concat,insert,remove=table.concat,table.insert,table.remove +local gmatch,gsub,find,match,lower,strip=string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip +local type,next,tonumber,tostring=type,next,tonumber,tostring +local lpegmatch=lpeg.match +local random=math.random +local formatters=string.formatters +local logs,trackers,nodes,attributes=logs,trackers,nodes,attributes +local registertracker=trackers.register +local fonts=fonts +local otf=fonts.handlers.otf +local trace_lookups=false registertracker("otf.lookups",function(v) trace_lookups=v end) +local trace_singles=false registertracker("otf.singles",function(v) trace_singles=v end) +local trace_multiples=false registertracker("otf.multiples",function(v) trace_multiples=v end) +local trace_alternatives=false registertracker("otf.alternatives",function(v) trace_alternatives=v end) +local trace_ligatures=false registertracker("otf.ligatures",function(v) trace_ligatures=v end) +local trace_contexts=false registertracker("otf.contexts",function(v) trace_contexts=v end) +local trace_marks=false registertracker("otf.marks",function(v) trace_marks=v end) +local trace_kerns=false registertracker("otf.kerns",function(v) trace_kerns=v end) +local trace_cursive=false registertracker("otf.cursive",function(v) trace_cursive=v end) +local trace_preparing=false registertracker("otf.preparing",function(v) trace_preparing=v end) +local trace_bugs=false registertracker("otf.bugs",function(v) trace_bugs=v end) +local trace_details=false registertracker("otf.details",function(v) trace_details=v end) +local trace_applied=false registertracker("otf.applied",function(v) trace_applied=v end) +local trace_steps=false registertracker("otf.steps",function(v) trace_steps=v end) +local trace_skips=false registertracker("otf.skips",function(v) trace_skips=v end) +local trace_directions=false registertracker("otf.directions",function(v) trace_directions=v end) +local report_direct=logs.reporter("fonts","otf direct") +local report_subchain=logs.reporter("fonts","otf subchain") +local report_chain=logs.reporter("fonts","otf chain") +local report_process=logs.reporter("fonts","otf process") +local report_prepare=logs.reporter("fonts","otf prepare") +local report_warning=logs.reporter("fonts","otf warning") +registertracker("otf.verbose_chain",function(v) otf.setcontextchain(v and "verbose") end) +registertracker("otf.normal_chain",function(v) otf.setcontextchain(v and "normal") end) +registertracker("otf.replacements","otf.singles,otf.multiples,otf.alternatives,otf.ligatures") +registertracker("otf.positions","otf.marks,otf.kerns,otf.cursive") +registertracker("otf.actions","otf.replacements,otf.positions") +registertracker("otf.injections","nodes.injections") +registertracker("*otf.sample","otf.steps,otf.actions,otf.analyzing") +local nuts=nodes.nuts +local tonode=nuts.tonode +local tonut=nuts.tonut +local getfield=nuts.getfield +local setfield=nuts.setfield 
+local getnext=nuts.getnext +local getprev=nuts.getprev +local getid=nuts.getid +local getattr=nuts.getattr +local setattr=nuts.setattr +local getprop=nuts.getprop +local setprop=nuts.setprop +local getfont=nuts.getfont +local getsubtype=nuts.getsubtype +local getchar=nuts.getchar +local insert_node_after=nuts.insert_after +local delete_node=nuts.delete +local copy_node=nuts.copy +local find_node_tail=nuts.tail +local flush_node_list=nuts.flush_list +local end_of_math=nuts.end_of_math +local setmetatableindex=table.setmetatableindex +local zwnj=0x200C +local zwj=0x200D +local wildcard="*" +local default="dflt" +local nodecodes=nodes.nodecodes +local whatcodes=nodes.whatcodes +local glyphcodes=nodes.glyphcodes +local disccodes=nodes.disccodes +local glyph_code=nodecodes.glyph +local glue_code=nodecodes.glue +local disc_code=nodecodes.disc +local whatsit_code=nodecodes.whatsit +local math_code=nodecodes.math +local dir_code=whatcodes.dir +local localpar_code=whatcodes.localpar +local discretionary_code=disccodes.discretionary +local ligature_code=glyphcodes.ligature +local privateattribute=attributes.private +local a_state=privateattribute('state') +local a_cursbase=privateattribute('cursbase') +local injections=nodes.injections +local setmark=injections.setmark +local setcursive=injections.setcursive +local setkern=injections.setkern +local setpair=injections.setpair +local resetinjection=injections.reset +local setligaindex=injections.setligaindex +local getligaindex=injections.getligaindex +local cursonce=true +local fonthashes=fonts.hashes +local fontdata=fonthashes.identifiers +local otffeatures=fonts.constructors.newfeatures("otf") +local registerotffeature=otffeatures.register +local onetimemessage=fonts.loggers.onetimemessage or function() end +otf.defaultnodealternate="none" +local tfmdata=false +local characters=false +local descriptions=false +local resources=false +local marks=false +local currentfont=false +local lookuptable=false +local anchorlookups=false +local lookuptypes=false +local lookuptags=false +local handlers={} +local rlmode=0 +local featurevalue=false +local checkstep=(nodes and nodes.tracers and nodes.tracers.steppers.check) or function() end +local registerstep=(nodes and nodes.tracers and nodes.tracers.steppers.register) or function() end +local registermessage=(nodes and nodes.tracers and nodes.tracers.steppers.message) or function() end +local function logprocess(...) + if trace_steps then + registermessage(...) + end + report_direct(...) +end +local function logwarning(...) + report_direct(...) 
+end +local f_unicode=formatters["%U"] +local f_uniname=formatters["%U (%s)"] +local f_unilist=formatters["% t (% t)"] +local function gref(n) + if type(n)=="number" then + local description=descriptions[n] + local name=description and description.name + if name then + return f_uniname(n,name) + else + return f_unicode(n) + end + elseif n then + local num,nam={},{} + for i=1,#n do + local ni=n[i] + if tonumber(ni) then + local di=descriptions[ni] + num[i]=f_unicode(ni) + nam[i]=di and di.name or "-" + end + end + return f_unilist(num,nam) + else + return "" + end +end +local function cref(kind,chainname,chainlookupname,lookupname,index) + if index then + return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookuptags[lookupname],index) + elseif lookupname then + return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookuptags[lookupname]) + elseif chainlookupname then + return formatters["feature %a, chain %a, sub %a"](kind,lookuptags[chainname],lookuptags[chainlookupname]) + elseif chainname then + return formatters["feature %a, chain %a"](kind,lookuptags[chainname]) + else + return formatters["feature %a"](kind) + end +end +local function pref(kind,lookupname) + return formatters["feature %a, lookup %a"](kind,lookuptags[lookupname]) +end +local function copy_glyph(g) + local components=getfield(g,"components") + if components then + setfield(g,"components",nil) + local n=copy_node(g) + setfield(g,"components",components) + return n + else + return copy_node(g) + end +end +local function markstoligature(kind,lookupname,head,start,stop,char) + if start==stop and getchar(start)==char then + return head,start + else + local prev=getprev(start) + local next=getnext(stop) + setfield(start,"prev",nil) + setfield(stop,"next",nil) + local base=copy_glyph(start) + if head==start then + head=base + end + resetinjection(base) + setfield(base,"char",char) + setfield(base,"subtype",ligature_code) + setfield(base,"components",start) + if prev then + setfield(prev,"next",base) + end + if next then + setfield(next,"prev",base) + end + setfield(base,"next",next) + setfield(base,"prev",prev) + return head,base + end +end +local function getcomponentindex(start) + if getid(start)~=glyph_code then + return 0 + elseif getsubtype(start)==ligature_code then + local i=0 + local components=getfield(start,"components") + while components do + i=i+getcomponentindex(components) + components=getnext(components) + end + return i + elseif not marks[getchar(start)] then + return 1 + else + return 0 + end +end +local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound) + if start==stop and getchar(start)==char then + resetinjection(start) + setfield(start,"char",char) + return head,start + end + local prev=getprev(start) + local next=getnext(stop) + setfield(start,"prev",nil) + setfield(stop,"next",nil) + local base=copy_glyph(start) + if start==head then + head=base + end + resetinjection(base) + setfield(base,"char",char) + setfield(base,"subtype",ligature_code) + setfield(base,"components",start) + if prev then + setfield(prev,"next",base) + end + if next then + setfield(next,"prev",base) + end + setfield(base,"next",next) + setfield(base,"prev",prev) + if not discfound then + local deletemarks=markflag~="mark" + local components=start + local baseindex=0 + local componentindex=0 + local head=base + local current=base + while start do + local char=getchar(start) + if not marks[char] then + 
baseindex=baseindex+componentindex + componentindex=getcomponentindex(start) + elseif not deletemarks then + setligaindex(start,baseindex+getligaindex(start,componentindex)) + if trace_marks then + logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),getligaindex(start)) + end + head,current=insert_node_after(head,current,copy_node(start)) + elseif trace_marks then + logwarning("%s: delete mark %s",pref(kind,lookupname),gref(char)) + end + start=getnext(start) + end + local start=getnext(current) + while start and getid(start)==glyph_code do + local char=getchar(start) + if marks[char] then + setligaindex(start,baseindex+getligaindex(start,componentindex)) + if trace_marks then + logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),getligaindex(start)) + end + else + break + end + start=getnext(start) + end + end + return head,base +end +function handlers.gsub_single(head,start,kind,lookupname,replacement) + if trace_singles then + logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(getchar(start)),gref(replacement)) + end + resetinjection(start) + setfield(start,"char",replacement) + return head,start,true +end +local function get_alternative_glyph(start,alternatives,value,trace_alternatives) + local n=#alternatives + if value=="random" then + local r=random(1,n) + return alternatives[r],trace_alternatives and formatters["value %a, taking %a"](value,r) + elseif value=="first" then + return alternatives[1],trace_alternatives and formatters["value %a, taking %a"](value,1) + elseif value=="last" then + return alternatives[n],trace_alternatives and formatters["value %a, taking %a"](value,n) + else + value=tonumber(value) + if type(value)~="number" then + return alternatives[1],trace_alternatives and formatters["invalid value %s, taking %a"](value,1) + elseif value>n then + local defaultalt=otf.defaultnodealternate + if defaultalt=="first" then + return alternatives[n],trace_alternatives and formatters["invalid value %s, taking %a"](value,1) + elseif defaultalt=="last" then + return alternatives[1],trace_alternatives and formatters["invalid value %s, taking %a"](value,n) + else + return false,trace_alternatives and formatters["invalid value %a, %s"](value,"out of range") + end + elseif value==0 then + return getchar(start),trace_alternatives and formatters["invalid value %a, %s"](value,"no change") + elseif value<1 then + return alternatives[1],trace_alternatives and formatters["invalid value %a, taking %a"](value,1) + else + return alternatives[value],trace_alternatives and formatters["value %a, taking %a"](value,value) + end + end +end +local function multiple_glyphs(head,start,multiple,ignoremarks) + local nofmultiples=#multiple + if nofmultiples>0 then + resetinjection(start) + setfield(start,"char",multiple[1]) + if nofmultiples>1 then + local sn=getnext(start) + for k=2,nofmultiples do + local n=copy_node(start) + resetinjection(n) + setfield(n,"char",multiple[k]) + setfield(n,"next",sn) + setfield(n,"prev",start) + if sn then + setfield(sn,"prev",n) + end + setfield(start,"next",n) + start=n + end + end + return head,start,true + else + if trace_multiples then + logprocess("no multiple for %s",gref(getchar(start))) + end + return head,start,false + end +end +function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence) + local value=featurevalue==true and tfmdata.shared.features[kind] or featurevalue + local choice,comment=get_alternative_glyph(start,alternative,value,trace_alternatives) + if choice 
then + if trace_alternatives then + logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(getchar(start)),choice,gref(choice),comment) + end + resetinjection(start) + setfield(start,"char",choice) + else + if trace_alternatives then + logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(getchar(start)),comment) + end + end + return head,start,true +end +function handlers.gsub_multiple(head,start,kind,lookupname,multiple,sequence) + if trace_multiples then + logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(getchar(start)),gref(multiple)) + end + return multiple_glyphs(head,start,multiple,sequence.flags[1]) +end +function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence) + local s,stop,discfound=getnext(start),nil,false + local startchar=getchar(start) + if marks[startchar] then + while s do + local id=getid(s) + if id==glyph_code and getfont(s)==currentfont and getsubtype(s)<256 then + local lg=ligature[getchar(s)] + if lg then + stop=s + ligature=lg + s=getnext(s) + else + break + end + else + break + end + end + if stop then + local lig=ligature.ligature + if lig then + if trace_ligatures then + local stopchar=getchar(stop) + head,start=markstoligature(kind,lookupname,head,start,stop,lig) + logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start))) + else + head,start=markstoligature(kind,lookupname,head,start,stop,lig) + end + return head,start,true + else + end + end + else + local skipmark=sequence.flags[1] + while s do + local id=getid(s) + if id==glyph_code and getsubtype(s)<256 then + if getfont(s)==currentfont then + local char=getchar(s) + if skipmark and marks[char] then + s=getnext(s) + else + local lg=ligature[char] + if lg then + stop=s + ligature=lg + s=getnext(s) + else + break + end + end + else + break + end + elseif id==disc_code then + discfound=true + s=getnext(s) + else + break + end + end + local lig=ligature.ligature + if lig then + if stop then + if trace_ligatures then + local stopchar=getchar(stop) + head,start=toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound) + logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start))) + else + head,start=toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound) + end + else + resetinjection(start) + setfield(start,"char",lig) + if trace_ligatures then + logprocess("%s: replacing %s by (no real) ligature %s case 3",pref(kind,lookupname),gref(startchar),gref(lig)) + end + end + return head,start,true + else + end + end + return head,start,false +end +function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence) + local markchar=getchar(start) + if marks[markchar] then + local base=getprev(start) + if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then + local basechar=getchar(base) + if marks[basechar] then + while true do + base=getprev(base) + if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then + basechar=getchar(base) + if not marks[basechar] then + break + end + else + if trace_bugs then + logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar)) + end + return head,start,false + end + end + end + local baseanchors=descriptions[basechar] + if baseanchors then + baseanchors=baseanchors.anchors + end + if baseanchors then + local 
baseanchors=baseanchors['basechar'] + if baseanchors then + local al=anchorlookups[lookupname] + for anchor,ba in next,baseanchors do + if al[anchor] then + local ma=markanchors[anchor] + if ma then + local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) + if trace_marks then + logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)", + pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) + end + return head,start,true + end + end + end + if trace_bugs then + logwarning("%s, no matching anchors for mark %s and base %s",pref(kind,lookupname),gref(markchar),gref(basechar)) + end + end + elseif trace_bugs then + onetimemessage(currentfont,basechar,"no base anchors",report_fonts) + end + elseif trace_bugs then + logwarning("%s: prev node is no char",pref(kind,lookupname)) + end + elseif trace_bugs then + logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar)) + end + return head,start,false +end +function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence) + local markchar=getchar(start) + if marks[markchar] then + local base=getprev(start) + if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then + local basechar=getchar(base) + if marks[basechar] then + while true do + base=getprev(base) + if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then + basechar=getchar(base) + if not marks[basechar] then + break + end + else + if trace_bugs then + logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar)) + end + return head,start,false + end + end + end + local index=getligaindex(start) + local baseanchors=descriptions[basechar] + if baseanchors then + baseanchors=baseanchors.anchors + if baseanchors then + local baseanchors=baseanchors['baselig'] + if baseanchors then + local al=anchorlookups[lookupname] + for anchor,ba in next,baseanchors do + if al[anchor] then + local ma=markanchors[anchor] + if ma then + ba=ba[index] + if ba then + local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) + if trace_marks then + logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)", + pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy) + end + return head,start,true + else + if trace_bugs then + logwarning("%s: no matching anchors for mark %s and baselig %s with index %a",pref(kind,lookupname),gref(markchar),gref(basechar),index) + end + end + end + end + end + if trace_bugs then + logwarning("%s: no matching anchors for mark %s and baselig %s",pref(kind,lookupname),gref(markchar),gref(basechar)) + end + end + end + elseif trace_bugs then + onetimemessage(currentfont,basechar,"no base anchors",report_fonts) + end + elseif trace_bugs then + logwarning("%s: prev node is no char",pref(kind,lookupname)) + end + elseif trace_bugs then + logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar)) + end + return head,start,false +end +function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence) + local markchar=getchar(start) + if marks[markchar] then + local base=getprev(start) + local slc=getligaindex(start) + if slc then + while base do + local blc=getligaindex(base) + if blc and blc~=slc then + base=getprev(base) + else + break + end + end + end + if base and getid(base)==glyph_code and getfont(base)==currentfont and 
getsubtype(base)<256 then + local basechar=getchar(base) + local baseanchors=descriptions[basechar] + if baseanchors then + baseanchors=baseanchors.anchors + if baseanchors then + baseanchors=baseanchors['basemark'] + if baseanchors then + local al=anchorlookups[lookupname] + for anchor,ba in next,baseanchors do + if al[anchor] then + local ma=markanchors[anchor] + if ma then + local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) + if trace_marks then + logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)", + pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) + end + return head,start,true + end + end + end + if trace_bugs then + logwarning("%s: no matching anchors for mark %s and basemark %s",pref(kind,lookupname),gref(markchar),gref(basechar)) + end + end + end + elseif trace_bugs then + onetimemessage(currentfont,basechar,"no base anchors",report_fonts) + end + elseif trace_bugs then + logwarning("%s: prev node is no mark",pref(kind,lookupname)) + end + elseif trace_bugs then + logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar)) + end + return head,start,false +end +function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence) + local alreadydone=cursonce and getprop(start,a_cursbase) + if not alreadydone then + local done=false + local startchar=getchar(start) + if marks[startchar] then + if trace_cursive then + logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar)) + end + else + local nxt=getnext(start) + while not done and nxt and getid(nxt)==glyph_code and getfont(nxt)==currentfont and getsubtype(nxt)<256 do + local nextchar=getchar(nxt) + if marks[nextchar] then + nxt=getnext(nxt) + else + local entryanchors=descriptions[nextchar] + if entryanchors then + entryanchors=entryanchors.anchors + if entryanchors then + entryanchors=entryanchors['centry'] + if entryanchors then + local al=anchorlookups[lookupname] + for anchor,entry in next,entryanchors do + if al[anchor] then + local exit=exitanchors[anchor] + if exit then + local dx,dy,bound=setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar]) + if trace_cursive then + logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode) + end + done=true + break + end + end + end + end + end + elseif trace_bugs then + onetimemessage(currentfont,startchar,"no entry anchors",report_fonts) + end + break + end + end + end + return head,start,done + else + if trace_cursive and trace_details then + logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone) + end + return head,start,false + end +end +function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence) + local startchar=getchar(start) + local dx,dy,w,h=setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar]) + if trace_kerns then + logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h) + end + return head,start,false +end +function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence) + local snext=getnext(start) + if not snext then + return head,start,false + else + local prev,done=start,false + local factor=tfmdata.parameters.factor + local lookuptype=lookuptypes[lookupname] + while snext and 
getid(snext)==glyph_code and getfont(snext)==currentfont and getsubtype(snext)<256 do + local nextchar=getchar(snext) + local krn=kerns[nextchar] + if not krn and marks[nextchar] then + prev=snext + snext=getnext(snext) + else + if not krn then + elseif type(krn)=="table" then + if lookuptype=="pair" then + local a,b=krn[2],krn[3] + if a and #a>0 then + local startchar=getchar(start) + local x,y,w,h=setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar]) + if trace_kerns then + logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h) + end + end + if b and #b>0 then + local startchar=getchar(start) + local x,y,w,h=setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar]) + if trace_kerns then + logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h) + end + end + else + report_process("%s: check this out (old kern stuff)",pref(kind,lookupname)) + end + done=true + elseif krn~=0 then + local k=setkern(snext,factor,rlmode,krn) + if trace_kerns then + logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(getchar(prev)),gref(nextchar)) + end + done=true + end + break + end + end + return head,start,done + end +end +local chainmores={} +local chainprocs={} +local function logprocess(...) + if trace_steps then + registermessage(...) + end + report_subchain(...) +end +local logwarning=report_subchain +local function logprocess(...) + if trace_steps then + registermessage(...) + end + report_chain(...) +end +local logwarning=report_chain +function chainprocs.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname) + logwarning("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname)) + return head,start,false +end +function chainmores.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname,n) + logprocess("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname)) + return head,start,false +end +function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements) + local char=getchar(start) + local replacement=replacements[char] + if replacement then + if trace_singles then + logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement)) + end + resetinjection(start) + setfield(start,"char",replacement) + return head,start,true + else + return head,start,false + end +end +function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex) + local current=start + local subtables=currentlookup.subtables + if #subtables>1 then + logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," ")) + end + while current do + if getid(current)==glyph_code then + local currentchar=getchar(current) + local lookupname=subtables[1] + local replacement=lookuphash[lookupname] + if not replacement then + if trace_bugs then + logwarning("%s: no single hits",cref(kind,chainname,chainlookupname,lookupname,chainindex)) + end + else + replacement=replacement[currentchar] + if not replacement or replacement=="" then + if trace_bugs then + logwarning("%s: no single for %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar)) + end + else + if trace_singles then + logprocess("%s: 
replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement)) + end + resetinjection(current) + setfield(current,"char",replacement) + end + end + return head,start,true + elseif current==stop then + break + else + current=getnext(current) + end + end + return head,start,false +end +chainmores.gsub_single=chainprocs.gsub_single +function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) + local startchar=getchar(start) + local subtables=currentlookup.subtables + local lookupname=subtables[1] + local replacements=lookuphash[lookupname] + if not replacements then + if trace_bugs then + logwarning("%s: no multiple hits",cref(kind,chainname,chainlookupname,lookupname)) + end + else + replacements=replacements[startchar] + if not replacements or replacement=="" then + if trace_bugs then + logwarning("%s: no multiple for %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar)) + end + else + if trace_multiples then + logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements)) + end + return multiple_glyphs(head,start,replacements,currentlookup.flags[1]) + end + end + return head,start,false +end +chainmores.gsub_multiple=chainprocs.gsub_multiple +function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) + local current=start + local subtables=currentlookup.subtables + local value=featurevalue==true and tfmdata.shared.features[kind] or featurevalue + while current do + if getid(current)==glyph_code then + local currentchar=getchar(current) + local lookupname=subtables[1] + local alternatives=lookuphash[lookupname] + if not alternatives then + if trace_bugs then + logwarning("%s: no alternative hit",cref(kind,chainname,chainlookupname,lookupname)) + end + else + alternatives=alternatives[currentchar] + if alternatives then + local choice,comment=get_alternative_glyph(current,alternatives,value,trace_alternatives) + if choice then + if trace_alternatives then + logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(char),choice,gref(choice),comment) + end + resetinjection(start) + setfield(start,"char",choice) + else + if trace_alternatives then + logwarning("%s: no variant %a for %s, %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(char),comment) + end + end + elseif trace_bugs then + logwarning("%s: no alternative for %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar),comment) + end + end + return head,start,true + elseif current==stop then + break + else + current=getnext(current) + end + end + return head,start,false +end +chainmores.gsub_alternate=chainprocs.gsub_alternate +function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex) + local startchar=getchar(start) + local subtables=currentlookup.subtables + local lookupname=subtables[1] + local ligatures=lookuphash[lookupname] + if not ligatures then + if trace_bugs then + logwarning("%s: no ligature hits",cref(kind,chainname,chainlookupname,lookupname,chainindex)) + end + else + ligatures=ligatures[startchar] + if not ligatures then + if trace_bugs then + logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar)) + end + else + local 
s=getnext(start) + local discfound=false + local last=stop + local nofreplacements=0 + local skipmark=currentlookup.flags[1] + while s do + local id=getid(s) + if id==disc_code then + s=getnext(s) + discfound=true + else + local schar=getchar(s) + if skipmark and marks[schar] then + s=getnext(s) + else + local lg=ligatures[schar] + if lg then + ligatures,last,nofreplacements=lg,s,nofreplacements+1 + if s==stop then + break + else + s=getnext(s) + end + else + break + end + end + end + end + local l2=ligatures.ligature + if l2 then + if chainindex then + stop=last + end + if trace_ligatures then + if start==stop then + logprocess("%s: replacing character %s by ligature %s case 3",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2)) + else + logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop)),gref(l2)) + end + end + head,start=toligature(kind,lookupname,head,start,stop,l2,currentlookup.flags[1],discfound) + return head,start,true,nofreplacements + elseif trace_bugs then + if start==stop then + logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar)) + else + logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop))) + end + end + end + end + return head,start,false,0 +end +chainmores.gsub_ligature=chainprocs.gsub_ligature +function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) + local markchar=getchar(start) + if marks[markchar] then + local subtables=currentlookup.subtables + local lookupname=subtables[1] + local markanchors=lookuphash[lookupname] + if markanchors then + markanchors=markanchors[markchar] + end + if markanchors then + local base=getprev(start) + if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then + local basechar=getchar(base) + if marks[basechar] then + while true do + base=getprev(base) + if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then + basechar=getchar(base) + if not marks[basechar] then + break + end + else + if trace_bugs then + logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar)) + end + return head,start,false + end + end + end + local baseanchors=descriptions[basechar].anchors + if baseanchors then + local baseanchors=baseanchors['basechar'] + if baseanchors then + local al=anchorlookups[lookupname] + for anchor,ba in next,baseanchors do + if al[anchor] then + local ma=markanchors[anchor] + if ma then + local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) + if trace_marks then + logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)", + cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) + end + return head,start,true + end + end + end + if trace_bugs then + logwarning("%s, no matching anchors for mark %s and base %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar)) + end + end + end + elseif trace_bugs then + logwarning("%s: prev node is no char",cref(kind,chainname,chainlookupname,lookupname)) + end + elseif trace_bugs then + logwarning("%s: mark %s has no 
anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar)) + end + elseif trace_bugs then + logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar)) + end + return head,start,false +end +function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) + local markchar=getchar(start) + if marks[markchar] then + local subtables=currentlookup.subtables + local lookupname=subtables[1] + local markanchors=lookuphash[lookupname] + if markanchors then + markanchors=markanchors[markchar] + end + if markanchors then + local base=getprev(start) + if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then + local basechar=getchar(base) + if marks[basechar] then + while true do + base=getprev(base) + if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then + basechar=getchar(base) + if not marks[basechar] then + break + end + else + if trace_bugs then + logwarning("%s: no base for mark %s",cref(kind,chainname,chainlookupname,lookupname),markchar) + end + return head,start,false + end + end + end + local index=getligaindex(start) + local baseanchors=descriptions[basechar].anchors + if baseanchors then + local baseanchors=baseanchors['baselig'] + if baseanchors then + local al=anchorlookups[lookupname] + for anchor,ba in next,baseanchors do + if al[anchor] then + local ma=markanchors[anchor] + if ma then + ba=ba[index] + if ba then + local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) + if trace_marks then + logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)", + cref(kind,chainname,chainlookupname,lookupname),anchor,a or bound,gref(markchar),gref(basechar),index,dx,dy) + end + return head,start,true + end + end + end + end + if trace_bugs then + logwarning("%s: no matching anchors for mark %s and baselig %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar)) + end + end + end + elseif trace_bugs then + logwarning("feature %s, lookup %s: prev node is no char",kind,lookupname) + end + elseif trace_bugs then + logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar)) + end + elseif trace_bugs then + logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar)) + end + return head,start,false +end +function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) + local markchar=getchar(start) + if marks[markchar] then + local subtables=currentlookup.subtables + local lookupname=subtables[1] + local markanchors=lookuphash[lookupname] + if markanchors then + markanchors=markanchors[markchar] + end + if markanchors then + local base=getprev(start) + local slc=getligaindex(start) + if slc then + while base do + local blc=getligaindex(base) + if blc and blc~=slc then + base=getprev(base) + else + break + end + end + end + if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then + local basechar=getchar(base) + local baseanchors=descriptions[basechar].anchors + if baseanchors then + baseanchors=baseanchors['basemark'] + if baseanchors then + local al=anchorlookups[lookupname] + for anchor,ba in next,baseanchors do + if al[anchor] then + local ma=markanchors[anchor] + if ma then + local 
dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) + if trace_marks then + logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)", + cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) + end + return head,start,true + end + end + end + if trace_bugs then + logwarning("%s: no matching anchors for mark %s and basemark %s",gref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar)) + end + end + end + elseif trace_bugs then + logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname)) + end + elseif trace_bugs then + logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar)) + end + elseif trace_bugs then + logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar)) + end + return head,start,false +end +function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname) + local alreadydone=cursonce and getprop(start,a_cursbase) + if not alreadydone then + local startchar=getchar(start) + local subtables=currentlookup.subtables + local lookupname=subtables[1] + local exitanchors=lookuphash[lookupname] + if exitanchors then + exitanchors=exitanchors[startchar] + end + if exitanchors then + local done=false + if marks[startchar] then + if trace_cursive then + logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar)) + end + else + local nxt=getnext(start) + while not done and nxt and getid(nxt)==glyph_code and getfont(nxt)==currentfont and getsubtype(nxt)<256 do + local nextchar=getchar(nxt) + if marks[nextchar] then + nxt=getnext(nxt) + else + local entryanchors=descriptions[nextchar] + if entryanchors then + entryanchors=entryanchors.anchors + if entryanchors then + entryanchors=entryanchors['centry'] + if entryanchors then + local al=anchorlookups[lookupname] + for anchor,entry in next,entryanchors do + if al[anchor] then + local exit=exitanchors[anchor] + if exit then + local dx,dy,bound=setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar]) + if trace_cursive then + logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode) + end + done=true + break + end + end + end + end + end + elseif trace_bugs then + onetimemessage(currentfont,startchar,"no entry anchors",report_fonts) + end + break + end + end + end + return head,start,done + else + if trace_cursive and trace_details then + logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone) + end + return head,start,false + end + end + return head,start,false +end +function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence) + local startchar=getchar(start) + local subtables=currentlookup.subtables + local lookupname=subtables[1] + local kerns=lookuphash[lookupname] + if kerns then + kerns=kerns[startchar] + if kerns then + local dx,dy,w,h=setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar]) + if trace_kerns then + logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h) + end + end + end + return head,start,false +end 
+chainmores.gpos_single=chainprocs.gpos_single +function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence) + local snext=getnext(start) + if snext then + local startchar=getchar(start) + local subtables=currentlookup.subtables + local lookupname=subtables[1] + local kerns=lookuphash[lookupname] + if kerns then + kerns=kerns[startchar] + if kerns then + local lookuptype=lookuptypes[lookupname] + local prev,done=start,false + local factor=tfmdata.parameters.factor + while snext and getid(snext)==glyph_code and getfont(snext)==currentfont and getsubtype(snext)<256 do + local nextchar=getchar(snext) + local krn=kerns[nextchar] + if not krn and marks[nextchar] then + prev=snext + snext=getnext(snext) + else + if not krn then + elseif type(krn)=="table" then + if lookuptype=="pair" then + local a,b=krn[2],krn[3] + if a and #a>0 then + local startchar=getchar(start) + local x,y,w,h=setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar]) + if trace_kerns then + logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h) + end + end + if b and #b>0 then + local startchar=getchar(start) + local x,y,w,h=setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar]) + if trace_kerns then + logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h) + end + end + else + report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname)) + local a,b=krn[2],krn[6] + if a and a~=0 then + local k=setkern(snext,factor,rlmode,a) + if trace_kerns then + logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar)) + end + end + if b and b~=0 then + logwarning("%s: ignoring second kern xoff %s",cref(kind,chainname,chainlookupname),b*factor) + end + end + done=true + elseif krn~=0 then + local k=setkern(snext,factor,rlmode,krn) + if trace_kerns then + logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar)) + end + done=true + end + break + end + end + return head,start,done + end + end + end + return head,start,false +end +chainmores.gpos_pair=chainprocs.gpos_pair +local function show_skip(kind,chainname,char,ck,class) + if ck[9] then + logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a, %a => %a",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10]) + else + logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a",cref(kind,chainname),gref(char),class,ck[1],ck[2]) + end +end +local quit_on_no_replacement=true +directives.register("otf.chain.quitonnoreplacement",function(value) + quit_on_no_replacement=value +end) +local function normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash) + local flags=sequence.flags + local done=false + local skipmark=flags[1] + local skipligature=flags[2] + local skipbase=flags[3] + local someskip=skipmark or skipligature or skipbase + local markclass=sequence.markclass + local skipped=false + for k=1,#contexts do + local match=true + local current=start + local last=start + local ck=contexts[k] + local seq=ck[3] + local s=#seq + if s==1 then + match=getid(current)==glyph_code and getfont(current)==currentfont and getsubtype(current)<256 and 
seq[1][getchar(current)]
+ else
+ local f,l=ck[4],ck[5]
+ if f==1 and f==l then
+ else
+ if f==l then
+ else
+ local n=f+1
+ last=getnext(last)
+ while n<=l do
+ if last then
+ local id=getid(last)
+ if id==glyph_code then
+ if getfont(last)==currentfont and getsubtype(last)<256 then
+ local char=getchar(last)
+ local ccd=descriptions[char]
+ if ccd then
+ local class=ccd.class
+ if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
+ skipped=true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ last=getnext(last)
+ elseif seq[n][char] then
+ if n<l then
+ last=getnext(last)
+ end
+ n=n+1
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ elseif id==disc_code then
+ last=getnext(last)
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ end
+ end
+ end
+ if match and f>1 then
+ local prev=getprev(start)
+ if prev then
+ local n=f-1
+ while n>=1 do
+ if prev then
+ local id=getid(prev)
+ if id==glyph_code then
+ if getfont(prev)==currentfont and getsubtype(prev)<256 then
+ local char=getchar(prev)
+ local ccd=descriptions[char]
+ if ccd then
+ local class=ccd.class
+ if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
+ skipped=true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ elseif seq[n][char] then
+ n=n -1
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ elseif id==disc_code then
+ elseif seq[n][32] then
+ n=n -1
+ else
+ match=false
+ break
+ end
+ prev=getprev(prev)
+ elseif seq[n][32] then
+ n=n -1
+ else
+ match=false
+ break
+ end
+ end
+ elseif f==2 then
+ match=seq[1][32]
+ else
+ for n=f-1,1 do
+ if not seq[n][32] then
+ match=false
+ break
+ end
+ end
+ end
+ end
+ if match and s>l then
+ local current=last and getnext(last)
+ if current then
+ local n=l+1
+ while n<=s do
+ if current then
+ local id=getid(current)
+ if id==glyph_code then
+ if getfont(current)==currentfont and getsubtype(current)<256 then
+ local char=getchar(current)
+ local ccd=descriptions[char]
+ if ccd then
+ local class=ccd.class
+ if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
+ skipped=true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ elseif seq[n][char] then
+ n=n+1
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ elseif id==disc_code then
+ elseif seq[n][32] then
+ n=n+1
+ else
+ match=false
+ break
+ end
+ current=getnext(current)
+ elseif seq[n][32] then
+ n=n+1
+ else
+ match=false
+ break
+ end
+ end
+ elseif s-l==1 then
+ match=seq[s][32]
+ else
+ for n=l+1,s do
+ if not seq[n][32] then
+ match=false
+ break
+ end
+ end
+ end
+ end
+ end
+ if match then
+ if trace_contexts then
+ local rule,lookuptype,f,l=ck[1],ck[2],ck[4],ck[5]
+ local char=getchar(start)
+ if ck[9] then
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a, %a => %a",
+ cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
+ else
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a",
+ cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype)
+ end
+ end
+ local chainlookups=ck[6]
+ if chainlookups then
+ local nofchainlookups=#chainlookups
+ if nofchainlookups==1 then
+ local chainlookupname=chainlookups[1]
+ local chainlookup=lookuptable[chainlookupname]
+ if chainlookup then
+ local cp=chainprocs[chainlookup.type]
+ if cp then
+ local ok
+ head,start,ok=cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
+ if ok then
done=true + end + else + logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type) + end + else + logprocess("%s is not yet supported",cref(kind,chainname,chainlookupname)) + end + else + local i=1 + while true do + if skipped then + while true do + local char=getchar(start) + local ccd=descriptions[char] + if ccd then + local class=ccd.class + if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then + start=getnext(start) + else + break + end + else + break + end + end + end + local chainlookupname=chainlookups[i] + local chainlookup=lookuptable[chainlookupname] + if not chainlookup then + i=i+1 + else + local cp=chainmores[chainlookup.type] + if not cp then + logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type) + i=i+1 + else + local ok,n + head,start,ok,n=cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence) + if ok then + done=true + i=i+(n or 1) + else + i=i+1 + end + end + end + if i>nofchainlookups then + break + elseif start then + start=getnext(start) + else + end + end + end + else + local replacements=ck[7] + if replacements then + head,start,done=chainprocs.reversesub(head,start,last,kind,chainname,ck,lookuphash,replacements) + else + done=quit_on_no_replacement + if trace_contexts then + logprocess("%s: skipping match",cref(kind,chainname)) + end + end + end + end + end + return head,start,done +end +local verbose_handle_contextchain=function(font,...) + logwarning("no verbose handler installed, reverting to 'normal'") + otf.setcontextchain() + return normal_handle_contextchain(...) +end +otf.chainhandlers={ + normal=normal_handle_contextchain, + verbose=verbose_handle_contextchain, +} +function otf.setcontextchain(method) + if not method or method=="normal" or not otf.chainhandlers[method] then + if handlers.contextchain then + logwarning("installing normal contextchain handler") + end + handlers.contextchain=normal_handle_contextchain + else + logwarning("installing contextchain handler %a",method) + local handler=otf.chainhandlers[method] + handlers.contextchain=function(...) + return handler(currentfont,...) + end + end + handlers.gsub_context=handlers.contextchain + handlers.gsub_contextchain=handlers.contextchain + handlers.gsub_reversecontextchain=handlers.contextchain + handlers.gpos_contextchain=handlers.contextchain + handlers.gpos_context=handlers.contextchain +end +otf.setcontextchain() +local missing={} +local function logprocess(...) + if trace_steps then + registermessage(...) + end + report_process(...) 
+end +local logwarning=report_process +local function report_missing_cache(typ,lookup) + local f=missing[currentfont] if not f then f={} missing[currentfont]=f end + local t=f[typ] if not t then t={} f[typ]=t end + if not t[lookup] then + t[lookup]=true + logwarning("missing cache for lookup %a, type %a, font %a, name %a",lookup,typ,currentfont,tfmdata.properties.fullname) + end +end +local resolved={} +local lookuphashes={} +setmetatableindex(lookuphashes,function(t,font) + local lookuphash=fontdata[font].resources.lookuphash + if not lookuphash or not next(lookuphash) then + lookuphash=false + end + t[font]=lookuphash + return lookuphash +end) +local autofeatures=fonts.analyzers.features +local function initialize(sequence,script,language,enabled) + local features=sequence.features + if features then + local order=sequence.order + if order then + for i=1,#order do + local kind=order[i] + local valid=enabled[kind] + if valid then + local scripts=features[kind] + local languages=scripts[script] or scripts[wildcard] + if languages and (languages[language] or languages[wildcard]) then + return { valid,autofeatures[kind] or false,sequence.chain or 0,kind,sequence } + end + end + end + else + end + end + return false +end +function otf.dataset(tfmdata,font) + local shared=tfmdata.shared + local properties=tfmdata.properties + local language=properties.language or "dflt" + local script=properties.script or "dflt" + local enabled=shared.features + local res=resolved[font] + if not res then + res={} + resolved[font]=res + end + local rs=res[script] + if not rs then + rs={} + res[script]=rs + end + local rl=rs[language] + if not rl then + rl={ + } + rs[language]=rl + local sequences=tfmdata.resources.sequences + for s=1,#sequences do + local v=enabled and initialize(sequences[s],script,language,enabled) + if v then + rl[#rl+1]=v + end + end + end + return rl +end +local function featuresprocessor(head,font,attr) + local lookuphash=lookuphashes[font] + if not lookuphash then + return head,false + end + head=tonut(head) + if trace_steps then + checkstep(head) + end + tfmdata=fontdata[font] + descriptions=tfmdata.descriptions + characters=tfmdata.characters + resources=tfmdata.resources + marks=resources.marks + anchorlookups=resources.lookup_to_anchor + lookuptable=resources.lookups + lookuptypes=resources.lookuptypes + lookuptags=resources.lookuptags + currentfont=font + rlmode=0 + local sequences=resources.sequences + local done=false + local datasets=otf.dataset(tfmdata,font,attr) + local dirstack={} + for s=1,#datasets do + local dataset=datasets[s] + featurevalue=dataset[1] + local sequence=dataset[5] + local rlparmode=0 + local topstack=0 + local success=false + local attribute=dataset[2] + local chain=dataset[3] + local typ=sequence.type + local subtables=sequence.subtables + if chain<0 then + local handler=handlers[typ] + local start=find_node_tail(head) + while start do + local id=getid(start) + if id==glyph_code then + if getfont(start)==font and getsubtype(start)<256 then + local a=getattr(start,0) + if a then + a=a==attr + else + a=true + end + if a then + for i=1,#subtables do + local lookupname=subtables[i] + local lookupcache=lookuphash[lookupname] + if lookupcache then + local lookupmatch=lookupcache[getchar(start)] + if lookupmatch then + head,start,success=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) + if success then + break + end + end + else + report_missing_cache(typ,lookupname) + end + end + if start then start=getprev(start) end + else + 
start=getprev(start) + end + else + start=getprev(start) + end + else + start=getprev(start) + end + end + else + local handler=handlers[typ] + local ns=#subtables + local start=head + rlmode=0 + if ns==1 then + local lookupname=subtables[1] + local lookupcache=lookuphash[lookupname] + if not lookupcache then + report_missing_cache(typ,lookupname) + else + local function subrun(start) + local head=start + local done=false + while start do + local id=getid(start) + if id==glyph_code and getfont(start)==font and getsubtype(start)<256 then + local a=getattr(start,0) + if a then + a=(a==attr) and (not attribute or getprop(start,a_state)==attribute) + else + a=not attribute or getprop(start,a_state)==attribute + end + if a then + local lookupmatch=lookupcache[getchar(start)] + if lookupmatch then + local ok + head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) + if ok then + done=true + end + end + if start then start=getnext(start) end + else + start=getnext(start) + end + else + start=getnext(start) + end + end + if done then + success=true + return head + end + end + local function kerndisc(disc) + local prev=getprev(disc) + local next=getnext(disc) + if prev and next then + setfield(prev,"next",next) + local a=getattr(prev,0) + if a then + a=(a==attr) and (not attribute or getprop(prev,a_state)==attribute) + else + a=not attribute or getprop(prev,a_state)==attribute + end + if a then + local lookupmatch=lookupcache[getchar(prev)] + if lookupmatch then + local h,d,ok=handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) + if ok then + done=true + success=true + end + end + end + setfield(prev,"next",disc) + end + return next + end + while start do + local id=getid(start) + if id==glyph_code then + if getfont(start)==font and getsubtype(start)<256 then + local a=getattr(start,0) + if a then + a=(a==attr) and (not attribute or getprop(start,a_state)==attribute) + else + a=not attribute or getprop(start,a_state)==attribute + end + if a then + local lookupmatch=lookupcache[getchar(start)] + if lookupmatch then + local ok + head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1) + if ok then + success=true + end + end + if start then start=getnext(start) end + else + start=getnext(start) + end + else + start=getnext(start) + end + elseif id==disc_code then + if getsubtype(start)==discretionary_code then + local pre=getfield(start,"pre") + if pre then + local new=subrun(pre) + if new then setfield(start,"pre",new) end + end + local post=getfield(start,"post") + if post then + local new=subrun(post) + if new then setfield(start,"post",new) end + end + local replace=getfield(start,"replace") + if replace then + local new=subrun(replace) + if new then setfield(start,"replace",new) end + end +elseif typ=="gpos_single" or typ=="gpos_pair" then + kerndisc(start) + end + start=getnext(start) + elseif id==whatsit_code then + local subtype=getsubtype(start) + if subtype==dir_code then + local dir=getfield(start,"dir") + if dir=="+TRT" or dir=="+TLT" then + topstack=topstack+1 + dirstack[topstack]=dir + elseif dir=="-TRT" or dir=="-TLT" then + topstack=topstack-1 + end + local newdir=dirstack[topstack] + if newdir=="+TRT" then + rlmode=-1 + elseif newdir=="+TLT" then + rlmode=1 + else + rlmode=rlparmode + end + if trace_directions then + report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir) + end + elseif subtype==localpar_code then + local 
dir=getfield(start,"dir") + if dir=="TRT" then + rlparmode=-1 + elseif dir=="TLT" then + rlparmode=1 + else + rlparmode=0 + end + rlmode=rlparmode + if trace_directions then + report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode) + end + end + start=getnext(start) + elseif id==math_code then + start=getnext(end_of_math(start)) + else + start=getnext(start) + end + end + end + else + local function subrun(start) + local head=start + local done=false + while start do + local id=getid(start) + if id==glyph_code and getfont(start)==font and getsubtype(start)<256 then + local a=getattr(start,0) + if a then + a=(a==attr) and (not attribute or getprop(start,a_state)==attribute) + else + a=not attribute or getprop(start,a_state)==attribute + end + if a then + for i=1,ns do + local lookupname=subtables[i] + local lookupcache=lookuphash[lookupname] + if lookupcache then + local lookupmatch=lookupcache[getchar(start)] + if lookupmatch then + local ok + head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) + if ok then + done=true + break + elseif not start then + break + end + end + else + report_missing_cache(typ,lookupname) + end + end + if start then start=getnext(start) end + else + start=getnext(start) + end + else + start=getnext(start) + end + end + if done then + success=true + return head + end + end + local function kerndisc(disc) + local prev=getprev(disc) + local next=getnext(disc) + if prev and next then + setfield(prev,"next",next) + local a=getattr(prev,0) + if a then + a=(a==attr) and (not attribute or getprop(prev,a_state)==attribute) + else + a=not attribute or getprop(prev,a_state)==attribute + end + if a then + for i=1,ns do + local lookupname=subtables[i] + local lookupcache=lookuphash[lookupname] + if lookupcache then + local lookupmatch=lookupcache[getchar(prev)] + if lookupmatch then + local h,d,ok=handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) + if ok then + done=true + break + end + end + else + report_missing_cache(typ,lookupname) + end + end + end + setfield(prev,"next",disc) + end + return next + end + while start do + local id=getid(start) + if id==glyph_code then + if getfont(start)==font and getsubtype(start)<256 then + local a=getattr(start,0) + if a then + a=(a==attr) and (not attribute or getprop(start,a_state)==attribute) + else + a=not attribute or getprop(start,a_state)==attribute + end + if a then + for i=1,ns do + local lookupname=subtables[i] + local lookupcache=lookuphash[lookupname] + if lookupcache then + local lookupmatch=lookupcache[getchar(start)] + if lookupmatch then + local ok + head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i) + if ok then + success=true + break + elseif not start then + break + end + end + else + report_missing_cache(typ,lookupname) + end + end + if start then start=getnext(start) end + else + start=getnext(start) + end + else + start=getnext(start) + end + elseif id==disc_code then + if getsubtype(start)==discretionary_code then + local pre=getfield(start,"pre") + if pre then + local new=subrun(pre) + if new then setfield(start,"pre",new) end + end + local post=getfield(start,"post") + if post then + local new=subrun(post) + if new then setfield(start,"post",new) end + end + local replace=getfield(start,"replace") + if replace then + local new=subrun(replace) + if new then setfield(start,"replace",new) end + end +elseif typ=="gpos_single" or typ=="gpos_pair" then + kerndisc(start) + end + 
start=getnext(start) + elseif id==whatsit_code then + local subtype=getsubtype(start) + if subtype==dir_code then + local dir=getfield(start,"dir") + if dir=="+TRT" or dir=="+TLT" then + topstack=topstack+1 + dirstack[topstack]=dir + elseif dir=="-TRT" or dir=="-TLT" then + topstack=topstack-1 + end + local newdir=dirstack[topstack] + if newdir=="+TRT" then + rlmode=-1 + elseif newdir=="+TLT" then + rlmode=1 + else + rlmode=rlparmode + end + if trace_directions then + report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir) + end + elseif subtype==localpar_code then + local dir=getfield(start,"dir") + if dir=="TRT" then + rlparmode=-1 + elseif dir=="TLT" then + rlparmode=1 + else + rlparmode=0 + end + rlmode=rlparmode + if trace_directions then + report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode) + end + end + start=getnext(start) + elseif id==math_code then + start=getnext(end_of_math(start)) + else + start=getnext(start) + end + end + end + end + if success then + done=true + end + if trace_steps then + registerstep(head) + end + end + head=tonode(head) + return head,done +end +local function generic(lookupdata,lookupname,unicode,lookuphash) + local target=lookuphash[lookupname] + if target then + target[unicode]=lookupdata + else + lookuphash[lookupname]={ [unicode]=lookupdata } + end +end +local action={ + substitution=generic, + multiple=generic, + alternate=generic, + position=generic, + ligature=function(lookupdata,lookupname,unicode,lookuphash) + local target=lookuphash[lookupname] + if not target then + target={} + lookuphash[lookupname]=target + end + for i=1,#lookupdata do + local li=lookupdata[i] + local tu=target[li] + if not tu then + tu={} + target[li]=tu + end + target=tu + end + target.ligature=unicode + end, + pair=function(lookupdata,lookupname,unicode,lookuphash) + local target=lookuphash[lookupname] + if not target then + target={} + lookuphash[lookupname]=target + end + local others=target[unicode] + local paired=lookupdata[1] + if others then + others[paired]=lookupdata + else + others={ [paired]=lookupdata } + target[unicode]=others + end + end, +} +local function prepare_lookups(tfmdata) + local rawdata=tfmdata.shared.rawdata + local resources=rawdata.resources + local lookuphash=resources.lookuphash + local anchor_to_lookup=resources.anchor_to_lookup + local lookup_to_anchor=resources.lookup_to_anchor + local lookuptypes=resources.lookuptypes + local characters=tfmdata.characters + local descriptions=tfmdata.descriptions + for unicode,character in next,characters do + local description=descriptions[unicode] + if description then + local lookups=description.slookups + if lookups then + for lookupname,lookupdata in next,lookups do + action[lookuptypes[lookupname]](lookupdata,lookupname,unicode,lookuphash) + end + end + local lookups=description.mlookups + if lookups then + for lookupname,lookuplist in next,lookups do + local lookuptype=lookuptypes[lookupname] + for l=1,#lookuplist do + local lookupdata=lookuplist[l] + action[lookuptype](lookupdata,lookupname,unicode,lookuphash) + end + end + end + local list=description.kerns + if list then + for lookup,krn in next,list do + local target=lookuphash[lookup] + if target then + target[unicode]=krn + else + lookuphash[lookup]={ [unicode]=krn } + end + end + end + local list=description.anchors + if list then + for typ,anchors in next,list do + if typ=="mark" or typ=="cexit" then + for name,anchor in 
next,anchors do + local lookups=anchor_to_lookup[name] + if lookups then + for lookup,_ in next,lookups do + local target=lookuphash[lookup] + if target then + target[unicode]=anchors + else + lookuphash[lookup]={ [unicode]=anchors } + end + end + end + end + end + end + end + end + end +end +local function split(replacement,original) + local result={} + for i=1,#replacement do + result[original[i]]=replacement[i] + end + return result +end +local valid={ + coverage={ chainsub=true,chainpos=true,contextsub=true }, + reversecoverage={ reversesub=true }, + glyphs={ chainsub=true,chainpos=true }, +} +local function prepare_contextchains(tfmdata) + local rawdata=tfmdata.shared.rawdata + local resources=rawdata.resources + local lookuphash=resources.lookuphash + local lookuptags=resources.lookuptags + local lookups=rawdata.lookups + if lookups then + for lookupname,lookupdata in next,rawdata.lookups do + local lookuptype=lookupdata.type + if lookuptype then + local rules=lookupdata.rules + if rules then + local format=lookupdata.format + local validformat=valid[format] + if not validformat then + report_prepare("unsupported format %a",format) + elseif not validformat[lookuptype] then + report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookuptags[lookupname]) + else + local contexts=lookuphash[lookupname] + if not contexts then + contexts={} + lookuphash[lookupname]=contexts + end + local t,nt={},0 + for nofrules=1,#rules do + local rule=rules[nofrules] + local current=rule.current + local before=rule.before + local after=rule.after + local replacements=rule.replacements + local sequence={} + local nofsequences=0 + if before then + for n=1,#before do + nofsequences=nofsequences+1 + sequence[nofsequences]=before[n] + end + end + local start=nofsequences+1 + for n=1,#current do + nofsequences=nofsequences+1 + sequence[nofsequences]=current[n] + end + local stop=nofsequences + if after then + for n=1,#after do + nofsequences=nofsequences+1 + sequence[nofsequences]=after[n] + end + end + if sequence[1] then + nt=nt+1 + t[nt]={ nofrules,lookuptype,sequence,start,stop,rule.lookups,replacements } + for unic,_ in next,sequence[start] do + local cu=contexts[unic] + if not cu then + contexts[unic]=t + end + end + end + end + end + else + end + else + report_prepare("missing lookuptype for lookupname %a",lookuptags[lookupname]) + end + end + end +end +local function featuresinitializer(tfmdata,value) + if true then + local rawdata=tfmdata.shared.rawdata + local properties=rawdata.properties + if not properties.initialized then + local starttime=trace_preparing and os.clock() + local resources=rawdata.resources + resources.lookuphash=resources.lookuphash or {} + prepare_contextchains(tfmdata) + prepare_lookups(tfmdata) + properties.initialized=true + if trace_preparing then + report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,tfmdata.properties.fullname) + end + end + end +end +registerotffeature { + name="features", + description="features", + default=true, + initializers={ + position=1, + node=featuresinitializer, + }, + processors={ + node=featuresprocessor, + } +} +otf.handlers=handlers + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-otp']={ + version=1.001, + comment="companion to font-otf.lua (packing)", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} 
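-- what follows implements the table "packing" applied to cached font data:
-- subtables that occur many times (bounding boxes, kern pairs, anchors,
-- flags, ...) are stored once in data.tables and every further occurrence is
-- replaced by the index of that shared copy; unpackdata later resolves those
-- indices back into real tables
--
-- a minimal, self-contained sketch of the idea (illustrative only; the code
-- below serialises values with several strategies and packs in two stages):
local function pack (list)
  local pool, index, seen = { }, { }, { }
  for i = 1, #list do
    local key = table.concat (list[i], " ") -- naive serialisation as hash key
    local slot = seen[key]
    if not slot then
      slot = #pool + 1
      pool[slot] = list[i]
      seen[key] = slot
    end
    index[i] = slot -- identical tables end up sharing one pool entry
  end
  return pool, index
end
local pool, index = pack { { 1, 2 }, { 3, 4 }, { 1, 2 } }
-- pool now holds two tables and index is { 1, 2, 1 }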
+local next,type=next,type +local sort,concat=table.sort,table.concat +local sortedhash=table.sortedhash +local trace_packing=false trackers.register("otf.packing",function(v) trace_packing=v end) +local trace_loading=false trackers.register("otf.loading",function(v) trace_loading=v end) +local report_otf=logs.reporter("fonts","otf loading") +fonts=fonts or {} +local handlers=fonts.handlers or {} +fonts.handlers=handlers +local otf=handlers.otf or {} +handlers.otf=otf +local enhancers=otf.enhancers or {} +otf.enhancers=enhancers +local glists=otf.glists or { "gsub","gpos" } +otf.glists=glists +local criterium=1 +local threshold=0 +local function tabstr_normal(t) + local s={} + local n=0 + for k,v in next,t do + n=n+1 + if type(v)=="table" then + s[n]=k..">"..tabstr_normal(v) + elseif v==true then + s[n]=k.."+" + elseif v then + s[n]=k.."="..v + else + s[n]=k.."-" + end + end + if n==0 then + return "" + elseif n==1 then + return s[1] + else + sort(s) + return concat(s,",") + end +end +local function tabstr_flat(t) + local s={} + local n=0 + for k,v in next,t do + n=n+1 + s[n]=k.."="..v + end + if n==0 then + return "" + elseif n==1 then + return s[1] + else + sort(s) + return concat(s,",") + end +end +local function tabstr_mixed(t) + local s={} + local n=#t + if n==0 then + return "" + elseif n==1 then + local k=t[1] + if k==true then + return "++" + elseif k==false then + return "--" + else + return tostring(k) + end + else + for i=1,n do + local k=t[i] + if k==true then + s[i]="++" + elseif k==false then + s[i]="--" + else + s[i]=k + end + end + return concat(s,",") + end +end +local function tabstr_boolean(t) + local s={} + local n=0 + for k,v in next,t do + n=n+1 + if v then + s[n]=k.."+" + else + s[n]=k.."-" + end + end + if n==0 then + return "" + elseif n==1 then + return s[1] + else + sort(s) + return concat(s,",") + end +end +local function packdata(data) + if data then + local h,t,c={},{},{} + local hh,tt,cc={},{},{} + local nt,ntt=0,0 + local function pack_normal(v) + local tag=tabstr_normal(v) + local ht=h[tag] + if ht then + c[ht]=c[ht]+1 + return ht + else + nt=nt+1 + t[nt]=v + h[tag]=nt + c[nt]=1 + return nt + end + end + local function pack_flat(v) + local tag=tabstr_flat(v) + local ht=h[tag] + if ht then + c[ht]=c[ht]+1 + return ht + else + nt=nt+1 + t[nt]=v + h[tag]=nt + c[nt]=1 + return nt + end + end + local function pack_boolean(v) + local tag=tabstr_boolean(v) + local ht=h[tag] + if ht then + c[ht]=c[ht]+1 + return ht + else + nt=nt+1 + t[nt]=v + h[tag]=nt + c[nt]=1 + return nt + end + end + local function pack_indexed(v) + local tag=concat(v," ") + local ht=h[tag] + if ht then + c[ht]=c[ht]+1 + return ht + else + nt=nt+1 + t[nt]=v + h[tag]=nt + c[nt]=1 + return nt + end + end + local function pack_mixed(v) + local tag=tabstr_mixed(v) + local ht=h[tag] + if ht then + c[ht]=c[ht]+1 + return ht + else + nt=nt+1 + t[nt]=v + h[tag]=nt + c[nt]=1 + return nt + end + end + local function pack_final(v) + if c[v]<=criterium then + return t[v] + else + local hv=hh[v] + if hv then + return hv + else + ntt=ntt+1 + tt[ntt]=t[v] + hh[v]=ntt + cc[ntt]=c[v] + return ntt + end + end + end + local function success(stage,pass) + if nt==0 then + if trace_loading or trace_packing then + report_otf("pack quality: nothing to pack") + end + return false + elseif nt>=threshold then + local one,two,rest=0,0,0 + if pass==1 then + for k,v in next,c do + if v==1 then + one=one+1 + elseif v==2 then + two=two+1 + else + rest=rest+1 + end + end + else + for k,v in next,cc do + if v>20 then + 
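-- second pass: the shared tables are bucketed by reference count (1-10,
-- 11-20, above 20) for the pack quality report; this branch counts the
-- entries referenced more than twenty times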
rest=rest+1 + elseif v>10 then + two=two+1 + else + one=one+1 + end + end + data.tables=tt + end + if trace_loading or trace_packing then + report_otf("pack quality: stage %s, pass %s, %s packed, 1-10:%s, 11-20:%s, rest:%s (criterium: %s)",stage,pass,one+two+rest,one,two,rest,criterium) + end + return true + else + if trace_loading or trace_packing then + report_otf("pack quality: stage %s, pass %s, %s packed, aborting pack (threshold: %s)",stage,pass,nt,threshold) + end + return false + end + end + local function packers(pass) + if pass==1 then + return pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed + else + return pack_final,pack_final,pack_final,pack_final,pack_final + end + end + local resources=data.resources + local lookuptypes=resources.lookuptypes + for pass=1,2 do + if trace_packing then + report_otf("start packing: stage 1, pass %s",pass) + end + local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass) + for unicode,description in next,data.descriptions do + local boundingbox=description.boundingbox + if boundingbox then + description.boundingbox=pack_indexed(boundingbox) + end + local slookups=description.slookups + if slookups then + for tag,slookup in next,slookups do + local what=lookuptypes[tag] + if what=="pair" then + local t=slookup[2] if t then slookup[2]=pack_indexed(t) end + local t=slookup[3] if t then slookup[3]=pack_indexed(t) end + elseif what~="substitution" then + slookups[tag]=pack_indexed(slookup) + end + end + end + local mlookups=description.mlookups + if mlookups then + for tag,mlookup in next,mlookups do + local what=lookuptypes[tag] + if what=="pair" then + for i=1,#mlookup do + local lookup=mlookup[i] + local t=lookup[2] if t then lookup[2]=pack_indexed(t) end + local t=lookup[3] if t then lookup[3]=pack_indexed(t) end + end + elseif what~="substitution" then + for i=1,#mlookup do + mlookup[i]=pack_indexed(mlookup[i]) + end + end + end + end + local kerns=description.kerns + if kerns then + for tag,kern in next,kerns do + kerns[tag]=pack_flat(kern) + end + end + local math=description.math + if math then + local kerns=math.kerns + if kerns then + for tag,kern in next,kerns do + kerns[tag]=pack_normal(kern) + end + end + end + local anchors=description.anchors + if anchors then + for what,anchor in next,anchors do + if what=="baselig" then + for _,a in next,anchor do + for k=1,#a do + a[k]=pack_indexed(a[k]) + end + end + else + for k,v in next,anchor do + anchor[k]=pack_indexed(v) + end + end + end + end + local altuni=description.altuni + if altuni then + for i=1,#altuni do + altuni[i]=pack_flat(altuni[i]) + end + end + end + local lookups=data.lookups + if lookups then + for _,lookup in next,lookups do + local rules=lookup.rules + if rules then + for i=1,#rules do + local rule=rules[i] + local r=rule.before if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end + local r=rule.after if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end + local r=rule.current if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end + local r=rule.replacements if r then rule.replacements=pack_flat (r) end + local r=rule.lookups if r then rule.lookups=pack_indexed(r) end + end + end + end + end + local anchor_to_lookup=resources.anchor_to_lookup + if anchor_to_lookup then + for anchor,lookup in next,anchor_to_lookup do + anchor_to_lookup[anchor]=pack_normal(lookup) + end + end + local lookup_to_anchor=resources.lookup_to_anchor + if lookup_to_anchor then + for lookup,anchor in next,lookup_to_anchor do + 
lookup_to_anchor[lookup]=pack_normal(anchor) + end + end + local sequences=resources.sequences + if sequences then + for feature,sequence in next,sequences do + local flags=sequence.flags + if flags then + sequence.flags=pack_normal(flags) + end + local subtables=sequence.subtables + if subtables then + sequence.subtables=pack_normal(subtables) + end + local features=sequence.features + if features then + for script,feature in next,features do + features[script]=pack_normal(feature) + end + end + local order=sequence.order + if order then + sequence.order=pack_indexed(order) + end + local markclass=sequence.markclass + if markclass then + sequence.markclass=pack_boolean(markclass) + end + end + end + local lookups=resources.lookups + if lookups then + for name,lookup in next,lookups do + local flags=lookup.flags + if flags then + lookup.flags=pack_normal(flags) + end + local subtables=lookup.subtables + if subtables then + lookup.subtables=pack_normal(subtables) + end + end + end + local features=resources.features + if features then + for _,what in next,glists do + local list=features[what] + if list then + for feature,spec in next,list do + list[feature]=pack_normal(spec) + end + end + end + end + if not success(1,pass) then + return + end + end + if nt>0 then + for pass=1,2 do + if trace_packing then + report_otf("start packing: stage 2, pass %s",pass) + end + local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass) + for unicode,description in next,data.descriptions do + local kerns=description.kerns + if kerns then + description.kerns=pack_normal(kerns) + end + local math=description.math + if math then + local kerns=math.kerns + if kerns then + math.kerns=pack_normal(kerns) + end + end + local anchors=description.anchors + if anchors then + description.anchors=pack_normal(anchors) + end + local mlookups=description.mlookups + if mlookups then + for tag,mlookup in next,mlookups do + mlookups[tag]=pack_normal(mlookup) + end + end + local altuni=description.altuni + if altuni then + description.altuni=pack_normal(altuni) + end + end + local lookups=data.lookups + if lookups then + for _,lookup in next,lookups do + local rules=lookup.rules + if rules then + for i=1,#rules do + local rule=rules[i] + local r=rule.before if r then rule.before=pack_normal(r) end + local r=rule.after if r then rule.after=pack_normal(r) end + local r=rule.current if r then rule.current=pack_normal(r) end + end + end + end + end + local sequences=resources.sequences + if sequences then + for feature,sequence in next,sequences do + sequence.features=pack_normal(sequence.features) + end + end + if not success(2,pass) then + end + end + for pass=1,2 do + local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass) + for unicode,description in next,data.descriptions do + local slookups=description.slookups + if slookups then + description.slookups=pack_normal(slookups) + end + local mlookups=description.mlookups + if mlookups then + description.mlookups=pack_normal(mlookups) + end + end + end + end + end +end +local unpacked_mt={ + __index=function(t,k) + t[k]=false + return k + end +} +local function unpackdata(data) + if data then + local tables=data.tables + if tables then + local resources=data.resources + local lookuptypes=resources.lookuptypes + local unpacked={} + setmetatable(unpacked,unpacked_mt) + for unicode,description in next,data.descriptions do + local tv=tables[description.boundingbox] + if tv then + description.boundingbox=tv + end + local 
slookups=description.slookups + if slookups then + local tv=tables[slookups] + if tv then + description.slookups=tv + slookups=unpacked[tv] + end + if slookups then + for tag,lookup in next,slookups do + local what=lookuptypes[tag] + if what=="pair" then + local tv=tables[lookup[2]] + if tv then + lookup[2]=tv + end + local tv=tables[lookup[3]] + if tv then + lookup[3]=tv + end + elseif what~="substitution" then + local tv=tables[lookup] + if tv then + slookups[tag]=tv + end + end + end + end + end + local mlookups=description.mlookups + if mlookups then + local tv=tables[mlookups] + if tv then + description.mlookups=tv + mlookups=unpacked[tv] + end + if mlookups then + for tag,list in next,mlookups do + local tv=tables[list] + if tv then + mlookups[tag]=tv + list=unpacked[tv] + end + if list then + local what=lookuptypes[tag] + if what=="pair" then + for i=1,#list do + local lookup=list[i] + local tv=tables[lookup[2]] + if tv then + lookup[2]=tv + end + local tv=tables[lookup[3]] + if tv then + lookup[3]=tv + end + end + elseif what~="substitution" then + for i=1,#list do + local tv=tables[list[i]] + if tv then + list[i]=tv + end + end + end + end + end + end + end + local kerns=description.kerns + if kerns then + local tm=tables[kerns] + if tm then + description.kerns=tm + kerns=unpacked[tm] + end + if kerns then + for k,kern in next,kerns do + local tv=tables[kern] + if tv then + kerns[k]=tv + end + end + end + end + local math=description.math + if math then + local kerns=math.kerns + if kerns then + local tm=tables[kerns] + if tm then + math.kerns=tm + kerns=unpacked[tm] + end + if kerns then + for k,kern in next,kerns do + local tv=tables[kern] + if tv then + kerns[k]=tv + end + end + end + end + end + local anchors=description.anchors + if anchors then + local ta=tables[anchors] + if ta then + description.anchors=ta + anchors=unpacked[ta] + end + if anchors then + for tag,anchor in next,anchors do + if tag=="baselig" then + for _,list in next,anchor do + for i=1,#list do + local tv=tables[list[i]] + if tv then + list[i]=tv + end + end + end + else + for a,data in next,anchor do + local tv=tables[data] + if tv then + anchor[a]=tv + end + end + end + end + end + end + local altuni=description.altuni + if altuni then + local altuni=tables[altuni] + if altuni then + description.altuni=altuni + for i=1,#altuni do + local tv=tables[altuni[i]] + if tv then + altuni[i]=tv + end + end + end + end + end + local lookups=data.lookups + if lookups then + for _,lookup in next,lookups do + local rules=lookup.rules + if rules then + for i=1,#rules do + local rule=rules[i] + local before=rule.before + if before then + local tv=tables[before] + if tv then + rule.before=tv + before=unpacked[tv] + end + if before then + for i=1,#before do + local tv=tables[before[i]] + if tv then + before[i]=tv + end + end + end + end + local after=rule.after + if after then + local tv=tables[after] + if tv then + rule.after=tv + after=unpacked[tv] + end + if after then + for i=1,#after do + local tv=tables[after[i]] + if tv then + after[i]=tv + end + end + end + end + local current=rule.current + if current then + local tv=tables[current] + if tv then + rule.current=tv + current=unpacked[tv] + end + if current then + for i=1,#current do + local tv=tables[current[i]] + if tv then + current[i]=tv + end + end + end + end + local replacements=rule.replacements + if replacements then + local tv=tables[replacements] + if tv then + rule.replacements=tv + end + end + local lookups=rule.lookups + if lookups then + local 
tv=tables[lookups] + if tv then + rule.lookups=tv + end + end + end + end + end + end + local anchor_to_lookup=resources.anchor_to_lookup + if anchor_to_lookup then + for anchor,lookup in next,anchor_to_lookup do + local tv=tables[lookup] + if tv then + anchor_to_lookup[anchor]=tv + end + end + end + local lookup_to_anchor=resources.lookup_to_anchor + if lookup_to_anchor then + for lookup,anchor in next,lookup_to_anchor do + local tv=tables[anchor] + if tv then + lookup_to_anchor[lookup]=tv + end + end + end + local ls=resources.sequences + if ls then + for _,feature in next,ls do + local flags=feature.flags + if flags then + local tv=tables[flags] + if tv then + feature.flags=tv + end + end + local subtables=feature.subtables + if subtables then + local tv=tables[subtables] + if tv then + feature.subtables=tv + end + end + local features=feature.features + if features then + local tv=tables[features] + if tv then + feature.features=tv + features=unpacked[tv] + end + if features then + for script,data in next,features do + local tv=tables[data] + if tv then + features[script]=tv + end + end + end + end + local order=feature.order + if order then + local tv=tables[order] + if tv then + feature.order=tv + end + end + local markclass=feature.markclass + if markclass then + local tv=tables[markclass] + if tv then + feature.markclass=tv + end + end + end + end + local lookups=resources.lookups + if lookups then + for _,lookup in next,lookups do + local flags=lookup.flags + if flags then + local tv=tables[flags] + if tv then + lookup.flags=tv + end + end + local subtables=lookup.subtables + if subtables then + local tv=tables[subtables] + if tv then + lookup.subtables=tv + end + end + end + end + local features=resources.features + if features then + for _,what in next,glists do + local feature=features[what] + if feature then + for tag,spec in next,feature do + local tv=tables[spec] + if tv then + feature[tag]=tv + end + end + end + end + end + data.tables=nil + end + end +end +if otf.enhancers.register then + otf.enhancers.register("pack",packdata) + otf.enhancers.register("unpack",unpackdata) +end +otf.enhancers.unpack=unpackdata +otf.enhancers.pack=packdata + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['luatex-fonts-lua']={ + version=1.001, + comment="companion to luatex-*.tex", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end +local fonts=fonts +fonts.formats.lua="lua" +function fonts.readers.lua(specification) + local fullname=specification.filename or "" + if fullname=="" then + local forced=specification.forced or "" + if forced~="" then + fullname=specification.name.."."..forced + else + fullname=specification.name + end + end + local fullname=resolvers.findfile(fullname) or "" + if fullname~="" then + local loader=loadfile(fullname) + loader=loader and loader() + return loader and loader(specification) + end +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-def']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local 
format,gmatch,match,find,lower,gsub=string.format,string.gmatch,string.match,string.find,string.lower,string.gsub +local tostring,next=tostring,next +local lpegmatch=lpeg.match +local suffixonly,removesuffix=file.suffix,file.removesuffix +local allocate=utilities.storage.allocate +local trace_defining=false trackers .register("fonts.defining",function(v) trace_defining=v end) +local directive_embedall=false directives.register("fonts.embedall",function(v) directive_embedall=v end) +trackers.register("fonts.loading","fonts.defining","otf.loading","afm.loading","tfm.loading") +trackers.register("fonts.all","fonts.*","otf.*","afm.*","tfm.*") +local report_defining=logs.reporter("fonts","defining") +local fonts=fonts +local fontdata=fonts.hashes.identifiers +local readers=fonts.readers +local definers=fonts.definers +local specifiers=fonts.specifiers +local constructors=fonts.constructors +local fontgoodies=fonts.goodies +readers.sequence=allocate { 'otf','ttf','afm','tfm','lua' } +local variants=allocate() +specifiers.variants=variants +definers.methods=definers.methods or {} +local internalized=allocate() +local lastdefined=nil +local loadedfonts=constructors.loadedfonts +local designsizes=constructors.designsizes +local resolvefile=fontgoodies and fontgoodies.filenames and fontgoodies.filenames.resolve or function(s) return s end +local splitter,splitspecifiers=nil,"" +local P,C,S,Cc=lpeg.P,lpeg.C,lpeg.S,lpeg.Cc +local left=P("(") +local right=P(")") +local colon=P(":") +local space=P(" ") +definers.defaultlookup="file" +local prefixpattern=P(false) +local function addspecifier(symbol) + splitspecifiers=splitspecifiers..symbol + local method=S(splitspecifiers) + local lookup=C(prefixpattern)*colon + local sub=left*C(P(1-left-right-method)^1)*right + local specification=C(method)*C(P(1)^1) + local name=C((1-sub-specification)^1) + splitter=P((lookup+Cc(""))*name*(sub+Cc(""))*(specification+Cc(""))) +end +local function addlookup(str,default) + prefixpattern=prefixpattern+P(str) +end +definers.addlookup=addlookup +addlookup("file") +addlookup("name") +addlookup("spec") +local function getspecification(str) + return lpegmatch(splitter,str or "") +end +definers.getspecification=getspecification +function definers.registersplit(symbol,action,verbosename) + addspecifier(symbol) + variants[symbol]=action + if verbosename then + variants[verbosename]=action + end +end +local function makespecification(specification,lookup,name,sub,method,detail,size) + size=size or 655360 + if not lookup or lookup=="" then + lookup=definers.defaultlookup + end + if trace_defining then + report_defining("specification %a, lookup %a, name %a, sub %a, method %a, detail %a", + specification,lookup,name,sub,method,detail) + end + local t={ + lookup=lookup, + specification=specification, + size=size, + name=name, + sub=sub, + method=method, + detail=detail, + resolved="", + forced="", + features={}, + } + return t +end +definers.makespecification=makespecification +function definers.analyze(specification,size) + local lookup,name,sub,method,detail=getspecification(specification or "") + return makespecification(specification,lookup,name,sub,method,detail,size) +end +definers.resolvers=definers.resolvers or {} +local resolvers=definers.resolvers +function resolvers.file(specification) + local name=resolvefile(specification.name) + local suffix=lower(suffixonly(name)) + if fonts.formats[suffix] then + specification.forced=suffix + specification.forcedname=name + specification.name=removesuffix(name) + else + 
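-- the suffix is not a known font format: keep the resolved name untouched and
-- let the reader sequence in definers.loadfont work out the format later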
specification.name=name + end +end +function resolvers.name(specification) + local resolve=fonts.names.resolve + if resolve then + local resolved,sub=resolve(specification.name,specification.sub,specification) + if resolved then + specification.resolved=resolved + specification.sub=sub + local suffix=lower(suffixonly(resolved)) + if fonts.formats[suffix] then + specification.forced=suffix + specification.forcedname=resolved + specification.name=removesuffix(resolved) + else + specification.name=resolved + end + end + else + resolvers.file(specification) + end +end +function resolvers.spec(specification) + local resolvespec=fonts.names.resolvespec + if resolvespec then + local resolved,sub=resolvespec(specification.name,specification.sub,specification) + if resolved then + specification.resolved=resolved + specification.sub=sub + specification.forced=lower(suffixonly(resolved)) + specification.forcedname=resolved + specification.name=removesuffix(resolved) + end + else + resolvers.name(specification) + end +end +function definers.resolve(specification) + if not specification.resolved or specification.resolved=="" then + local r=resolvers[specification.lookup] + if r then + r(specification) + end + end + if specification.forced=="" then + specification.forced=nil + specification.forcedname=nil + end + specification.hash=lower(specification.name..' @ '..constructors.hashfeatures(specification)) + if specification.sub and specification.sub~="" then + specification.hash=specification.sub..' @ '..specification.hash + end + return specification +end +function definers.applypostprocessors(tfmdata) + local postprocessors=tfmdata.postprocessors + if postprocessors then + local properties=tfmdata.properties + for i=1,#postprocessors do + local extrahash=postprocessors[i](tfmdata) + if type(extrahash)=="string" and extrahash~="" then + extrahash=gsub(lower(extrahash),"[^a-z]","-") + properties.fullname=format("%s-%s",properties.fullname,extrahash) + end + end + end + return tfmdata +end +local function checkembedding(tfmdata) + local properties=tfmdata.properties + local embedding + if directive_embedall then + embedding="full" + elseif properties and properties.filename and constructors.dontembed[properties.filename] then + embedding="no" + else + embedding="subset" + end + if properties then + properties.embedding=embedding + else + tfmdata.properties={ embedding=embedding } + end + tfmdata.embedding=embedding +end +function definers.loadfont(specification) + local hash=constructors.hashinstance(specification) + local tfmdata=loadedfonts[hash] + if not tfmdata then + local forced=specification.forced or "" + if forced~="" then + local reader=readers[lower(forced)] + tfmdata=reader and reader(specification) + if not tfmdata then + report_defining("forced type %a of %a not found",forced,specification.name) + end + else + local sequence=readers.sequence + for s=1,#sequence do + local reader=sequence[s] + if readers[reader] then + if trace_defining then + report_defining("trying (reader sequence driven) type %a for %a with file %a",reader,specification.name,specification.filename) + end + tfmdata=readers[reader](specification) + if tfmdata then + break + else + specification.filename=nil + end + end + end + end + if tfmdata then + tfmdata=definers.applypostprocessors(tfmdata) + checkembedding(tfmdata) + loadedfonts[hash]=tfmdata + designsizes[specification.hash]=tfmdata.parameters.designsize + end + end + if not tfmdata then + report_defining("font with asked name %a is not found using lookup 
%a",specification.name,specification.lookup) + end + return tfmdata +end +function constructors.checkvirtualids() +end +function constructors.readanddefine(name,size) + local specification=definers.analyze(name,size) + local method=specification.method + if method and variants[method] then + specification=variants[method](specification) + end + specification=definers.resolve(specification) + local hash=constructors.hashinstance(specification) + local id=definers.registered(hash) + if not id then + local tfmdata=definers.loadfont(specification) + if tfmdata then + tfmdata.properties.hash=hash + constructors.checkvirtualids(tfmdata) + id=font.define(tfmdata) + definers.register(tfmdata,id) + else + id=0 + end + end + return fontdata[id],id +end +function definers.current() + return lastdefined +end +function definers.registered(hash) + local id=internalized[hash] + return id,id and fontdata[id] +end +function definers.register(tfmdata,id) + if tfmdata and id then + local hash=tfmdata.properties.hash + if not hash then + report_defining("registering font, id %a, name %a, invalid hash",id,tfmdata.properties.filename or "?") + elseif not internalized[hash] then + internalized[hash]=id + if trace_defining then + report_defining("registering font, id %s, hash %a",id,hash) + end + fontdata[id]=tfmdata + end + end +end +function definers.read(specification,size,id) + statistics.starttiming(fonts) + if type(specification)=="string" then + specification=definers.analyze(specification,size) + end + local method=specification.method + if method and variants[method] then + specification=variants[method](specification) + end + specification=definers.resolve(specification) + local hash=constructors.hashinstance(specification) + local tfmdata=definers.registered(hash) + if tfmdata then + if trace_defining then + report_defining("already hashed: %s",hash) + end + else + tfmdata=definers.loadfont(specification) + if tfmdata then + if trace_defining then + report_defining("loaded and hashed: %s",hash) + end + tfmdata.properties.hash=hash + if id then + definers.register(tfmdata,id) + end + else + if trace_defining then + report_defining("not loaded and hashed: %s",hash) + end + end + end + lastdefined=tfmdata or id + if not tfmdata then + report_defining("unknown font %a, loading aborted",specification.name) + elseif trace_defining and type(tfmdata)=="table" then + local properties=tfmdata.properties or {} + local parameters=tfmdata.parameters or {} + report_defining("using %a font with id %a, name %a, size %a, bytes %a, encoding %a, fullname %a, filename %a", + properties.format or "unknown",id,properties.name,parameters.size,properties.encodingbytes, + properties.encodingname,properties.fullname,file.basename(properties.filename)) + end + statistics.stoptiming(fonts) + return tfmdata +end +function font.getfont(id) + return fontdata[id] +end +callbacks.register('define_font',definers.read,"definition of fonts (tfmdata preparation)") + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['luatex-font-def']={ + version=1.001, + comment="companion to luatex-*.tex", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end +local fonts=fonts +fonts.constructors.namemode="specification" +function fonts.definers.getspecification(str) + return 
"",str,"",":",str +end +local list={} +local function issome () list.lookup='name' end +local function isfile () list.lookup='file' end +local function isname () list.lookup='name' end +local function thename(s) list.name=s end +local function issub (v) list.sub=v end +local function iscrap (s) list.crap=string.lower(s) end +local function iskey (k,v) list[k]=v end +local function istrue (s) list[s]=true end +local function isfalse(s) list[s]=false end +local P,S,R,C=lpeg.P,lpeg.S,lpeg.R,lpeg.C +local spaces=P(" ")^0 +local namespec=(1-S("/:("))^0 +local crapspec=spaces*P("/")*(((1-P(":"))^0)/iscrap)*spaces +local filename_1=P("file:")/isfile*(namespec/thename) +local filename_2=P("[")*P(true)/isname*(((1-P("]"))^0)/thename)*P("]") +local fontname_1=P("name:")/isname*(namespec/thename) +local fontname_2=P(true)/issome*(namespec/thename) +local sometext=(R("az","AZ","09")+S("+-."))^1 +local truevalue=P("+")*spaces*(sometext/istrue) +local falsevalue=P("-")*spaces*(sometext/isfalse) +local keyvalue=(C(sometext)*spaces*P("=")*spaces*C(sometext))/iskey +local somevalue=sometext/istrue +local subvalue=P("(")*(C(P(1-S("()"))^1)/issub)*P(")") +local option=spaces*(keyvalue+falsevalue+truevalue+somevalue)*spaces +local options=P(":")*spaces*(P(";")^0*option)^0 +local pattern=(filename_1+filename_2+fontname_1+fontname_2)*subvalue^0*crapspec^0*options^0 +local function colonized(specification) + list={} + lpeg.match(pattern,specification.specification) + list.crap=nil + if list.name then + specification.name=list.name + list.name=nil + end + if list.lookup then + specification.lookup=list.lookup + list.lookup=nil + end + if list.sub then + specification.sub=list.sub + list.sub=nil + end + specification.features.normal=fonts.handlers.otf.features.normalize(list) + return specification +end +fonts.definers.registersplit(":",colonized,"cryptic") +fonts.definers.registersplit("",colonized,"more cryptic") +function fonts.definers.applypostprocessors(tfmdata) + local postprocessors=tfmdata.postprocessors + if postprocessors then + for i=1,#postprocessors do + local extrahash=postprocessors[i](tfmdata) + if type(extrahash)=="string" and extrahash~="" then + extrahash=string.gsub(lower(extrahash),"[^a-z]","-") + tfmdata.properties.fullname=format("%s-%s",tfmdata.properties.fullname,extrahash) + end + end + end + return tfmdata +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['luatex-fonts-ext']={ + version=1.001, + comment="companion to luatex-*.tex", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end +local fonts=fonts +local otffeatures=fonts.constructors.newfeatures("otf") +local function initializeitlc(tfmdata,value) + if value then + local parameters=tfmdata.parameters + local italicangle=parameters.italicangle + if italicangle and italicangle~=0 then + local properties=tfmdata.properties + local factor=tonumber(value) or 1 + properties.hasitalics=true + properties.autoitalicamount=factor*(parameters.uwidth or 40)/2 + end + end +end +otffeatures.register { + name="itlc", + description="italic correction", + initializers={ + base=initializeitlc, + node=initializeitlc, + } +} +local function initializeslant(tfmdata,value) + value=tonumber(value) + if not value then + value=0 + elseif value>1 then + value=1 + elseif value<-1 then 
+ value=-1 + end + tfmdata.parameters.slantfactor=value +end +otffeatures.register { + name="slant", + description="slant glyphs", + initializers={ + base=initializeslant, + node=initializeslant, + } +} +local function initializeextend(tfmdata,value) + value=tonumber(value) + if not value then + value=0 + elseif value>10 then + value=10 + elseif value<-10 then + value=-10 + end + tfmdata.parameters.extendfactor=value +end +otffeatures.register { + name="extend", + description="scale glyphs horizontally", + initializers={ + base=initializeextend, + node=initializeextend, + } +} +fonts.protrusions=fonts.protrusions or {} +fonts.protrusions.setups=fonts.protrusions.setups or {} +local setups=fonts.protrusions.setups +local function initializeprotrusion(tfmdata,value) + if value then + local setup=setups[value] + if setup then + local factor,left,right=setup.factor or 1,setup.left or 1,setup.right or 1 + local emwidth=tfmdata.parameters.quad + tfmdata.parameters.protrusion={ + auto=true, + } + for i,chr in next,tfmdata.characters do + local v,pl,pr=setup[i],nil,nil + if v then + pl,pr=v[1],v[2] + end + if pl and pl~=0 then chr.left_protruding=left*pl*factor end + if pr and pr~=0 then chr.right_protruding=right*pr*factor end + end + end + end +end +otffeatures.register { + name="protrusion", + description="shift characters into the left and or right margin", + initializers={ + base=initializeprotrusion, + node=initializeprotrusion, + } +} +fonts.expansions=fonts.expansions or {} +fonts.expansions.setups=fonts.expansions.setups or {} +local setups=fonts.expansions.setups +local function initializeexpansion(tfmdata,value) + if value then + local setup=setups[value] + if setup then + local factor=setup.factor or 1 + tfmdata.parameters.expansion={ + stretch=10*(setup.stretch or 0), + shrink=10*(setup.shrink or 0), + step=10*(setup.step or 0), + auto=true, + } + for i,chr in next,tfmdata.characters do + local v=setup[i] + if v and v~=0 then + chr.expansion_factor=v*factor + else + chr.expansion_factor=factor + end + end + end + end +end +otffeatures.register { + name="expansion", + description="apply hz optimization", + initializers={ + base=initializeexpansion, + node=initializeexpansion, + } +} +function fonts.loggers.onetimemessage() end +local byte=string.byte +fonts.expansions.setups['default']={ + stretch=2,shrink=2,step=.5,factor=1, + [byte('A')]=0.5,[byte('B')]=0.7,[byte('C')]=0.7,[byte('D')]=0.5,[byte('E')]=0.7, + [byte('F')]=0.7,[byte('G')]=0.5,[byte('H')]=0.7,[byte('K')]=0.7,[byte('M')]=0.7, + [byte('N')]=0.7,[byte('O')]=0.5,[byte('P')]=0.7,[byte('Q')]=0.5,[byte('R')]=0.7, + [byte('S')]=0.7,[byte('U')]=0.7,[byte('W')]=0.7,[byte('Z')]=0.7, + [byte('a')]=0.7,[byte('b')]=0.7,[byte('c')]=0.7,[byte('d')]=0.7,[byte('e')]=0.7, + [byte('g')]=0.7,[byte('h')]=0.7,[byte('k')]=0.7,[byte('m')]=0.7,[byte('n')]=0.7, + [byte('o')]=0.7,[byte('p')]=0.7,[byte('q')]=0.7,[byte('s')]=0.7,[byte('u')]=0.7, + [byte('w')]=0.7,[byte('z')]=0.7, + [byte('2')]=0.7,[byte('3')]=0.7,[byte('6')]=0.7,[byte('8')]=0.7,[byte('9')]=0.7, +} +fonts.protrusions.setups['default']={ + factor=1,left=1,right=1, + [0x002C]={ 0,1 }, + [0x002E]={ 0,1 }, + [0x003A]={ 0,1 }, + [0x003B]={ 0,1 }, + [0x002D]={ 0,1 }, + [0x2013]={ 0,0.50 }, + [0x2014]={ 0,0.33 }, + [0x3001]={ 0,1 }, + [0x3002]={ 0,1 }, + [0x060C]={ 0,1 }, + [0x061B]={ 0,1 }, + [0x06D4]={ 0,1 }, +} +fonts.handlers.otf.features.normalize=function(t) + if t.rand then + t.rand="random" + end + return t +end +function fonts.helpers.nametoslot(name) + local t=type(name) + if 
t=="string" then + local tfmdata=fonts.hashes.identifiers[currentfont()] + local shared=tfmdata and tfmdata.shared + local fntdata=shared and shared.rawdata + return fntdata and fntdata.resources.unicodes[name] + elseif t=="number" then + return n + end +end +fonts.encodings=fonts.encodings or {} +local reencodings={} +fonts.encodings.reencodings=reencodings +local function specialreencode(tfmdata,value) + local encoding=value and reencodings[value] + if encoding then + local temp={} + local char=tfmdata.characters + for k,v in next,encoding do + temp[k]=char[v] + end + for k,v in next,temp do + char[k]=temp[k] + end + return string.format("reencoded:%s",value) + end +end +local function reencode(tfmdata,value) + tfmdata.postprocessors=tfmdata.postprocessors or {} + table.insert(tfmdata.postprocessors, + function(tfmdata) + return specialreencode(tfmdata,value) + end + ) +end +otffeatures.register { + name="reencode", + description="reencode characters", + manipulators={ + base=reencode, + node=reencode, + } +} + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['luatex-fonts-cbk']={ + version=1.001, + comment="companion to luatex-*.tex", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +if context then + texio.write_nl("fatal error: this module is not for context") + os.exit() +end +local fonts=fonts +local nodes=nodes +local traverse_id=node.traverse_id +local glyph_code=nodes.nodecodes.glyph +local ligaturing=node.ligaturing +local kerning=node.kerning +function node.ligaturing() texio.write_nl("warning: node.ligaturing is already applied") end +function node.kerning () texio.write_nl("warning: node.kerning is already applied") end +function nodes.handlers.characters(head) + local fontdata=fonts.hashes.identifiers + if fontdata then + local usedfonts,basefonts,prevfont,basefont={},{},nil,nil + for n in traverse_id(glyph_code,head) do + local font=n.font + if font~=prevfont then + if basefont then + basefont[2]=n.prev + end + prevfont=font + local used=usedfonts[font] + if not used then + local tfmdata=fontdata[font] + if tfmdata then + local shared=tfmdata.shared + if shared then + local processors=shared.processes + if processors and #processors>0 then + usedfonts[font]=processors + else + basefont={ n,nil } + basefonts[#basefonts+1]=basefont + end + end + end + end + end + end + if next(usedfonts) then + for font,processors in next,usedfonts do + for i=1,#processors do + head=processors[i](head,font,0) or head + end + end + end + if #basefonts>0 then + for i=1,#basefonts do + local range=basefonts[i] + local start,stop=range[1],range[2] + if stop then + ligaturing(start,stop) + kerning(start,stop) + else + ligaturing(start) + kerning(start) + end + end + end + return head,true + else + return head,false + end +end +function nodes.simple_font_handler(head) + head=nodes.handlers.characters(head) + nodes.injections.handler(head) + nodes.handlers.protectglyphs(head) + return head +end + +end -- closure -- cgit v1.2.3 From 84c0a997a775b116df79fb0ed5c8f7e0bf09c1fd Mon Sep 17 00:00:00 2001 From: Philipp Gesang Date: Tue, 9 Dec 2014 23:44:02 +0100 Subject: [status] update paths References to currently unpackaged files from the fontloader distribution have been commented. 
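The names table patched below maps each tracked file to the directory it is
expected in; entries for fontloader files that are not shipped yet are
commented out rather than removed, so they can be reactivated once those files
are packaged. As a rough illustration of how such a { directory, filename }
manifest can be consumed (a sketch only: the rest of mkstatus is not shown in
this hunk, and the lfs-based existence check is an assumption rather than its
actual implementation):

    -- sketch: split a manifest into files that exist and files that are still
    -- missing (the latter being the candidates for commenting out)
    local lfs = require "lfs"

    local function split_manifest (names)
      local found, missing = { }, { }
      for i = 1, #names do
        local dir, base = names[i][1], names[i][2]
        local path = dir .. "/" .. base
        if lfs.attributes (path, "mode") == "file" then
          found[#found + 1] = path
        else
          missing[#missing + 1] = path
        end
      end
      return found, missing
    end

Keeping the unavailable entries as comments rather than deleting them
preserves the intended final list while the repackaging catches up.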
--- scripts/mkstatus | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/scripts/mkstatus b/scripts/mkstatus index 9d04ef9..e18abb7 100755 --- a/scripts/mkstatus +++ b/scripts/mkstatus @@ -37,25 +37,25 @@ local filelist = "./build/luaotfload-status.lua" --- result local names = { --- only the runtime files and scripts { "src", "luaotfload-auxiliary.lua", }, - { "src/fontloader", "fontloader-basics-gen.lua", }, - { "src/fontloader", "fontloader-basics-nod.lua", }, + { "src/fontloader/runtime", "fontloader-basics-gen.lua", }, + --{ "src/fontloader", "fontloader-basics-nod.lua", }, { "build", "luaotfload-characters.lua", }, { "src", "luaotfload-colors.lua", }, { "src", "luaotfload-database.lua", }, { "src", "luaotfload-diagnostics.lua", }, { "src", "luaotfload-features.lua", }, - { "src/fontloader", "fontloader-fonts-cbk.lua", }, - { "src/fontloader", "fontloader-fonts-def.lua", }, - { "src/fontloader", "fontloader-fonts-enc.lua", }, - { "src/fontloader", "fontloader-fonts-ext.lua", }, - { "src/fontloader", "fontloader-fonts-lua.lua", }, - { "src/fontloader", "fontloader-fonts-tfm.lua", }, + --{ "src/fontloader", "fontloader-fonts-cbk.lua", }, + --{ "src/fontloader", "fontloader-fonts-def.lua", }, + --{ "src/fontloader", "fontloader-fonts-enc.lua", }, + --{ "src/fontloader", "fontloader-fonts-ext.lua", }, + --{ "src/fontloader", "fontloader-fonts-lua.lua", }, + --{ "src/fontloader", "fontloader-fonts-tfm.lua", }, { "build", "luaotfload-glyphlist.lua", }, { "src", "luaotfload-letterspace.lua", }, { "src", "luaotfload-loaders.lua", }, { "src", "luaotfload-log.lua", }, { "src", "luaotfload-main.lua", }, - { "src/fontloader", "fontloader-fontloader.lua", }, + { "src/fontloader/runtime", "fontloader-fontloader.lua", }, { "src", "luaotfload-override.lua", }, { "src", "luaotfload-parsers.lua", }, { "src", "luaotfload-tool.lua", }, -- cgit v1.2.3 From 9a778b19fbfd69dc7bee8b88b8640ebf2451047e Mon Sep 17 00:00:00 2001 From: Philipp Gesang Date: Wed, 10 Dec 2014 08:07:41 +0100 Subject: [import] add preliminary file inspection --- scripts/mkimport | 45 ++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 44 insertions(+), 1 deletion(-) diff --git a/scripts/mkimport b/scripts/mkimport index 1b4dc39..a748459 100644 --- a/scripts/mkimport +++ b/scripts/mkimport @@ -45,6 +45,17 @@ local paths = { fontloader = "tex/generic/context/luatex", } +local subdirs = { + "runtime", + "misc" +} + +local searchdirs = { + --- order is important! 
+ fontloader_subdir, + context_root +} + local prefixes = { context = nil, fontloader = "luatex", @@ -427,10 +438,42 @@ local import = function (arg) return 0 end --[[ [local import = function (arg)] ]] +local tell = function (arg) + local target = arg[2] + if not target then die "no filename given" end + + local look_for + --- pick a file + if lfs.isfile (target) then --- absolute path given + look_for = target + goto found + else + --- search in local tree + for i = 1, #searchdirs do + local root = searchdirs[i] + for j = 1, #subdirs do + local dir = file.join (searchdirs[i], subdirs[j]) + local file = file.join (dir, target) + if lfs.isfile (file) then + look_for = file + goto found + end + end + end + end + + if not look_for then + die ("file %s not found in any of the search locations", target) + end + +::found:: + status ("found file %s at %s", target, look_for) +end + local job_kind = table.mirrored { news = news, import = import, - tell = function () end, + tell = tell, } ------------------------------------------------------------------------------- -- cgit v1.2.3 From 27a805594949ffb973d0055380cb354b043e1d0f Mon Sep 17 00:00:00 2001 From: Philipp Gesang Date: Wed, 10 Dec 2014 08:10:12 +0100 Subject: [import] add stub for usage message --- scripts/mkimport | 22 +++++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/scripts/mkimport b/scripts/mkimport index a748459..651ff04 100644 --- a/scripts/mkimport +++ b/scripts/mkimport @@ -438,7 +438,7 @@ local import = function (arg) return 0 end --[[ [local import = function (arg)] ]] -local tell = function (arg) +local search = function (arg) local target = arg[2] if not target then die "no filename given" end @@ -462,18 +462,30 @@ local tell = function (arg) end end - if not look_for then - die ("file %s not found in any of the search locations", target) - end +::fail:: + if not look_for then return end ::found:: status ("found file %s at %s", target, look_for) + return look_for +end + +local tell = function (target) + local location = search (target) + if not location then + die ("file %s not found in any of the search locations", target) + end +end + +local help = function () + die "not implemented :/" end local job_kind = table.mirrored { news = news, import = import, tell = tell, + help = help, } ------------------------------------------------------------------------------- @@ -490,7 +502,7 @@ end ------------------------------------------------------------------------------- local main = function () - local job = arg[1] or "news" + local job = arg[1] or "help" local runner = check_job (job) return runner(arg) end -- cgit v1.2.3 From 4d9430560ea1c8d02b5737a6d0bc7115d1f4b72e Mon Sep 17 00:00:00 2001 From: Philipp Gesang Date: Thu, 11 Dec 2014 22:06:29 +0100 Subject: [import] add usage message --- scripts/mkimport | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/scripts/mkimport b/scripts/mkimport index 651ff04..7d6f984 100644 --- a/scripts/mkimport +++ b/scripts/mkimport @@ -478,7 +478,14 @@ local tell = function (target) end local help = function () - die "not implemented :/" + iowrite "usage: mkinfo []\n" + iowrite "\n" + iowrite "Where is one of\n" + iowrite " help Print this help message\n" + iowrite " tell Display information about a file’s integration\n" + iowrite " news Check Context for updated files\n" + iowrite " import Update with files from Context\n" + iowrite "\n" end local job_kind = table.mirrored { -- cgit v1.2.3 From 3bacb6622d08679935e2746b2b8480d258bc0032 Mon 
Sep 17 00:00:00 2001 From: Philipp Gesang Date: Thu, 11 Dec 2014 22:28:55 +0100 Subject: [import] extend tell search --- scripts/mkimport | 80 ++++++++++++++++++++++++++++++++++++++++++++++---------- 1 file changed, 66 insertions(+), 14 deletions(-) diff --git a/scripts/mkimport b/scripts/mkimport index 7d6f984..ac8e8fa 100644 --- a/scripts/mkimport +++ b/scripts/mkimport @@ -223,14 +223,19 @@ local derive_category_path = function (cat) return location end +local derive_suffix = function (kind) + if kind == kind_tex then return ".tex" end + return ".lua" +end + local derive_fullname = function (cat, name, kind) local tmp = prefixes[cat] tmp = tmp and tmp .. "-" .. name or name - return tmp .. (kind == kind_tex and ".tex" or ".lua") + return tmp .. derive_suffix (kind) end local derive_ourname = function (name, kind) - local suffix = kind == kind_tex and ".tex" or ".lua" + local suffix = derive_suffix (kind) local subdir = kind == kind_essential and "runtime" or "misc" return subdir, our_prefix .. "-" .. name .. suffix end @@ -438,6 +443,56 @@ local import = function (arg) return 0 end --[[ [local import = function (arg)] ]] +local search_paths = function (target) + for i = 1, #searchdirs do + local root = searchdirs[i] + for j = 1, #subdirs do + local dir = file.join (searchdirs[i], subdirs[j]) + local file = file.join (dir, target) + if lfs.isfile (file) then return file end + end + end + return false +end + +local search_defs = function (target) + for cat, defs in next, imports do + local ndefs = #defs + for i = 1, ndefs do + local def = defs[i] + + local dname = def.name + if target == dname then + local found = search_paths (target .. derive_suffix (def.kind)) + if found then return found end + end + + local dfull = dname .. derive_suffix (def.kind) + if target == dfull then + local found = search_paths (dfull) + if found then return found end + end + + local dours = def.ours + if dours then + + local _, ourname = derive_ourname (dours, kind) + if target == dours then + local found = search_paths (ourname) + if found then return found end + end + + if target == ourname then + local found = search_paths (ourname) + if found then return found end + end + end + + end + end + return false +end + local search = function (arg) local target = arg[2] if not target then die "no filename given" end @@ -448,18 +503,15 @@ local search = function (arg) look_for = target goto found else - --- search in local tree - for i = 1, #searchdirs do - local root = searchdirs[i] - for j = 1, #subdirs do - local dir = file.join (searchdirs[i], subdirs[j]) - local file = file.join (dir, target) - if lfs.isfile (file) then - look_for = file - goto found - end - end - end + + --- search as file name in local tree + look_for = search_paths (target) + if look_for then goto found end + + --- seach the definitions + look_for = search_defs (target) + if look_for then goto found end + end ::fail:: -- cgit v1.2.3 From 57aa7e07532a85c34b7588350122830579f8df51 Mon Sep 17 00:00:00 2001 From: Philipp Gesang Date: Thu, 11 Dec 2014 23:00:39 +0100 Subject: [import] implement tell output --- scripts/mkimport | 84 ++++++++++++++++++++++++++++++++++++++++++++++++++++---- 1 file changed, 78 insertions(+), 6 deletions(-) diff --git a/scripts/mkimport b/scripts/mkimport index ac8e8fa..43d76fd 100644 --- a/scripts/mkimport +++ b/scripts/mkimport @@ -138,6 +138,14 @@ local kind_tex = 2 local kind_ignored = 3 local kind_lualibs = 4 +local kind_name = { + [0] = "essential", + [1] = "merged" , + [2] = "tex" , + [3] = "ignored" , + [4] = 
"lualibs" , +} + local imports = { fontloader = { @@ -228,6 +236,18 @@ local derive_suffix = function (kind) return ".lua" end +local pfxlen +local strip_prefix = function (fname) + if not pfxlen then pfxlen = #our_prefix end + if #fname <= pfxlen + 2 then + --- too short to accomodate prefix + basename + return + end + if string.sub (fname, 1, pfxlen) == our_prefix then + return string.sub (fname, pfxlen + 2) + end +end + local derive_fullname = function (cat, name, kind) local tmp = prefixes[cat] tmp = tmp and tmp .. "-" .. name or name @@ -240,6 +260,13 @@ local derive_ourname = function (name, kind) return subdir, our_prefix .. "-" .. name .. suffix end +local format_file_definition = function (def) + return stringformat ("name = \"%s\", kind = \"%s\"", + def.name, + kind_name[def.kind] or def.kind) + .. (def.ours and (", ours = \"" .. def.ours .. "\"") or "") +end + local is_readable = function (f) local fh = io.open (f, "r") if fh then @@ -493,10 +520,7 @@ local search_defs = function (target) return false end -local search = function (arg) - local target = arg[2] - if not target then die "no filename given" end - +local search = function (target) local look_for --- pick a file if lfs.isfile (target) then --- absolute path given @@ -518,15 +542,63 @@ local search = function (arg) if not look_for then return end ::found:: - status ("found file %s at %s", target, look_for) return look_for end -local tell = function (target) +local find_matching_def = function (location) + local basename = file.basename (location) + if not basename then die ("corrupt path %s", location) end + local barename = file.removesuffix (basename) + local pfxless = strip_prefix (barename) + local kind = file.suffix (pfxless) or "lua" + for cat, defs in next, imports do + for _, def in next, defs do + local dname = def.name + local dours = def.ours + if dname == pfxless + or dname == barename + -- dname == basename -- can’t happen for lack of suffix + or dours == pfxless + or dours == barename + then + return cat, def + end + end + end + return false +end + +local describe = function (target, location) + --- Map files to import definitions + separator () + status ("found file %s at %s", target, location) + local cat, def = find_matching_def (location) + if not cat or not def then + die ("file %s not found in registry", location) + end + + local dname = def.name + local dkind = def.kind + local subdir, ourname = derive_ourname (dname, dkind) + separator () + status ("category %s", cat) + status ("kind %s", kind_name[dkind]) + status ("in Context %s", derive_fullname (cat, def.name, def.kind)) + status ("in Luaotfload %s", ourname) + separator () + return 0 +end + +local tell = function (arg) + local target = arg[2] + if not target then die "no filename given" end + local location = search (target) if not location then die ("file %s not found in any of the search locations", target) end + + return describe (target, location) end local help = function () -- cgit v1.2.3 From 16a45555ad3677155ea097a8153cf7e266879e13 Mon Sep 17 00:00:00 2001 From: Philipp Gesang Date: Thu, 11 Dec 2014 23:17:12 +0100 Subject: [import] include Luatex-Fonts path in tell lookup --- scripts/mkimport | 31 +++++++++++++++++++++++-------- 1 file changed, 23 insertions(+), 8 deletions(-) diff --git a/scripts/mkimport b/scripts/mkimport index 43d76fd..c25459b 100644 --- a/scripts/mkimport +++ b/scripts/mkimport @@ -40,7 +40,7 @@ local context_root = "/home/phg/context/tex/texmf-context" local our_prefix = "fontloader" local fontloader_subdir = 
"src/fontloader" -local paths = { +local origin_paths = { context = "tex/context/base", fontloader = "tex/generic/context/luatex", } @@ -222,7 +222,7 @@ local hash_file = function (fname) end local derive_category_path = function (cat) - local subpath = paths[cat] or die ("category " .. cat .. " unknown") + local subpath = origin_paths[cat] or die ("category " .. cat .. " unknown") local location = file.join (context_root, subpath) if not lfs.isdir (location) then die ("invalid base path defined for category " @@ -470,15 +470,29 @@ local import = function (arg) return 0 end --[[ [local import = function (arg)] ]] +local find_in_path = function (root, subdir, target) + local file = file.join (root, subdir, target) + if lfs.isfile (file) then + return file + end +end + local search_paths = function (target) for i = 1, #searchdirs do local root = searchdirs[i] + for j = 1, #subdirs do - local dir = file.join (searchdirs[i], subdirs[j]) - local file = file.join (dir, target) - if lfs.isfile (file) then return file end + local found = find_in_path (root, subdirs[j], target) + if found then return found end end + end + + local found = find_in_path (context_root, origin_paths.context, target) + if found then return found end + + local found = find_in_path (context_root, origin_paths.fontloader, target) + if found then return found end return false end @@ -494,8 +508,9 @@ local search_defs = function (target) if found then return found end end - local dfull = dname .. derive_suffix (def.kind) - if target == dfull then + local dkind = def.kind + local dfull = derive_fullname (cat, dname, dkind) + if derive_fullname (cat, target, dkind) == dfull then local found = search_paths (dfull) if found then return found end end @@ -503,7 +518,7 @@ local search_defs = function (target) local dours = def.ours if dours then - local _, ourname = derive_ourname (dours, kind) + local _, ourname = derive_ourname (dours, dkind) if target == dours then local found = search_paths (ourname) if found then return found end -- cgit v1.2.3 From f5180e94891872433c3c1ef068d5557c5969c993 Mon Sep 17 00:00:00 2001 From: Philipp Gesang Date: Thu, 11 Dec 2014 23:25:49 +0100 Subject: [import] consider prefix variants when searching --- scripts/mkimport | 76 ++++++++++++++++++++++++++++++++++---------------------- 1 file changed, 46 insertions(+), 30 deletions(-) diff --git a/scripts/mkimport b/scripts/mkimport index c25459b..132d026 100644 --- a/scripts/mkimport +++ b/scripts/mkimport @@ -36,9 +36,10 @@ local stringformat = string.format -- config ------------------------------------------------------------------------------- -local context_root = "/home/phg/context/tex/texmf-context" -local our_prefix = "fontloader" -local fontloader_subdir = "src/fontloader" +local context_root = "/home/phg/context/tex/texmf-context" +local our_prefix = "fontloader" +local luatex_fonts_prefix = "luatex" +local fontloader_subdir = "src/fontloader" local origin_paths = { context = "tex/context/base", @@ -236,15 +237,17 @@ local derive_suffix = function (kind) return ".lua" end -local pfxlen -local strip_prefix = function (fname) - if not pfxlen then pfxlen = #our_prefix end - if #fname <= pfxlen + 2 then +local pfxlen = { } +local strip_prefix = function (fname, prefix) + prefix = prefix or our_prefix + if not pfxlen[prefix] then pfxlen[prefix] = #prefix end + local len = pfxlen[prefix] + if #fname <= len + 2 then --- too short to accomodate prefix + basename return end - if string.sub (fname, 1, pfxlen) == our_prefix then - return string.sub 
(fname, pfxlen + 2) + if string.sub (fname, 1, len) == prefix then + return string.sub (fname, len + 2) end end @@ -497,39 +500,52 @@ local search_paths = function (target) end local search_defs = function (target) + local variants = { target, --[[ unstripped ]] } + local tmp + tmp = strip_prefix (target) + if tmp then variants[#variants + 1] = tmp end + tmp = strip_prefix (target, luatex_fonts_prefix) + if tmp then variants[#variants + 1] = tmp end + + local nvariants = #variants + for cat, defs in next, imports do local ndefs = #defs for i = 1, ndefs do local def = defs[i] - local dname = def.name - if target == dname then - local found = search_paths (target .. derive_suffix (def.kind)) - if found then return found end - end - - local dkind = def.kind - local dfull = derive_fullname (cat, dname, dkind) - if derive_fullname (cat, target, dkind) == dfull then - local found = search_paths (dfull) - if found then return found end - end + for i = 1, nvariants do + local variant = variants[i] - local dours = def.ours - if dours then - - local _, ourname = derive_ourname (dours, dkind) - if target == dours then - local found = search_paths (ourname) + local dname = def.name + if variant == dname then + local found = search_paths (variant .. derive_suffix (def.kind)) if found then return found end end - if target == ourname then - local found = search_paths (ourname) + local dkind = def.kind + local dfull = derive_fullname (cat, dname, dkind) + if derive_fullname (cat, variant, dkind) == dfull then + local found = search_paths (dfull) if found then return found end end - end + local dours = def.ours + if dours then + + local _, ourname = derive_ourname (dours, dkind) + if variant == dours then + local found = search_paths (ourname) + if found then return found end + end + + if variant == ourname then + local found = search_paths (ourname) + if found then return found end + end + end + + end end end return false -- cgit v1.2.3 From f9e8ebd7846583d696b4e9e7d6454475a38fd767 Mon Sep 17 00:00:00 2001 From: Philipp Gesang Date: Sun, 14 Dec 2014 11:00:07 +0100 Subject: [fontloader] sync with Context as of 2014-12-14 --- src/fontloader/misc/fontloader-font-otf.lua | 4 +- src/fontloader/misc/fontloader-fonts-cbk.lua | 53 +++++++++++++++++++----- src/fontloader/misc/fontloader-l-string.lua | 2 +- src/fontloader/misc/fontloader-util-str.lua | 4 +- src/fontloader/runtime/fontloader-fontloader.lua | 51 +++++++++++++++++++---- 5 files changed, 93 insertions(+), 21 deletions(-) diff --git a/src/fontloader/misc/fontloader-font-otf.lua b/src/fontloader/misc/fontloader-font-otf.lua index 18b9752..302d8ea 100644 --- a/src/fontloader/misc/fontloader-font-otf.lua +++ b/src/fontloader/misc/fontloader-font-otf.lua @@ -989,8 +989,8 @@ actions["add duplicates"] = function(data,filename,raw) local description = descriptions[unicode] local n = 0 for _, description in next, descriptions do + local kerns = description.kerns if kerns then - local kerns = description.kerns for _, k in next, kerns do local ku = k[unicode] if ku then @@ -1724,6 +1724,8 @@ actions["merge kern classes"] = function(data,filename,raw) local lookup = subtable.lookup or subtable.name if kernclass then -- the next one is quite slow if #kernclass > 0 then + -- it's a table with one entry .. 
a future luatex can just + -- omit that level kernclass = kernclass[1] lookup = type(kernclass.lookup) == "string" and kernclass.lookup or lookup report_otf("fixing kernclass table of lookup %a",lookup) diff --git a/src/fontloader/misc/fontloader-fonts-cbk.lua b/src/fontloader/misc/fontloader-fonts-cbk.lua index 8632701..965b968 100644 --- a/src/fontloader/misc/fontloader-fonts-cbk.lua +++ b/src/fontloader/misc/fontloader-fonts-cbk.lua @@ -18,21 +18,28 @@ local nodes = nodes local traverse_id = node.traverse_id local glyph_code = nodes.nodecodes.glyph +local disc_code = nodes.nodecodes.disc -- from now on we apply ligaturing and kerning here because it might interfere with complex -- opentype discretionary handling where the base ligature pass expect some weird extra -- pointers (which then confuse the tail slider that has some checking built in) -local ligaturing = node.ligaturing -local kerning = node.kerning +local ligaturing = node.ligaturing +local kerning = node.kerning -function node.ligaturing() texio.write_nl("warning: node.ligaturing is already applied") end -function node.kerning () texio.write_nl("warning: node.kerning is already applied") end +local basepass = true + +function nodes.handlers.setbasepass(v) + basepass = v +end function nodes.handlers.characters(head) local fontdata = fonts.hashes.identifiers if fontdata then - local usedfonts, basefonts, prevfont, basefont = { }, { }, nil, nil + local usedfonts = { } + local basefonts = { } + local prevfont = nil + local basefont = nil for n in traverse_id(glyph_code,head) do local font = n.font if font ~= prevfont then @@ -49,7 +56,7 @@ function nodes.handlers.characters(head) local processors = shared.processes if processors and #processors > 0 then usedfonts[font] = processors - else + elseif basepass then basefont = { n, nil } basefonts[#basefonts+1] = basefont end @@ -58,6 +65,30 @@ function nodes.handlers.characters(head) end end end + for d in traverse_id(disc_code,head) do + local r = d.replace + if r then + for n in traverse_id(glyph_code,r) do + local font = n.font + if font ~= prevfont then + prevfont = font + local used = usedfonts[font] + if not used then + local tfmdata = fontdata[font] -- + if tfmdata then + local shared = tfmdata.shared -- we need to check shared, only when same features + if shared then + local processors = shared.processes + if processors and #processors > 0 then + usedfonts[font] = processors + end + end + end + end + end + end + end + end if next(usedfonts) then for font, processors in next, usedfonts do for i=1,#processors do @@ -65,7 +96,7 @@ function nodes.handlers.characters(head) end end end - if #basefonts > 0 then + if basepass and #basefonts > 0 then for i=1,#basefonts do local range = basefonts[i] local start, stop = range[1], range[2] @@ -85,11 +116,13 @@ function nodes.handlers.characters(head) end function nodes.simple_font_handler(head) --- lang.hyphenate(head) + -- lang.hyphenate(head) head = nodes.handlers.characters(head) nodes.injections.handler(head) + if not basepass then + head = ligaturing(head) + head = kerning(head) + end nodes.handlers.protectglyphs(head) - -- head = node.ligaturing(head) - -- head = node.kerning(head) return head end diff --git a/src/fontloader/misc/fontloader-l-string.lua b/src/fontloader/misc/fontloader-l-string.lua index 3b1a000..70c66f6 100644 --- a/src/fontloader/misc/fontloader-l-string.lua +++ b/src/fontloader/misc/fontloader-l-string.lua @@ -94,7 +94,7 @@ end -- return not find(str,"%S") -- end -local pattern = P(" ")^0 * P(-1) +local pattern 
= P(" ")^0 * P(-1) -- maybe also newlines -- patterns.onlyspaces = pattern diff --git a/src/fontloader/misc/fontloader-util-str.lua b/src/fontloader/misc/fontloader-util-str.lua index 8529c3a..a040b01 100644 --- a/src/fontloader/misc/fontloader-util-str.lua +++ b/src/fontloader/misc/fontloader-util-str.lua @@ -219,10 +219,12 @@ local striplinepatterns = { ["collapse"] = patterns.collapser, -- how about: stripper fullstripper } +setmetatable(striplinepatterns,{ __index = function(t,k) return p_prune_collapse end }) + strings.striplinepatterns = striplinepatterns function strings.striplines(str,how) - return str and lpegmatch(how and striplinepatterns[how] or p_prune_collapse,str) or str + return str and lpegmatch(striplinepatterns[how],str) or str end -- also see: string.collapsespaces diff --git a/src/fontloader/runtime/fontloader-fontloader.lua b/src/fontloader/runtime/fontloader-fontloader.lua index 0313204..f11a74c 100644 --- a/src/fontloader/runtime/fontloader-fontloader.lua +++ b/src/fontloader/runtime/fontloader-fontloader.lua @@ -1,6 +1,6 @@ -- merged file : luatex-fonts-merged.lua -- parent file : luatex-fonts.lua --- merge date : 12/03/14 18:26:33 +-- merge date : 12/11/14 12:02:53 do -- begin closure to overcome local limits and interference @@ -2831,9 +2831,10 @@ local striplinepatterns={ ["retain and no empty"]=p_retain_noempty, ["collapse"]=patterns.collapser, } +setmetatable(striplinepatterns,{ __index=function(t,k) return p_prune_collapse end }) strings.striplinepatterns=striplinepatterns function strings.striplines(str,how) - return str and lpegmatch(how and striplinepatterns[how] or p_prune_collapse,str) or str + return str and lpegmatch(striplinepatterns[how],str) or str end strings.striplong=strings.striplines function strings.nice(str) @@ -7830,8 +7831,8 @@ actions["add duplicates"]=function(data,filename,raw) local description=descriptions[unicode] local n=0 for _,description in next,descriptions do + local kerns=description.kerns if kerns then - local kerns=description.kerns for _,k in next,kerns do local ku=k[unicode] if ku then @@ -14560,14 +14561,20 @@ local fonts=fonts local nodes=nodes local traverse_id=node.traverse_id local glyph_code=nodes.nodecodes.glyph +local disc_code=nodes.nodecodes.disc local ligaturing=node.ligaturing local kerning=node.kerning -function node.ligaturing() texio.write_nl("warning: node.ligaturing is already applied") end -function node.kerning () texio.write_nl("warning: node.kerning is already applied") end +local basepass=true +function nodes.handlers.setbasepass(v) + basepass=v +end function nodes.handlers.characters(head) local fontdata=fonts.hashes.identifiers if fontdata then - local usedfonts,basefonts,prevfont,basefont={},{},nil,nil + local usedfonts={} + local basefonts={} + local prevfont=nil + local basefont=nil for n in traverse_id(glyph_code,head) do local font=n.font if font~=prevfont then @@ -14584,7 +14591,7 @@ function nodes.handlers.characters(head) local processors=shared.processes if processors and #processors>0 then usedfonts[font]=processors - else + elseif basepass then basefont={ n,nil } basefonts[#basefonts+1]=basefont end @@ -14593,6 +14600,30 @@ function nodes.handlers.characters(head) end end end + for d in traverse_id(disc_code,head) do + local r=d.replace + if r then + for n in traverse_id(glyph_code,r) do + local font=n.font + if font~=prevfont then + prevfont=font + local used=usedfonts[font] + if not used then + local tfmdata=fontdata[font] + if tfmdata then + local shared=tfmdata.shared + if shared then 
+ local processors=shared.processes + if processors and #processors>0 then + usedfonts[font]=processors + end + end + end + end + end + end + end + end if next(usedfonts) then for font,processors in next,usedfonts do for i=1,#processors do @@ -14600,7 +14631,7 @@ function nodes.handlers.characters(head) end end end - if #basefonts>0 then + if basepass and #basefonts>0 then for i=1,#basefonts do local range=basefonts[i] local start,stop=range[1],range[2] @@ -14621,6 +14652,10 @@ end function nodes.simple_font_handler(head) head=nodes.handlers.characters(head) nodes.injections.handler(head) + if not basepass then + head=ligaturing(head) + head=kerning(head) + end nodes.handlers.protectglyphs(head) return head end -- cgit v1.2.3 From 5b773f9c6d94616316734a0070cae68ce6a67523 Mon Sep 17 00:00:00 2001 From: Philipp Gesang Date: Sun, 14 Dec 2014 12:14:51 +0100 Subject: [letterspace] convert to node.direct MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This is an experiment: The character kerning callback has been converted in its entirety to utilize the not-so-new direct node API. Since the translation was mechanical only to a certain extent, this may have introduced errors. On the other hand, the revised code resembles its distant ancestor in Context’s ``typo-krn.lua`` much more closely again, which may come in handy in the future. --- src/luaotfload-letterspace.lua | 217 ++++++++++++++++++++++++----------------- 1 file changed, 127 insertions(+), 90 deletions(-) diff --git a/src/luaotfload-letterspace.lua b/src/luaotfload-letterspace.lua index f1fb234..8956f82 100644 --- a/src/luaotfload-letterspace.lua +++ b/src/luaotfload-letterspace.lua @@ -7,7 +7,7 @@ if not modules then modules = { } end modules ['letterspace'] = { } local log = luaotfload.log -local report = log.report +local logreport = log.report local getmetatable = getmetatable local require = require @@ -17,14 +17,26 @@ local tonumber = tonumber local next = next local nodes, node, fonts = nodes, node, fonts -local find_node_tail = node.tail or node.slide -local free_node = node.free -local copy_node = node.copy -local new_node = node.new -local insert_node_before = node.insert_before - -local nodepool = nodes.pool - +--- As of December 2014 the faster ``node.direct.*`` interface is +--- preferred. 
+local nodedirect = nodes.nuts +local getchar = nodedirect.getchar +local getfont = nodedirect.getfont +local getid = nodedirect.getid +local getnext = nodedirect.getnext +local getprev = nodedirect.getprev +local getfield = nodedirect.getfield +local setfield = nodedirect.setfield +local find_node_tail = nodedirect.tail +local todirect = nodedirect.tonut +local tonode = nodedirect.tonode + +local insert_node_before = nodedirect.insert_before +local free_node = nodedirect.free +local copy_node = nodedirect.copy +local new_node = nodedirect.new + +local nodepool = nodedirect.pool local new_kern = nodepool.kern local new_glue = nodepool.glue @@ -78,34 +90,24 @@ local userkern_code = kerncodes.userkern --- node-res ----------------------------------------------------------------------- -nodes.pool = nodes.pool or { } -local pool = nodes.pool - -local kern = new_node ("kern", kerncodes.userkern) local glue_spec = new_node "glue_spec" -pool.kern = function (k) - local n = copy_node (kern) - n.kern = k - return n -end - -pool.glue = function (width, stretch, shrink, - stretch_order, shrink_order) - local n = new_node"glue" +nodepool.glue = function (width, stretch, shrink, + stretch_order, shrink_order) + local n = new_node "glue" if not width then -- no spec elseif width == false or tonumber(width) then local s = copy_node(glue_spec) - if width then s.width = width end - if stretch then s.stretch = stretch end - if shrink then s.shrink = shrink end - if stretch_order then s.stretch_order = stretch_order end - if shrink_order then s.shrink_order = shrink_order end - n.spec = s + if width then setfield(s, "width" , width ) end + if stretch then setfield(s, "stretch" , stretch ) end + if shrink then setfield(s, "shrink" , shrink ) end + if stretch_order then setfield(s, "stretch_order", stretch_order) end + if shrink_order then setfield(s, "shrink_order" , shrink_order ) end + setfield(n, "spec", s) else -- shared - n.spec = copy_node(width) + setfield(n, "spec", copy_node(width)) end return n end @@ -187,13 +189,12 @@ end local kern_injector = function (fillup, kern) if fillup then local g = new_glue(kern) - local s = g.spec - s.stretch = kern - s.stretch_order = 1 + local s = getfield(g, "spec") + setfield(s, "stretch", kern) + setfield(s, "stretch_order", 1) return g - else - return new_kern(kern) end + return new_kern(kern) end --[[doc-- @@ -223,12 +224,12 @@ kerncharacters = function (head) local firstkern = true while start do - local id = start.id + local id = getid(start) if id == glyph_code then --- 1) look up kern factor (slow, but cached rudimentarily) local krn - local fontid = start.font + local fontid = getfont(start) do krn = kernfactors[fontid] if not krn then @@ -249,7 +250,7 @@ kerncharacters = function (head) goto nextnode elseif firstkern then firstkern = false - if (id ~= disc_code) and (not start.components) then + if (id ~= disc_code) and (not getfield(start, "components")) then --- not a ligature, skip node goto nextnode end @@ -266,7 +267,7 @@ kerncharacters = function (head) lastfont = fontid --- 2) resolve ligatures - local c = start.components + local c = getfield(start, "components") if c then if keepligature and keepligature(start) then @@ -274,20 +275,20 @@ kerncharacters = function (head) else --- c = kerncharacters (c) --> taken care of after replacing local s = start - local p, n = s.prev, s.next + local p, n = getprev(s), s.next local tail = find_node_tail(c) if p then - p.next = c - c.prev = p + setfield(p, "next", c) + p = getprev(c) else head = c end if n then 
- n.prev = tail + tail = getprev(n) end - tail.next = n + setnext(tail, "next", n) start = c - s.components = nil + setfield(s, "components", nil) -- we now leak nodes ! -- free_node(s) done = true @@ -295,30 +296,40 @@ kerncharacters = function (head) end -- kern ligature --- 3) apply the extra kerning - local prev = start.prev + local prev = getprev(start) if prev then - local pid = prev.id + local pid = getid(prev) if not pid then -- nothing elseif pid == kern_code then - if prev.subtype == kerning_code --- context does this by means of an - or prev.subtype == userkern_code --- attribute; we may need a test + local prev_subtype = getsubtype(prev) + if prev_subtype == kerning_code --- context does this by means of an + or prev_subtype == userkern_code --- attribute; we may need a test then - if keeptogether and prev.prev.id == glyph_code and keeptogether(prev.prev,start) then + + local pprev = getprev(prev) + local pprev_id = getid(pprev) + + if keeptogether + and pprev_id == glyph_code + and keeptogether(pprev, start) + then -- keep else - prev.subtype = userkern_code - prev.kern = prev.kern + quaddata[lastfont]*krn -- here + prev_subtype = userkern_code + local prev_kern = getfield(prev, "kern") + prev_kern = prev_kern + quaddata[lastfont] * krn done = true end end elseif pid == glyph_code then - if prev.font == lastfont then - local prevchar, lastchar = prev.char, start.char - if keeptogether and keeptogether(prev,start) then + if getfont(prev) == lastfont then + local prevchar = getchar(prev) + local lastchar = getchar(start) + if keeptogether and keeptogether(prev, start) then -- keep 'm elseif identifiers[lastfont] then local kerns = chardata[lastfont] and chardata[lastfont][prevchar].kerns @@ -337,31 +348,34 @@ kerncharacters = function (head) -- a bit too complicated, we can best not copy and just calculate -- but we could have multiple glyphs involved so ... 
local disc = prev -- disc - local pre, post, replace = disc.pre, disc.post, disc.replace - local prv, nxt = disc.prev, disc.next + local pre = getfield(disc, "pre") + local post = getfield(disc, "post") + local replace = getfield(disc, "replace") + local prv = getprev(disc) + local nxt = getnext(disc) if pre and prv then -- must pair with start.prev -- this one happens in most cases local before = copy_node(prv) - pre.prev = before - before.next = pre - before.prev = nil + setfield(pre, "prev", before) + setfield(before, "next", pre) + setfield(before, "prev", nil) pre = kerncharacters (before) - pre = pre.next - pre.prev = nil - disc.pre = pre + pre = getnext(pre) + setfield(pre, "prev", nil) + setfield(disc, "pre", pre) free_node(before) end if post and nxt then -- must pair with start local after = copy_node(nxt) local tail = find_node_tail(post) - tail.next = after - after.prev = tail - after.next = nil + setfield(tail, "next", after) + setfield(after, "prev", tail) + setfield(after, "next", nil) post = kerncharacters (post) - tail.next = nil - disc.post = post + setfield(tail, "next", nil) + setfield(disc, "post", post) free_node(after) end @@ -369,29 +383,34 @@ kerncharacters = function (head) local before = copy_node(prv) local after = copy_node(nxt) local tail = find_node_tail(replace) - replace.prev = before - before.next = replace - before.prev = nil - tail.next = after - after.prev = tail - after.next = nil + setfield(replace, "prev", before) + setfield(before, "next", replace) + setfield(before, "prev", nil) + setfield(tail, "next", after) + setfield(after, "prev", tail) + setfield(after, "next", nil) replace = kerncharacters (before) - replace = replace.next - replace.prev = nil - after.prev.next = nil - disc.replace = replace + replace = getnext(replace) + setfield(replace, "prev", nil) + setfield(after, "prev.next", nil) + setfield(disc, "replace", replace) free_node(after) free_node(before) + elseif identifiers[lastfont] then - if prv and prv.id == glyph_code and prv.font == lastfont then - local prevchar, lastchar = prv.char, start.char + if prv + and getid(prv) == glyph_code + and getfont(prv) == lastfont + then + local prevchar = getchar(prv) + local lastchar = getchar(start) local kerns = chardata[lastfont] and chardata[lastfont][prevchar].kerns local kern = kerns and kerns[lastchar] or 0 krn = kern + quaddata[lastfont]*krn -- here else krn = quaddata[lastfont]*krn -- here end - disc.replace = kern_injector(false,krn) -- only kerns permitted, no glue + setfield(disc, "replace", kern_injector(false, krn)) end end @@ -400,7 +419,7 @@ kerncharacters = function (head) ::nextnode:: if start then - start = start.next + start = getnext(start) end end return head, done @@ -439,19 +458,37 @@ local remove_processor = function (name) return false --> unregistered end ---- now for the simplistic variant +--- When font kerning is requested, usually by defining a font with the +--- ``letterspace`` parameter, we inject a wrapper for the +--- ``kerncharacters()`` node processor in the relevant callbacks. This +--- wrapper initially converts the received head node into its “direct” +--- counterpart. Likewise, the callback result is converted back to an +--- ordinary node prior to returning. Internally, ``kerncharacters()`` +--- performs all node operations on direct nodes. 
+ --- unit -> bool local enablefontkerning = function ( ) - return add_processor( kerncharacters + + local handler = function (hd) + local direct_hd = todirect (hd) + local hd, _done = kerncharacters (hd) + if not hd then --- bad + logreport ("both", 0, "letterspace", + "kerncharacters() failed to return a valid new head") + end + return tonode (hd) + end + + return add_processor( handler , "luaotfload.letterspace" , "pre_linebreak_filter" , "hpack_filter") end --- unit -> bool -local disablefontkerning = function ( ) - return remove_processor "luaotfload.letterspace" -end +---al disablefontkerning = function ( ) +---eturn remove_processor "luaotfload.letterspace" +--- --[[doc-- @@ -515,10 +552,10 @@ otffeatures.register { local initializecompatfontkerning = function (tfmdata, percentage) local factor = tonumber (percentage) if not factor then - report ("both", 0, "letterspace", - "Invalid argument to letterspace: %s (type %q), " .. - "was expecting percentage as Lua number instead.", - percentage, type (percentage)) + logreport ("both", 0, "letterspace", + "Invalid argument to letterspace: %s (type %q), " .. + "was expecting percentage as Lua number instead.", + percentage, type (percentage)) return end return initializefontkerning (tfmdata, factor * 0.01) -- cgit v1.2.3 From ca673f7b14af906606a188fd98978d3501842f63 Mon Sep 17 00:00:00 2001 From: Philipp Gesang Date: Sun, 14 Dec 2014 12:19:55 +0100 Subject: [*] update news --- NEWS | 3 +++ 1 file changed, 3 insertions(+) diff --git a/NEWS b/NEWS index 4926d35..0e0e5b0 100644 --- a/NEWS +++ b/NEWS @@ -6,6 +6,9 @@ Change History * Add ``--dumpconf`` option to luaotfload-tool for generating configuration files * Move fontloader files to subtree src/fontloader + * New script ``mkimport`` facilitates maintainance of code imported from + Context + * Revised letterspacing, now utilizing the ``node.direct`` interface 2014/07/13, luaotfload v2.5 * Remove legacy code. -- cgit v1.2.3
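Note on the fontloader sync above (luatex-fonts-cbk): the new `basepass` flag decides whether the generic handler collects base-mode font ranges itself and whether `nodes.simple_font_handler` finishes by running `node.ligaturing`/`node.kerning` over the whole list. The snippet below is only an illustrative usage sketch, not part of the patch series; it assumes a LuaTeX run in which the merged fontloader has already been loaded so that `nodes.handlers.setbasepass` exists.

    -- Illustrative only: switch the generic Luatex-Fonts handler out of its
    -- internal base pass. With the flag off, the per-font-range ligaturing
    -- and kerning calls are skipped and simple_font_handler applies
    -- node.ligaturing and node.kerning to the complete list after injection
    -- instead (the `if not basepass then ... end` branch added above).
    if nodes and nodes.handlers and nodes.handlers.setbasepass then
      nodes.handlers.setbasepass (false)
    end

    -- The callback registration itself is unchanged, e.g. in a plain setup:
    -- callback.register ("pre_linebreak_filter", nodes.simple_font_handler)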
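The util-str part of the same sync replaces the explicit fallback in `strings.striplines` (`how and striplinepatterns[how] or p_prune_collapse`) with an `__index` metamethod on the pattern table, so an unknown mode name and a missing `how` argument both resolve to the prune-and-collapse pattern; a nil key is a legal table read in Lua and goes through `__index` like any other miss. A small standalone illustration of that fallback idiom, with plain strings standing in for the real lpeg patterns:

    -- Fallback-table idiom used for strings.striplinepatterns above;
    -- plain strings replace the lpeg patterns for the example.
    local striplinepatterns = setmetatable (
      { ["retain"] = "retain-pattern" },
      { __index = function () return "prune-and-collapse-pattern" end }
    )

    local function striplines (str, how)
      -- mirrors: return str and lpegmatch(striplinepatterns[how],str) or str
      return str and (str .. " -> " .. striplinepatterns[how]) or str
    end

    print (striplines ("line", "retain"))  --> line -> retain-pattern
    print (striplines ("line", "nosuch"))  --> line -> prune-and-collapse-pattern
    print (striplines ("line"))            --> same fallback; how is nil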
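The letterspace commit above converts `kerncharacters()` to the `node.direct` interface while keeping the callback boundary on ordinary nodes: the registered handler converts the incoming head with `todirect`, lets the kerning pass work on direct nodes throughout, and hands an ordinary node back via `tonode`. The sketch below shows only that boundary pattern; it uses the stock LuaTeX `node.direct` functions rather than the `nodes.nuts` aliases from the patch, and `process` is a hypothetical stand-in for `kerncharacters`.

    -- Boundary pattern: ordinary node in, direct nodes inside, ordinary node
    -- out. Assumes a LuaTeX engine that provides the node.direct interface.
    local todirect = node.direct.todirect
    local tonode   = node.direct.tonode
    local getid    = node.direct.getid
    local getnext  = node.direct.getnext

    local glyph_id = node.id "glyph"

    local function process (head)            -- operates on direct nodes only
      local n = head
      while n do
        if getid (n) == glyph_id then
          -- a real pass would measure and insert kerns here,
          -- as kerncharacters() does
        end
        n = getnext (n)
      end
      return head, false                     -- head, done
    end

    local function handler (hd)              -- what the callback sees
      local direct_hd = todirect (hd)
      local new_hd, _ = process (direct_hd)
      return tonode (new_hd or direct_hd)    -- always return an ordinary node
    end

    -- registration stays conventional, e.g.:
    -- luatexbase.add_to_callback ("pre_linebreak_filter", handler, "example.kern")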