Diffstat (limited to 'src')
-rw-r--r--  src/luaotfload-auxiliary.lua        820
-rw-r--r--  src/luaotfload-basics-gen.lua       368
-rw-r--r--  src/luaotfload-basics-nod.lua       178
-rw-r--r--  src/luaotfload-colors.lua           319
-rw-r--r--  src/luaotfload-configuration.lua    704
-rw-r--r--  src/luaotfload-database.lua        3466
-rw-r--r--  src/luaotfload-diagnostics.lua      684
-rw-r--r--  src/luaotfload-features.lua        1238
-rw-r--r--  src/luaotfload-fontloader.lua     13910
-rw-r--r--  src/luaotfload-fonts-cbk.lua         68
-rw-r--r--  src/luaotfload-fonts-def.lua         97
-rw-r--r--  src/luaotfload-fonts-enc.lua         28
-rw-r--r--  src/luaotfload-fonts-ext.lua        272
-rw-r--r--  src/luaotfload-fonts-inj.lua        526
-rw-r--r--  src/luaotfload-fonts-lua.lua         33
-rw-r--r--  src/luaotfload-fonts-otn.lua       2848
-rw-r--r--  src/luaotfload-fonts-tfm.lua         38
-rw-r--r--  src/luaotfload-letterspace.lua      544
-rw-r--r--  src/luaotfload-loaders.lua           30
-rw-r--r--  src/luaotfload-log.lua              404
-rw-r--r--  src/luaotfload-main.lua             708
-rw-r--r--  src/luaotfload-override.lua          52
-rw-r--r--  src/luaotfload-parsers.lua          701
-rwxr-xr-x  src/luaotfload-tool.lua            1617
-rw-r--r--  src/luaotfload.sty                   45
25 files changed, 29698 insertions, 0 deletions
diff --git a/src/luaotfload-auxiliary.lua b/src/luaotfload-auxiliary.lua
new file mode 100644
index 0000000..89bf51b
--- /dev/null
+++ b/src/luaotfload-auxiliary.lua
@@ -0,0 +1,820 @@
+#!/usr/bin/env texlua
+-----------------------------------------------------------------------
+-- FILE: luaotfload-auxiliary.lua
+-- DESCRIPTION: part of luaotfload
+-- REQUIREMENTS: luaotfload 2.5
+-- AUTHOR: Khaled Hosny, Élie Roux, Philipp Gesang
+-- VERSION: 2.5
+-- MODIFIED: 2014-01-02 21:24:25+0100
+-----------------------------------------------------------------------
+--
+
+--- this file addresses issue #24
+--- https://github.com/lualatex/luaotfload/issues/24#
+
+luaotfload = luaotfload or {}
+luaotfload.aux = luaotfload.aux or { }
+
+local aux = luaotfload.aux
+local log = luaotfload.log
+local report = log.report
+local fonthashes = fonts.hashes
+local identifiers = fonthashes.identifiers
+local fontnames = fonts.names
+
+local fontid = font.id
+local texsprint = tex.sprint
+
+local dofile = dofile
+local getmetatable = getmetatable
+local setmetatable = setmetatable
+local utf8 = unicode.utf8
+local stringlower = string.lower
+local stringformat = string.format
+local stringgsub = string.gsub
+local stringbyte = string.byte
+local stringfind = string.find
+local tablecopy = table.copy
+
+-----------------------------------------------------------------------
+--- font patches
+-----------------------------------------------------------------------
+
+--- https://github.com/khaledhosny/luaotfload/issues/54
+
+local rewrite_fontname = function (tfmdata, specification)
+ tfmdata.name = [["]] .. specification .. [["]]
+end
+
+local rewriting = false
+
+local start_rewrite_fontname = function ()
+ if rewriting == false then
+ luatexbase.add_to_callback (
+ "luaotfload.patch_font",
+ rewrite_fontname,
+ "luaotfload.rewrite_fontname")
+ rewriting = true
+ report ("log", 1, "aux",
+ "start rewriting tfmdata.name field")
+ end
+end
+
+aux.start_rewrite_fontname = start_rewrite_fontname
+
+local stop_rewrite_fontname = function ()
+ if rewriting == true then
+    luatexbase.remove_from_callback
+        ("luaotfload.patch_font", "luaotfload.rewrite_fontname")
+ rewriting = false
+ report ("log", 1, "aux",
+ "stop rewriting tfmdata.name field")
+ end
+end
+
+aux.stop_rewrite_fontname = stop_rewrite_fontname
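+
+--- Usage sketch (not part of the original file): wrap a single font
+--- definition in the rewriting callback. Both toggles are idempotent,
+--- so stray calls are harmless. The font file below is only an
+--- assumption for illustration.
+--
+-- luaotfload.aux.start_rewrite_fontname ()
+-- --- \font\test = {file:texgyrepagella-regular.otf} %% at the TeX end
+-- luaotfload.aux.stop_rewrite_fontname ()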
+
+
+--[[doc--
+This sets two dimensions apparently relied upon by the unicode-math
+package.
+--doc]]--
+
+local set_sscale_dimens = function (fontdata)
+ local mathconstants = fontdata.MathConstants
+ local parameters = fontdata.parameters
+ if mathconstants then
+ parameters[10] = mathconstants.ScriptPercentScaleDown or 70
+ parameters[11] = mathconstants.ScriptScriptPercentScaleDown or 50
+ end
+ return fontdata
+end
+
+luatexbase.add_to_callback(
+ "luaotfload.patch_font",
+ set_sscale_dimens,
+ "luaotfload.aux.set_sscale_dimens")
+
+--- fontobj -> int
+local lookup_units = function (fontdata)
+ local metadata = fontdata.shared and fontdata.shared.rawdata.metadata
+ if metadata and metadata.units_per_em then
+ return metadata.units_per_em
+ elseif fontdata.parameters and fontdata.parameters.units then
+ return fontdata.parameters.units
+ elseif fontdata.units then --- v1.x
+ return fontdata.units
+ end
+ return 1000
+end
+
+--[[doc--
+This callback corrects some values of the Cambria font.
+--doc]]--
+--- fontobj -> unit
+local patch_cambria_domh = function (fontdata)
+ local mathconstants = fontdata.MathConstants
+ if mathconstants and fontdata.psname == "CambriaMath" then
+ --- my test Cambria has 2048
+ local units_per_em = fontdata.units_per_em or lookup_units(fontdata)
+ local sz = fontdata.parameters.size or fontdata.size
+ local mh = 2800 / units_per_em * sz
+ if mathconstants.DisplayOperatorMinHeight < mh then
+ mathconstants.DisplayOperatorMinHeight = mh
+ end
+ end
+end
+
+luatexbase.add_to_callback(
+ "luaotfload.patch_font",
+ patch_cambria_domh,
+ "luaotfload.aux.patch_cambria_domh")
+
+--[[doc--
+
+Comment from fontspec:
+
+ “Here we patch fonts tfm table to emulate \XeTeX's \cs{fontdimen8},
+ which stores the caps-height of the font. (Cf.\ \cs{fontdimen5} which
+ stores the x-height.)
+
+ Falls back to measuring the glyph if the font doesn't contain the
+ necessary information.
+ This needs to be extended for fonts that don't contain an `X'.”
+
+--doc]]--
+
+local set_capheight = function (fontdata)
+ local shared = fontdata.shared
+ local parameters = fontdata.parameters
+ local capheight
+ if shared and shared.rawdata.metadata.pfminfo then
+ local units_per_em = parameters.units
+ local size = parameters.size
+ local os2_capheight = shared.rawdata.metadata.pfminfo.os2_capheight
+
+ if os2_capheight > 0 then
+ capheight = os2_capheight / units_per_em * size
+ else
+ local X8 = stringbyte"X"
+ if fontdata.characters[X8] then
+ capheight = fontdata.characters[X8].height
+ else
+ capheight = parameters.ascender / units_per_em * size
+ end
+ end
+ else
+ local X8 = stringbyte"X"
+ if fontdata.characters[X8] then
+ capheight = fontdata.characters[X8].height
+ end
+ end
+ if capheight then
+ --- is this legit? afaics there’s nothing else on the
+ --- array part of that table
+ fontdata.parameters[8] = capheight
+ end
+end
+
+luatexbase.add_to_callback(
+ "luaotfload.patch_font",
+ set_capheight,
+ "luaotfload.aux.set_capheight")
+
+-----------------------------------------------------------------------
+--- glyphs and characters
+-----------------------------------------------------------------------
+
+local agl = fonts.encodings.agl
+
+--- int -> int -> bool
+local font_has_glyph = function (font_id, codepoint)
+ local fontdata = fonts.hashes.identifiers[font_id]
+ if fontdata then
+ if fontdata.characters[codepoint] ~= nil then return true end
+ end
+ return false
+end
+
+aux.font_has_glyph = font_has_glyph
+
+--- undocumented
+
+local raw_slot_of_name = function (font_id, glyphname)
+ local fontdata = font.fonts[font_id]
+ if fontdata.type == "virtual" then --- get base font for glyph idx
+ local codepoint = agl.unicodes[glyphname]
+ local glyph = fontdata.characters[codepoint]
+    if glyph then
+ return codepoint
+ end
+ end
+ return false
+end
+
+--[[doc--
+
+ This one is approximately “name_to_slot” from the microtype package;
+ note that it is all about Adobe Glyph names and glyph slots in the
+ font. The names and values may diverge from actual Unicode.
+
+ http://www.adobe.com/devnet/opentype/archives/glyph.html
+
+ The “unsafe” switch triggers a fallback lookup in the raw fonts
+ table. As some of the information is stored as references, this may
+ have unpredictable side-effects.
+
+--doc]]--
+
+--- int -> string -> bool -> (int | false)
+local slot_of_name = function (font_id, glyphname, unsafe)
+ local fontdata = identifiers[font_id]
+ if fontdata then
+ local unicode = fontdata.resources.unicodes[glyphname]
+ if unicode then
+ if type(unicode) == "number" then
+ return unicode
+ else
+ return unicode[1] --- for multiple components
+ end
+-- else
+-- --- missing
+ end
+ elseif unsafe == true then -- for Robert
+ return raw_slot_of_name(font_id, glyphname)
+ end
+ return false
+end
+
+aux.slot_of_name = slot_of_name
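+
+--- Illustrative sketch (not in the original sources): look up the slot
+--- of the AGL name “adieresis” in the current font; for a Unicode
+--- encoded font the expected result is 0xE4 (228).
+--
+-- local slot = aux.slot_of_name (font.current (), "adieresis", false)
+-- --> 228, or false if the glyph is absent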
+
+--[[doc--
+
+  Inverse of the above; not authoritative, since to my knowledge the
+  official inverse of the AGL is the AGLFN. Maybe this whole issue should be
+ dealt with in a separate package that loads char-def.lua and thereby
+ solves the problem for the next couple decades.
+
+ http://partners.adobe.com/public/developer/en/opentype/aglfn13.txt
+
+--doc]]--
+
+local indices
+
+--- int -> (string | false)
+local name_of_slot = function (codepoint)
+ if not indices then --- this will load the glyph list
+ local unicodes = agl.unicodes
+ indices = table.swapped(unicodes)
+ end
+ local glyphname = indices[codepoint]
+ if glyphname then
+ return glyphname
+ end
+ return false
+end
+
+aux.name_of_slot = name_of_slot
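+
+--- Sketch of the inverse lookup (illustration only): map the codepoint
+--- 0xE4 back to its glyph name via the swapped AGL table.
+--
+-- print (aux.name_of_slot (0xE4)) --> “adieresis”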
+
+--[[doc--
+
+ In Context, characters.data is where the data from char-def.lua
+ resides. The file is huge (>3.7 MB as of 2013) and not part of the
+ isolated font loader. Nevertheless, we include a partial version
+  generated by the mkcharacters script that contains only a subset of
+  the fields of each character defined.
+
+ Currently, these are (compare the mkcharacters script!)
+
+ · "direction"
+ · "mirror"
+ · "category"
+ · "textclass"
+
+ The directional information is required for packages like Simurgh [0]
+ to work correctly. In an early stage [1] it was necessary to load
+ further files from Context directly, including the full blown version
+  of char-def. Since we have no use for most of the functionality
+  imported that way, the required parts have been isolated and are now
+  installed along with luaotfload-characters.lua. We can extend the set
+ of imported features easily should it not be enough.
+
+ [0] https://github.com/persian-tex/simurgh
+ [1] http://tex.stackexchange.com/a/132301/14066
+
+--doc]]--
+
+characters = characters or { } --- should be created in basics-gen
+characters.data = nil
+local chardef = "luaotfload-characters"
+
+do
+ local setmetatableindex = function (t, f)
+ local mt = getmetatable (t)
+ if mt then
+ mt.__index = f
+ else
+ setmetatable (t, { __index = f })
+ end
+ end
+
+ --- there are some special tables for each field that provide access
+ --- to fields of the character table by means of a metatable
+
+ local mkcharspecial = function (characters, tablename, field)
+
+ local chardata = characters.data
+
+ if chardata then
+ local newspecial = { }
+      characters [tablename] = newspecial --> e.g. “characters.mirrors”
+
+ local idx = function (t, char)
+ local c = chardata [char]
+ if c then
+ local m = c [field] --> e.g. “mirror”
+ if m then
+ t [char] = m
+ return m
+ end
+ end
+ newspecial [char] = false
+ return char
+ end
+
+ setmetatableindex (newspecial, idx)
+ end
+
+ end
+
+ local mkcategories = function (characters) -- different from the others
+
+ local chardata = characters.data
+
+ setmetatable (characters, { __index = function (t, char)
+ if char then
+ local c = chardata [char]
+ c = c.category or char
+ t [char] = c
+ return c
+ end
+ end})
+
+ end
+
+ local load_failed = false
+ local chardata --> characters.data; loaded on demand
+
+ local load_chardef = function ()
+
+ report ("both", 1, "aux", "Loading character metadata from %s.", chardef)
+ chardata = dofile (kpse.find_file (chardef, "lua"))
+
+ if chardata == nil then
+      report ("both", 0, "aux",
+              "Could not load %s; continuing \z
+               with empty character table.",
+              chardef)
+ chardata = { }
+ load_failed = true
+ end
+
+ characters = { } --- nuke metatable
+ characters.data = chardata
+
+ --- institute some of the functionality from char-ini.lua
+
+ mkcharspecial (characters, "mirrors", "mirror")
+ mkcharspecial (characters, "directions", "direction")
+ mkcharspecial (characters, "textclasses", "textclass")
+ mkcategories (characters)
+
+ end
+
+ local charindex = function (t, k)
+ if chardata == nil and load_failed ~= true then
+ load_chardef ()
+ end
+
+ return characters [k]
+ end
+
+ setmetatableindex (characters, charindex)
+
+end
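+
+--- Access sketch (assumes luaotfload-characters.lua is installed): the
+--- first lookup on any of the derived tables triggers the load above.
+--
+-- print (characters.data[0x28].mirror) --> 0x29, “(” mirrors to “)”
+-- print (characters.directions[0x5D0]) --> "r" (Hebrew Alef)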
+
+-----------------------------------------------------------------------
+--- features / scripts / languages
+-----------------------------------------------------------------------
+--- lots of arrowcode ahead
+
+--[[doc--
+This function, modeled after “check_script()” from fontspec, returns
+true if, in the given font, the script “asked_script” is accounted for
+in at least one feature.
+--doc]]--
+
+--- int -> string -> bool
+local provides_script = function (font_id, asked_script)
+ asked_script = stringlower(asked_script)
+ if font_id and font_id > 0 then
+ local fontdata = identifiers[font_id].shared.rawdata
+ if fontdata then
+ local fontname = fontdata.metadata.fontname
+ local features = fontdata.resources.features
+ for method, featuredata in next, features do
+ --- where method: "gpos" | "gsub"
+ for feature, data in next, featuredata do
+ if data[asked_script] then
+ report ("log", 1, "aux",
+ "font no %d (%s) defines feature %s for script %s",
+ font_id, fontname, feature, asked_script)
+ return true
+ end
+ end
+ end
+ report ("log", 0, "aux",
+ "font no %d (%s) defines no feature for script %s",
+ font_id, fontname, asked_script)
+      return false
+    end
+ end
+ report ("log", 0, "aux", "no font with id %d", font_id)
+ return false
+end
+
+aux.provides_script = provides_script
+
+--[[doc--
+This function, modeled after “check_language()” from fontspec, returns
+true if, in the given font, the language with tag “asked_language” is
+accounted for in the script with tag “asked_script” in at least one
+feature.
+--doc]]--
+
+--- int -> string -> string -> bool
+local provides_language = function (font_id, asked_script, asked_language)
+ asked_script = stringlower(asked_script)
+ asked_language = stringlower(asked_language)
+ if font_id and font_id > 0 then
+ local fontdata = identifiers[font_id].shared.rawdata
+ if fontdata then
+ local fontname = fontdata.metadata.fontname
+ local features = fontdata.resources.features
+ for method, featuredata in next, features do
+ --- where method: "gpos" | "gsub"
+ for feature, data in next, featuredata do
+ local scriptdata = data[asked_script]
+ if scriptdata and scriptdata[asked_language] then
+ report ("log", 1, "aux",
+ "font no %d (%s) defines feature %s "
+ .. "for script %s with language %s",
+ font_id, fontname, feature,
+ asked_script, asked_language)
+ return true
+ end
+ end
+ end
+ report ("log", 0, "aux",
+ "font no %d (%s) defines no feature "
+ .. "for script %s with language %s",
+ font_id, fontname, asked_script, asked_language)
+      return false
+    end
+ end
+ report ("log", 0, "aux", "no font with id %d", font_id)
+ return false
+end
+
+aux.provides_language = provides_language
+
+--[[doc--
+We strip the syntax elements from feature definitions (shouldn’t
+actually be there in the first place, but who cares ...)
+--doc]]--
+
+local lpeg = require"lpeg"
+local C, P, S = lpeg.C, lpeg.P, lpeg.S
+local lpegmatch = lpeg.match
+
+local sign = S"+-"
+local rhs = P"=" * P(1)^0 * P(-1)
+local strip_garbage = sign^-1 * C((1 - rhs)^1)
+
+--s = "+foo" --> foo
+--ss = "-bar" --> bar
+--sss = "baz" --> baz
+--t = "foo=bar" --> foo
+--tt = "+bar=baz" --> bar
+--ttt = "-baz=true" --> baz
+--
+--print(lpeg.match(strip_garbage, s))
+--print(lpeg.match(strip_garbage, ss))
+--print(lpeg.match(strip_garbage, sss))
+--print(lpeg.match(strip_garbage, t))
+--print(lpeg.match(strip_garbage, tt))
+--print(lpeg.match(strip_garbage, ttt))
+
+--[[doc--
+This function, modeled after “check_feature()” from fontspec, returns
+true if, in the given font, the language with tag “asked_language” is
+accounted for in the script with tag “asked_script” in feature
+“asked_feature”.
+--doc]]--
+
+--- int -> string -> string -> string -> bool
+local provides_feature = function (font_id, asked_script,
+ asked_language, asked_feature)
+ asked_script = stringlower(asked_script)
+ asked_language = stringlower(asked_language)
+ asked_feature = lpegmatch(strip_garbage, asked_feature)
+
+ if font_id and font_id > 0 then
+ local fontdata = identifiers[font_id].shared.rawdata
+ if fontdata then
+ local features = fontdata.resources.features
+ local fontname = fontdata.metadata.fontname
+ for method, featuredata in next, features do
+ --- where method: "gpos" | "gsub"
+ local feature = featuredata[asked_feature]
+ if feature then
+ local scriptdata = feature[asked_script]
+ if scriptdata and scriptdata[asked_language] then
+ report ("log", 1, "aux",
+ "font no %d (%s) defines feature %s "
+ .. "for script %s with language %s",
+ font_id, fontname, asked_feature,
+ asked_script, asked_language)
+ return true
+ end
+ end
+ end
+ report ("log", 0, "aux",
+ "font no %d (%s) does not define feature %s for script %s with language %s",
+ font_id, fontname, asked_feature, asked_script, asked_language)
+      return false
+    end
+ end
+ report ("log", 0, "aux", "no font with id %d", font_id)
+ return false
+end
+
+aux.provides_feature = provides_feature
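+
+--- Usage sketch (not part of the original file; the tags are mere
+--- assumptions about the current font): check coverage before enabling
+--- a feature at the TeX end.
+--
+-- local id = font.current ()
+-- if aux.provides_script (id, "latn")
+-- and aux.provides_feature (id, "latn", "dflt", "smcp")
+-- then
+--   --- safe to request small capitals for this font
+-- end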
+
+-----------------------------------------------------------------------
+--- font dimensions
+-----------------------------------------------------------------------
+
+--- int -> string -> int
+local get_math_dimension = function (font_id, dimenname)
+ if type(font_id) == "string" then
+ font_id = fontid(font_id) --- safeguard
+ end
+ local fontdata = identifiers[font_id]
+ local mathdata = fontdata.mathparameters
+ if mathdata then
+ return mathdata[dimenname] or 0
+ end
+ return 0
+end
+
+aux.get_math_dimension = get_math_dimension
+
+--- int -> string -> unit
+local sprint_math_dimension = function (font_id, dimenname)
+ if type(font_id) == "string" then
+ font_id = fontid(font_id)
+ end
+ local dim = get_math_dimension(font_id, dimenname)
+ texsprint(luatexbase.catcodetables["latex-package"], dim, "sp")
+end
+
+aux.sprint_math_dimension = sprint_math_dimension
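+
+--- Sketch (illustration only; “tenmath” is an assumed font csname):
+--- from the TeX end one would typically call
+--
+-- \directlua{luaotfload.aux.sprint_math_dimension
+--            ("tenmath", "AxisHeight")}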
+
+-----------------------------------------------------------------------
+--- extra database functions
+-----------------------------------------------------------------------
+
+local namesresolve = fontnames.resolve
+local namesscan_dir = fontnames.scan_dir
+
+--[====[-- TODO -> port this to new db model
+
+--- local directories -------------------------------------------------
+
+--- migrated from luaotfload-database.lua
+--- https://github.com/lualatex/luaotfload/pull/61#issuecomment-17776975
+
+--- string -> (int * int)
+local scan_external_dir = function (dir)
+ local old_names, new_names = names.data()
+ if not old_names then
+ old_names = load_names()
+ end
+ new_names = tablecopy(old_names)
+ local n_scanned, n_new = scan_dir(dir, old_names, new_names)
+ --- FIXME
+ --- This doesn’t seem right. If a db update is triggered after this
+ --- point, then the added fonts will be saved along with it --
+ --- which is not as “temporarily” as it should be. (This should be
+ --- addressed during a refactoring of names_resolve().)
+ names.data = new_names
+ return n_scanned, n_new
+end
+
+aux.scan_external_dir = scan_external_dir
+
+--]====]--
+
+aux.scan_external_dir = function ()
+ print "ERROR: scan_external_dir() is not implemented"
+end
+
+--- db queries --------------------------------------------------------
+
+--- https://github.com/lualatex/luaotfload/issues/74
+--- string -> (string * int)
+local resolve_fontname = function (name)
+ local foundname, subfont, success = namesresolve(nil, nil, {
+ name = name,
+ lookup = "name",
+ optsize = 0,
+ specification = "name:" .. name,
+ })
+ if success then
+ return foundname, subfont
+ end
+ return false, false
+end
+
+aux.resolve_fontname = resolve_fontname
+
+--- string list -> (string * int)
+local resolve_fontlist
+resolve_fontlist = function (names, n)
+ if not n then
+ return resolve_fontlist(names, 1)
+ end
+ local this = names[n]
+ if this then
+ local foundname, subfont = resolve_fontname(this)
+ if foundname then
+ return foundname, subfont
+ end
+ return resolve_fontlist(names, n+1)
+ end
+ return false, false
+end
+
+aux.resolve_fontlist = resolve_fontlist
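+
+--- Sketch (not in the original sources; the names are made up): pick
+--- the first entry of a preference list that the index can resolve.
+--
+-- local filename, subfont = aux.resolve_fontlist
+--   { "TeX Gyre Adventor", "Latin Modern Sans" }
+-- --> file name and subfont index, or false, false if none resolves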
+
+--- index access ------------------------------------------------------
+
+--- Based on a discussion on the Luatex mailing list:
+--- http://tug.org/pipermail/luatex/2014-June/004881.html
+
+--[[doc--
+
+ aux.read_font_index -- Read the names index from the canonical
+ location and return its contents. This does not affect the behavior
+ of Luaotfload: The returned table is independent of what the font
+ resolvers use internally. Access is raw: each call to the function
+ will result in the entire table being re-read from disk.
+
+--doc]]--
+
+local load_names = fontnames.load
+local access_font_index = fontnames.access_font_index
+
+local read_font_index = function ()
+ return load_names (true) or { }
+end
+
+--[[doc--
+
+ aux.font_index -- Access Luaotfload’s internal database. If the
+ database hasn’t been loaded yet this will cause it to be loaded, with
+ all the possible side-effects like for instance creating the index
+ file if it doesn’t exist, reading all font files, &c.
+
+--doc]]--
+
+local font_index = function () return access_font_index () end
+
+aux.read_font_index = read_font_index
+aux.font_index = font_index
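+
+--- Inspection sketch (illustration only): read a throwaway copy of the
+--- index and list its top-level fields; the layout is an implementation
+--- detail of the database and may change between versions.
+--
+-- local index = aux.read_font_index ()
+-- for field in next, index do print (field) end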
+
+--- loaded fonts ------------------------------------------------------
+
+--- just a proof of concept
+
+--- fontobj -> string list -> (string list) list
+local get_font_data
+get_font_data = function (tfmdata, keys, acc, n)
+ if not acc then
+ return get_font_data(tfmdata, keys, {}, 1)
+ end
+ local key = keys[n]
+ if key then
+ local val = tfmdata[key]
+ if val then
+ acc[#acc+1] = val
+ else
+ acc[#acc+1] = false
+ end
+ return get_font_data(tfmdata, keys, acc, n+1)
+ end
+ return acc
+end
+
+--[[doc--
+
+ The next one operates on the fonts.hashes.identifiers table.
+ It returns a list containing tuples of font ids and the
+ contents of the fields specified in the first argument.
+ Font table entries that were created indirectly -- e.g. by
+ \letterspacefont or during font expansion -- will not be
+ listed.
+
+--doc]]--
+
+local default_keys = { "fullname" }
+
+--- string list -> (int * string list) list
+local get_loaded_fonts
+get_loaded_fonts = function (keys, acc, lastid)
+ if not acc then
+ if not keys then
+ keys = default_keys
+ end
+ return get_loaded_fonts(keys, {}, lastid)
+ end
+ local id, tfmdata = next(identifiers, lastid)
+ if id then
+ local data = get_font_data(tfmdata, keys)
+ acc[#acc+1] = { id, data }
+ return get_loaded_fonts (keys, acc, id)
+ end
+ return acc
+end
+
+aux.get_loaded_fonts = get_loaded_fonts
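+
+--- Sketch (not part of the original file): print id and fullname of
+--- every font loaded so far.
+--
+-- for _, entry in ipairs (aux.get_loaded_fonts { "fullname" }) do
+--   local id, data = entry[1], entry[2]
+--   print (id, data[1])
+-- end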
+
+--- Raw access to the font.* namespace is unsafe so no documentation on
+--- this one.
+local get_raw_fonts = function ( )
+ local res = { }
+ for i, v in font.each() do
+ if v.filename then
+ res[#res+1] = { i, v }
+ end
+ end
+ return res
+end
+
+aux.get_raw_fonts = get_raw_fonts
+
+-----------------------------------------------------------------------
+--- font parameters
+-----------------------------------------------------------------------
+--- analogy of font-hsh
+
+fonthashes.parameters = fonthashes.parameters or { }
+fonthashes.quads = fonthashes.quads or { }
+
+local parameters = fonthashes.parameters or { }
+local quads = fonthashes.quads or { }
+
+setmetatable(parameters, { __index = function (t, font_id)
+ local tfmdata = identifiers[font_id]
+ if not tfmdata then --- unsafe; avoid
+ tfmdata = font.fonts[font_id]
+ end
+ if tfmdata and type(tfmdata) == "table" then
+ local fontparameters = tfmdata.parameters
+ t[font_id] = fontparameters
+ return fontparameters
+ end
+ return nil
+end})
+
+--[[doc--
+
+  Note that we prefer functions over table indices because functions
+  are much safer against unintended manipulation. This justifies the
+  overhead they cost.
+
+--doc]]--
+
+--- int -> (number | false)
+local get_quad = function (font_id)
+ local quad = quads[font_id]
+ if quad then
+ return quad
+ end
+ local fontparameters = parameters[font_id]
+ if fontparameters then
+ local quad = fontparameters.quad or 0
+ quads[font_id] = quad
+ return quad
+ end
+ return false
+end
+
+aux.get_quad = get_quad
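+
+--- Sketch (illustration only): derive an em-relative kern for the
+--- current font; get_quad() yields scaled points or false.
+--
+-- local quad = aux.get_quad (font.current ())
+-- if quad then tex.sprint (stringformat ("\\kern%ssp", quad)) end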
+
+-- vim:tw=71:sw=2:ts=2:expandtab
diff --git a/src/luaotfload-basics-gen.lua b/src/luaotfload-basics-gen.lua
new file mode 100644
index 0000000..c19a49a
--- /dev/null
+++ b/src/luaotfload-basics-gen.lua
@@ -0,0 +1,368 @@
+if not modules then modules = { } end modules ['luat-basics-gen'] = {
+ version = 1.100,
+ comment = "companion to luatex-*.tex",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+
+local dummyfunction = function()
+end
+
+local dummyreporter = function(c)
+ return function(...)
+ (texio.reporter or texio.write_nl)(c .. " : " .. string.formatters(...))
+ end
+end
+
+statistics = {
+ register = dummyfunction,
+ starttiming = dummyfunction,
+ stoptiming = dummyfunction,
+ elapsedtime = nil,
+}
+
+directives = {
+ register = dummyfunction,
+ enable = dummyfunction,
+ disable = dummyfunction,
+}
+
+trackers = {
+ register = dummyfunction,
+ enable = dummyfunction,
+ disable = dummyfunction,
+}
+
+experiments = {
+ register = dummyfunction,
+ enable = dummyfunction,
+ disable = dummyfunction,
+}
+
+storage = { -- probably no longer needed
+ register = dummyfunction,
+ shared = { },
+}
+
+logs = {
+ new = dummyreporter,
+ reporter = dummyreporter,
+ messenger = dummyreporter,
+ report = dummyfunction,
+}
+
+callbacks = {
+ register = function(n,f) return callback.register(n,f) end,
+
+}
+
+utilities = {
+ storage = {
+ allocate = function(t) return t or { } end,
+ mark = function(t) return t or { } end,
+ },
+}
+
+characters = characters or {
+ data = { }
+}
+
+-- we need to cheat a bit here
+
+texconfig.kpse_init = true
+
+resolvers = resolvers or { } -- no fancy file helpers used
+
+local remapper = {
+ otf = "opentype fonts",
+ ttf = "truetype fonts",
+ ttc = "truetype fonts",
+ dfont = "truetype fonts", -- "truetype dictionary",
+ cid = "cid maps",
+ cidmap = "cid maps",
+ fea = "font feature files",
+ pfa = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this!
+ pfb = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this!
+ afm = "afm",
+}
+
+function resolvers.findfile(name,fileformat)
+ name = string.gsub(name,"\\","/")
+ if not fileformat or fileformat == "" then
+ fileformat = file.suffix(name)
+ if fileformat == "" then
+ fileformat = "tex"
+ end
+ end
+ fileformat = string.lower(fileformat)
+ fileformat = remapper[fileformat] or fileformat
+ local found = kpse.find_file(name,fileformat)
+ if not found or found == "" then
+ found = kpse.find_file(name,"other text files")
+ end
+ return found
+end
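+
+-- Usage sketch (not from the original file): the remapper above lets
+-- callers pass plain suffixes instead of kpathsea format names.
+--
+-- print (resolvers.findfile ("lmroman10-regular", "otf"))
+-- --> full path below texmf-dist/fonts/opentype/..., or nil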
+
+-- function resolvers.findbinfile(name,fileformat)
+-- if not fileformat or fileformat == "" then
+-- fileformat = file.suffix(name)
+-- end
+-- return resolvers.findfile(name,(fileformat and remapper[fileformat]) or fileformat)
+-- end
+
+resolvers.findbinfile = resolvers.findfile
+
+function resolvers.loadbinfile(filename,filetype)
+ local data = io.loaddata(filename)
+ return true, data, #data
+end
+
+function resolvers.resolve(s)
+ return s
+end
+
+function resolvers.unresolve(s)
+ return s
+end
+
+-- Caches ... I will make a real stupid version some day when I'm in the
+-- mood. After all, the generic code does not need the more advanced
+-- ConTeXt features. Cached data is not shared between ConTeXt and other
+-- usage as I don't want any dependency at all. Also, ConTeXt might have
+-- different needs and tricks added.
+
+--~ containers.usecache = true
+
+caches = { }
+
+local writable = nil
+local readables = { }
+local usingjit = jit
+
+if not caches.namespace or caches.namespace == "" or caches.namespace == "context" then
+ caches.namespace = 'generic'
+end
+
+do
+
+ -- standard context tree setup
+
+ local cachepaths = kpse.expand_var('$TEXMFCACHE') or ""
+
+ -- quite like tex live or so (the weird $TEXMFCACHE test seems to be needed on miktex)
+
+ if cachepaths == "" or cachepaths == "$TEXMFCACHE" then
+ cachepaths = kpse.expand_var('$TEXMFVAR') or ""
+ end
+
+ -- this also happened to be used (the weird $TEXMFVAR test seems to be needed on miktex)
+
+ if cachepaths == "" or cachepaths == "$TEXMFVAR" then
+ cachepaths = kpse.expand_var('$VARTEXMF') or ""
+ end
+
+ -- and this is a last resort (hm, we could use TEMP or TEMPDIR)
+
+ if cachepaths == "" then
+ local fallbacks = { "TMPDIR", "TEMPDIR", "TMP", "TEMP", "HOME", "HOMEPATH" }
+ for i=1,#fallbacks do
+ cachepaths = os.getenv(fallbacks[i]) or ""
+            if cachepaths ~= "" and lfs.isdir(cachepaths) then
+ break
+ end
+ end
+ end
+
+ if cachepaths == "" then
+ cachepaths = "."
+ end
+
+ cachepaths = string.split(cachepaths,os.type == "windows" and ";" or ":")
+
+ for i=1,#cachepaths do
+ local cachepath = cachepaths[i]
+ if not lfs.isdir(cachepath) then
+ lfs.mkdirs(cachepath) -- needed for texlive and latex
+ if lfs.isdir(cachepath) then
+ texio.write(string.format("(created cache path: %s)",cachepath))
+ end
+ end
+ if file.is_writable(cachepath) then
+ writable = file.join(cachepath,"luatex-cache")
+ lfs.mkdir(writable)
+ writable = file.join(writable,caches.namespace)
+ lfs.mkdir(writable)
+ break
+ end
+ end
+
+ for i=1,#cachepaths do
+ if file.is_readable(cachepaths[i]) then
+ readables[#readables+1] = file.join(cachepaths[i],"luatex-cache",caches.namespace)
+ end
+ end
+
+ if not writable then
+        texio.write_nl("quitting: fix your writable cache path")
+        os.exit()
+    elseif #readables == 0 then
+        texio.write_nl("quitting: fix your readable cache path")
+ os.exit()
+ elseif #readables == 1 and readables[1] == writable then
+ texio.write(string.format("(using cache: %s)",writable))
+ else
+ texio.write(string.format("(using write cache: %s)",writable))
+ texio.write(string.format("(using read cache: %s)",table.concat(readables, " ")))
+ end
+
+end
+
+function caches.getwritablepath(category,subcategory)
+ local path = file.join(writable,category)
+ lfs.mkdir(path)
+ path = file.join(path,subcategory)
+ lfs.mkdir(path)
+ return path
+end
+
+function caches.getreadablepaths(category,subcategory)
+ local t = { }
+ for i=1,#readables do
+ t[i] = file.join(readables[i],category,subcategory)
+ end
+ return t
+end
+
+local function makefullname(path,name)
+ if path and path ~= "" then
+ return file.addsuffix(file.join(path,name),"lua"), file.addsuffix(file.join(path,name),usingjit and "lub" or "luc")
+ end
+end
+
+function caches.is_writable(path,name)
+ local fullname = makefullname(path,name)
+ return fullname and file.is_writable(fullname)
+end
+
+function caches.loaddata(paths,name)
+ for i=1,#paths do
+ local data = false
+ local luaname, lucname = makefullname(paths[i],name)
+ if lucname and not lfs.isfile(lucname) and type(caches.compile) == "function" then
+ -- in case we used luatex and luajittex mixed ... lub or luc file
+ texio.write(string.format("(compiling luc: %s)",lucname))
+ data = loadfile(luaname)
+ if data then
+ data = data()
+ end
+ if data then
+ caches.compile(data,luaname,lucname)
+ return data
+ end
+ end
+ if lucname and lfs.isfile(lucname) then -- maybe also check for size
+ texio.write(string.format("(load luc: %s)",lucname))
+ data = loadfile(lucname)
+ if data then
+ data = data()
+ end
+ if data then
+ return data
+ else
+ texio.write(string.format("(loading failed: %s)",lucname))
+ end
+ end
+ if luaname and lfs.isfile(luaname) then
+ texio.write(string.format("(load lua: %s)",luaname))
+ data = loadfile(luaname)
+ if data then
+ data = data()
+ end
+ if data then
+ return data
+ end
+ end
+ end
+end
+
+function caches.savedata(path,name,data)
+ local luaname, lucname = makefullname(path,name)
+ if luaname then
+ texio.write(string.format("(save: %s)",luaname))
+ table.tofile(luaname,data,true)
+ if lucname and type(caches.compile) == "function" then
+ os.remove(lucname) -- better be safe
+ texio.write(string.format("(save: %s)",lucname))
+ caches.compile(data,luaname,lucname)
+ end
+ end
+end
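+
+-- Round-trip sketch (illustration only; category and file names are
+-- made up): write a table to the cache and load it back, preferring
+-- the compiled luc/lub variant when available.
+--
+-- local path = caches.getwritablepath ("demo", "test")
+-- caches.savedata (path, "sample", { answer = 42 })
+-- local data = caches.loaddata (caches.getreadablepaths ("demo", "test"), "sample")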
+
+-- According to KH os.execute is not permitted in plain/latex so there is
+-- no reason to use the normal context way. So the method here is slightly
+-- different from the one we have in context. We also use different suffixes
+-- as we don't want any clashes (sharing cache files is not that handy as
+-- context moves on faster.)
+--
+-- Beware: serialization might fail on large files (so maybe we should pcall
+-- this) in which case one should limit the method to luac and enable support
+-- for execution.
+
+-- function caches.compile(data,luaname,lucname)
+-- local d = io.loaddata(luaname)
+-- if not d or d == "" then
+-- d = table.serialize(data,true) -- slow
+-- end
+-- if d and d ~= "" then
+-- local f = io.open(lucname,'w')
+-- if f then
+-- local s = loadstring(d)
+-- if s then
+-- f:write(string.dump(s,true))
+-- end
+-- f:close()
+-- end
+-- end
+-- end
+
+function caches.compile(data,luaname,lucname)
+ local d = io.loaddata(luaname)
+ if not d or d == "" then
+ d = table.serialize(data,true) -- slow
+ end
+ if d and d ~= "" then
+ local f = io.open(lucname,'wb')
+ if f then
+ local s = loadstring(d)
+ if s then
+ f:write(string.dump(s,true))
+ end
+ f:close()
+ end
+ end
+end
+
+--
+
+function table.setmetatableindex(t,f)
+ setmetatable(t,{ __index = f })
+end
+
+-- helper for plain:
+
+arguments = { }
+
+if arg then
+ for i=1,#arg do
+ local k, v = string.match(arg[i],"^%-%-([^=]+)=?(.-)$")
+ if k and v then
+ arguments[k] = v
+ end
+ end
+end
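+
+-- Sketch (not part of the original file): for a command line such as
+--   luatex --lua=setup.lua --mode=draft doc.tex
+-- the loop above produces
+--
+-- assert (arguments.lua == "setup.lua")
+-- assert (arguments.mode == "draft")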
diff --git a/src/luaotfload-basics-nod.lua b/src/luaotfload-basics-nod.lua
new file mode 100644
index 0000000..373dab5
--- /dev/null
+++ b/src/luaotfload-basics-nod.lua
@@ -0,0 +1,178 @@
+if not modules then modules = { } end modules ['luatex-fonts-nod'] = {
+ version = 1.001,
+ comment = "companion to luatex-fonts.lua",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+
+-- Don't depend on code here as it is only needed to complement the
+-- font handler code.
+
+-- Attributes:
+
+if tex.attribute[0] ~= 0 then
+
+ texio.write_nl("log","!")
+ texio.write_nl("log","! Attribute 0 is reserved for ConTeXt's font feature management and has to be")
+ texio.write_nl("log","! set to zero. Also, some attributes in the range 1-255 are used for special")
+ texio.write_nl("log","! purposes so setting them at the TeX end might break the font handler.")
+ texio.write_nl("log","!")
+
+ tex.attribute[0] = 0 -- else no features
+
+end
+
+attributes = attributes or { }
+attributes.unsetvalue = -0x7FFFFFFF
+
+local numbers, last = { }, 127
+
+attributes.private = attributes.private or function(name)
+ local number = numbers[name]
+ if not number then
+ if last < 255 then
+ last = last + 1
+ end
+ number = last
+ numbers[name] = number
+ end
+ return number
+end
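+
+-- Sketch (illustration only; the attribute name is made up): allocate
+-- a private attribute number; repeated calls with the same name return
+-- the same slot in the 128-255 range.
+--
+-- local a_mystate = attributes.private ("mypackage_state")
+-- node.set_attribute (somenode, a_mystate, 1) -- “somenode” is assumed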
+
+-- Nodes:
+
+nodes = { }
+nodes.pool = { }
+nodes.handlers = { }
+
+local nodecodes = { } for k,v in next, node.types () do nodecodes[string.gsub(v,"_","")] = k end
+local whatcodes = { } for k,v in next, node.whatsits() do whatcodes[string.gsub(v,"_","")] = k end
+local glyphcodes = { [0] = "character", "glyph", "ligature", "ghost", "left", "right" }
+local disccodes = { [0] = "discretionary", "explicit", "automatic", "regular", "first", "second" }
+
+nodes.nodecodes = nodecodes
+nodes.whatcodes = whatcodes
+nodes.whatsitcodes = whatcodes
+nodes.glyphcodes = glyphcodes
+nodes.disccodes = disccodes
+
+local free_node = node.free
+local remove_node = node.remove
+local new_node = node.new
+local traverse_id = node.traverse_id
+
+nodes.handlers.protectglyphs = node.protect_glyphs
+nodes.handlers.unprotectglyphs = node.unprotect_glyphs
+
+local math_code = nodecodes.math
+local end_of_math = node.end_of_math
+
+function node.end_of_math(n)
+ if n.id == math_code and n.subtype == 1 then
+ return n
+ else
+ return end_of_math(n)
+ end
+end
+
+function nodes.remove(head, current, free_too)
+ local t = current
+ head, current = remove_node(head,current)
+ if t then
+ if free_too then
+ free_node(t)
+ t = nil
+ else
+ t.next, t.prev = nil, nil
+ end
+ end
+ return head, current, t
+end
+
+function nodes.delete(head,current)
+ return nodes.remove(head,current,true)
+end
+
+function nodes.pool.kern(k)
+ local n = new_node("kern",1)
+ n.kern = k
+ return n
+end
+
+-- experimental
+
+local getfield = node.getfield or function(n,tag) return n[tag] end
+local setfield = node.setfield or function(n,tag,value) n[tag] = value end
+
+nodes.getfield = getfield
+nodes.setfield = setfield
+
+nodes.getattr = getfield
+nodes.setattr = setfield
+
+if node.getid then nodes.getid = node.getid else function nodes.getid (n) return getfield(n,"id") end end
+if node.getsubtype then nodes.getsubtype = node.getsubtype else function nodes.getsubtype(n) return getfield(n,"subtype") end end
+if node.getnext then nodes.getnext = node.getnext else function nodes.getnext (n) return getfield(n,"next") end end
+if node.getprev then nodes.getprev = node.getprev else function nodes.getprev (n) return getfield(n,"prev") end end
+if node.getchar then nodes.getchar = node.getchar else function nodes.getchar (n) return getfield(n,"char") end end
+if node.getfont then nodes.getfont = node.getfont else function nodes.getfont (n) return getfield(n,"font") end end
+if node.getlist then nodes.getlist = node.getlist else function nodes.getlist (n) return getfield(n,"list") end end
+
+function nodes.tonut (n) return n end
+function nodes.tonode(n) return n end
+
+-- being lazy ... just copy a bunch ... not all needed in generic but we assume
+-- nodes to be kind of private anyway
+
+nodes.tostring = node.tostring or tostring
+nodes.copy = node.copy
+nodes.copy_list = node.copy_list
+nodes.delete = node.delete
+nodes.dimensions = node.dimensions
+nodes.end_of_math = node.end_of_math
+nodes.flush_list = node.flush_list
+nodes.flush_node = node.flush_node
+nodes.free = node.free
+nodes.insert_after = node.insert_after
+nodes.insert_before = node.insert_before
+nodes.hpack = node.hpack
+nodes.new = node.new
+nodes.tail = node.tail
+nodes.traverse = node.traverse
+nodes.traverse_id = node.traverse_id
+nodes.slide = node.slide
+nodes.vpack = node.vpack
+
+nodes.first_glyph = node.first_glyph
+nodes.first_character = node.first_character
+nodes.has_glyph = node.has_glyph or node.first_glyph
+
+nodes.current_attr = node.current_attr
+nodes.do_ligature_n = node.do_ligature_n
+nodes.has_field = node.has_field
+nodes.last_node = node.last_node
+nodes.usedlist = node.usedlist
+nodes.protrusion_skippable = node.protrusion_skippable
+nodes.write = node.write
+
+nodes.has_attribute = node.has_attribute
+nodes.set_attribute = node.set_attribute
+nodes.unset_attribute = node.unset_attribute
+
+nodes.protect_glyphs = node.protect_glyphs
+nodes.unprotect_glyphs = node.unprotect_glyphs
+nodes.kerning = node.kerning
+nodes.ligaturing = node.ligaturing
+nodes.mlist_to_hlist = node.mlist_to_hlist
+
+-- in generic code, at least for some time, we stay nodes, while in context
+-- we can go nuts (e.g. experimental); this split permits us to keep code
+-- used elsewhere stable but at the same time play around in context
+
+nodes.nuts = nodes
diff --git a/src/luaotfload-colors.lua b/src/luaotfload-colors.lua
new file mode 100644
index 0000000..9be2974
--- /dev/null
+++ b/src/luaotfload-colors.lua
@@ -0,0 +1,319 @@
+if not modules then modules = { } end modules ['luaotfload-colors'] = {
+ version = "2.5",
+ comment = "companion to luaotfload-main.lua (font color)",
+ author = "Khaled Hosny, Elie Roux, Philipp Gesang",
+ copyright = "Luaotfload Development Team",
+ license = "GNU GPL v2.0"
+}
+
+--[[doc--
+
+buggy coloring with the pre_output_filter when expansion is enabled
+ · tfmdata for different expansion values is split over different objects
+ · in ``initializeexpansion()``, chr.expansion_factor is set, and only
+ those characters that have it are affected
+ · in constructors.scale: chr.expansion_factor = ve*1000 if commented out
+ makes the bug vanish
+
+explanation: http://tug.org/pipermail/luatex/2013-May/004305.html
+
+--doc]]--
+
+local log = luaotfload.log
+local logreport = log.report
+
+local newnode = node.new
+local nodetype = node.id
+local traverse_nodes = node.traverse
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+
+local texset = tex.set
+local texget = tex.get
+
+local stringformat = string.format
+local stringgsub = string.gsub
+local stringfind = string.find
+local stringsub = string.sub
+
+local otffeatures = fonts.constructors.newfeatures("otf")
+local identifiers = fonts.hashes.identifiers
+local registerotffeature = otffeatures.register
+
+local add_color_callback --[[ this used to be a global‽ ]]
+
+--[[doc--
+This converts a single octet into a decimal with three digits of
+precision. The optional second argument limits precision to a single
+digit.
+--doc]]--
+
+--- string -> bool? -> string
+local hex_to_dec = function (hex,one) --- one isn’t actually used anywhere ...
+ if one then
+ return stringformat("%.1g", tonumber(hex, 16)/255)
+ else
+ return stringformat("%.3g", tonumber(hex, 16)/255)
+ end
+end
+
+--[[doc--
+Color string validator / parser.
+--doc]]--
+
+local lpeg = require"lpeg"
+local lpegmatch = lpeg.match
+local C, Cg, Ct, P, R, S = lpeg.C, lpeg.Cg, lpeg.Ct, lpeg.P, lpeg.R, lpeg.S
+
+local digit16 = R("09", "af", "AF")
+local octet = C(digit16 * digit16)
+
+local p_rgb = octet * octet * octet
+local p_rgba = p_rgb * octet
+local valid_digits = C(p_rgba + p_rgb) -- matches eight or six hex digits
+
+local p_Crgb = Cg(octet/hex_to_dec, "red") --- for captures
+ * Cg(octet/hex_to_dec, "green")
+ * Cg(octet/hex_to_dec, "blue")
+local p_Crgba = p_Crgb * Cg(octet/hex_to_dec, "alpha")
+local extract_color = Ct(p_Crgba + p_Crgb)
+
+--- string -> (string | nil)
+local sanitize_color_expression = function (digits)
+ digits = tostring(digits)
+ local sanitized = lpegmatch(valid_digits, digits)
+ if not sanitized then
+ logreport("both", 0, "color",
+ "%q is not a valid rgb[a] color expression",
+ digits)
+ return nil
+ end
+ return sanitized
+end
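+
+--- Parser sketch (not in the original file): what the validator and
+--- the capture-based extractor above return for an RGBA string.
+--
+-- sanitize_color_expression "FF0000BB" --> "FF0000BB"
+-- sanitize_color_expression "red" --> nil (reported as invalid)
+-- lpegmatch (extract_color, "FF0000BB")
+-- --> { red = "1", green = "0", blue = "0", alpha = "0.733" }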
+
+--[[doc--
+``setcolor`` modifies tfmdata.properties.color in place
+--doc]]--
+
+--- fontobj -> string -> unit
+---
+--- (where “string” is a rgb value as three octet
+--- hexadecimal, with an optional fourth transparency
+--- value)
+---
+local setcolor = function (tfmdata, value)
+ local sanitized = sanitize_color_expression(value)
+ local properties = tfmdata.properties
+
+ if sanitized then
+ properties.color = sanitized
+ add_color_callback()
+ end
+end
+
+registerotffeature {
+ name = "color",
+ description = "color",
+ initializers = {
+ base = setcolor,
+ node = setcolor,
+ }
+}
+
+
+--- something is carried around in ``res``
+--- for later use by color_handler() --- but what?
+
+local res = nil
+
+--- float -> unit
+local function pageresources(alpha)
+ res = res or {}
+ res[alpha] = true
+end
+
+--- we store results of below color handler as tuples of
+--- push/pop strings
+local color_cache = { } --- (string, (string * string)) hash_t
+
+--- string -> (string * string)
+local hex_to_rgba = function (digits)
+ if not digits then
+ return
+ end
+
+ --- this is called like a thousand times, so some
+ --- memoizing is in order.
+ local cached = color_cache[digits]
+ if not cached then
+ local push, pop
+ local rgb = lpegmatch(extract_color, digits)
+ if rgb.alpha then
+ pageresources(rgb.alpha)
+ push = stringformat(
+ "/TransGs%g gs %s %s %s rg",
+ rgb.alpha,
+ rgb.red,
+ rgb.green,
+ rgb.blue)
+ pop = "0 g /TransGs1 gs"
+ else
+ push = stringformat(
+ "%s %s %s rg",
+ rgb.red,
+ rgb.green,
+ rgb.blue)
+ pop = "0 g"
+ end
+ color_cache[digits] = { push, pop }
+ return push, pop
+ end
+
+ return cached[1], cached[2]
+end
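+
+--- Sketch (illustration only): the PDF literals produced for a
+--- semi-transparent red; these end up in the whatsits inserted by
+--- node_colorize() below.
+--
+-- local push, pop = hex_to_rgba "FF0000BB"
+-- --> push == "/TransGs0.733 gs 1 0 0 rg"
+-- --> pop  == "0 g /TransGs1 gs"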
+
+--- Luatex internal types
+
+local glyph_t = nodetype("glyph")
+local hlist_t = nodetype("hlist")
+local vlist_t = nodetype("vlist")
+local whatsit_t = nodetype("whatsit")
+local page_insert_t = nodetype("page_insert")
+local sub_box_t = nodetype("sub_box")
+
+--- node -> nil | -1 | color‽
+local lookup_next_color
+lookup_next_color = function (head) --- paragraph material
+ for n in traverse_nodes(head) do
+ local n_id = n.id
+
+ if n_id == glyph_t then
+            local n_font = n.font
+ if identifiers[n_font]
+ and identifiers[n_font].properties
+ and identifiers[n_font].properties.color
+ then
+ return identifiers[n.font].properties.color
+ else
+ return -1
+ end
+
+ elseif n_id == vlist_t or n_id == hlist_t or n_id == sub_box_t then
+ local r = lookup_next_color(n.list)
+ if r then
+ return r
+ end
+
+ elseif n_id == whatsit_t or n_id == page_insert_t then
+ return -1
+ end
+ end
+ return nil
+end
+
+--[[doc--
+While the second argument and second returned value are apparently
+always nil when the function is called, they temporarily take string
+values during the node list traversal.
+--doc]]--
+
+local cnt = 0
+--- node -> string -> int -> (node * string)
+local node_colorize
+node_colorize = function (head, current_color, next_color)
+ for n in traverse_nodes(head) do
+ local n_id = n.id
+ local nextnode = n.next
+
+ if n_id == hlist_t or n_id == vlist_t or n_id == sub_box_t then
+ local next_color_in = lookup_next_color(nextnode) or next_color
+ n.list, current_color = node_colorize(n.list, current_color, next_color_in)
+
+ elseif n_id == glyph_t then
+ cnt = cnt + 1
+ local tfmdata = identifiers[n.font]
+
+ --- colorization is restricted to those fonts
+ --- that received the “color” property upon
+ --- loading (see ``setcolor()`` above)
+ if tfmdata and tfmdata.properties and tfmdata.properties.color then
+ local font_color = tfmdata.properties.color
+-- luaotfload.info(
+-- "n: %d; %s; %d %s, %s",
+-- cnt, utf.char(n.char), n.font, "<TRUE>", font_color)
+ if font_color ~= current_color then
+ local pushcolor = hex_to_rgba(font_color)
+ local push = newnode(whatsit_t, 8)
+ push.mode = 1
+ push.data = pushcolor
+ head = insert_node_before(head, n, push)
+ current_color = font_color
+ end
+ local next_color_in = lookup_next_color (nextnode) or next_color
+ if next_color_in ~= font_color then
+ local _, popcolor = hex_to_rgba(font_color)
+ local pop = newnode(whatsit_t, 8)
+ pop.mode = 1
+ pop.data = popcolor
+ head = insert_node_after(head, n, pop)
+ current_color = nil
+ end
+
+-- else
+-- luaotfload.info(
+-- "n: %d; %s; %d %s",
+-- cnt, utf.char(n.char), n.font, "<FALSE>")
+ end
+ end
+ end
+ return head, current_color
+end
+
+--- node -> node
+local color_handler = function (head)
+ local new_head = node_colorize(head, nil, nil)
+ -- now append our page resources
+ if res then
+ res["1"] = true
+ local tpr = texget("pdfpageresources")
+ local t = ""
+ for k in pairs(res) do
+ local str = stringformat("/TransGs%s<</ca %s/CA %s>>", k, k, k)
+ if not stringfind(tpr,str) then
+ t = t .. str
+ end
+ end
+        if t ~= "" then
+            if not stringfind(tpr,"/ExtGState<<.*>>") then
+                tpr = tpr.."/ExtGState<<>>"
+            end
+            tpr = stringgsub(tpr,"/ExtGState<<","%1"..t)
+            texset("global", "pdfpageresources", tpr)
+ end
+ res = nil -- reset res
+ end
+ return new_head
+end
+
+local color_callback_activated = 0
+
+--- unit -> unit
+add_color_callback = function ( )
+ local color_callback = config.luaotfload.run.color_callback
+ if not color_callback then
+ color_callback = "pre_linebreak_filter"
+ end
+
+ if color_callback_activated == 0 then
+ luatexbase.add_to_callback(color_callback,
+ color_handler,
+ "luaotfload.color_handler")
+ color_callback_activated = 1
+ end
+end
+
+-- vim:tw=71:sw=4:ts=4:expandtab
+
diff --git a/src/luaotfload-configuration.lua b/src/luaotfload-configuration.lua
new file mode 100644
index 0000000..dfa222c
--- /dev/null
+++ b/src/luaotfload-configuration.lua
@@ -0,0 +1,704 @@
+#!/usr/bin/env texlua
+-------------------------------------------------------------------------------
+-- FILE: luaotfload-configuration.lua
+-- DESCRIPTION: config file reader
+-- REQUIREMENTS: Luaotfload 2.5 or above
+-- AUTHOR: Philipp Gesang (Phg), <phg42.2a@gmail.com>
+-- VERSION: same as Luaotfload
+-- MODIFIED: 2014-07-13 14:19:32+0200
+-------------------------------------------------------------------------------
+--
+
+if not modules then modules = { } end modules ["luaotfload-configuration"] = {
+ version = "2.5",
+ comment = "part of Luaotfload",
+ author = "Philipp Gesang",
+ copyright = "Luaotfload Development Team",
+ license = "GNU GPL v2.0"
+}
+
+luaotfload = luaotfload or { }
+config = config or { }
+config.luaotfload = { }
+
+local status_file = "luaotfload-status"
+local luaotfloadstatus = require (status_file)
+
+local string = string
+local stringsub = string.sub
+local stringexplode = string.explode
+local stringstrip = string.strip
+local stringfind = string.find
+
+local table = table
+local tableappend = table.append
+local tablecopy = table.copy
+local tableconcat = table.concat
+local tabletohash = table.tohash
+
+local math = math
+local mathfloor = math.floor
+
+local io = io
+local ioloaddata = io.loaddata
+local iopopen = io.popen
+
+local os = os
+local osgetenv = os.getenv
+
+local lpeg = require "lpeg"
+local lpegmatch = lpeg.match
+local commasplitter = lpeg.splitat ","
+local equalssplitter = lpeg.splitat "="
+
+local kpse = kpse
+local kpseexpand_path = kpse.expand_path
+local kpselookup = kpse.lookup
+
+local lfs = lfs
+local lfsisfile = lfs.isfile
+local lfsisdir = lfs.isdir
+
+local file = file
+local filejoin = file.join
+local filereplacesuffix = file.replacesuffix
+
+
+local parsers = luaotfload.parsers
+
+local log = luaotfload.log
+local logreport = log.report
+
+local config_parser = parsers.config
+local stripslashes = parsers.stripslashes
+
+local getwritablepath = caches.getwritablepath
+
+-------------------------------------------------------------------------------
+--- SETTINGS
+-------------------------------------------------------------------------------
+
+local path_t = 0
+local kpse_t = 1
+
+local val_home = kpseexpand_path "~"
+local val_xdg_config_home = kpseexpand_path "$XDG_CONFIG_HOME"
+
+if val_xdg_config_home == "" then val_xdg_config_home = "~/.config" end
+
+local config_paths = {
+ --- needs adapting for those other OS
+ { path_t, "./luaotfload.conf" },
+ { path_t, "./luaotfloadrc" },
+ { path_t, filejoin (val_xdg_config_home, "luaotfload/luaotfload.conf") },
+ { path_t, filejoin (val_xdg_config_home, "luaotfload/luaotfloadrc") },
+ { path_t, filejoin (val_home, ".luaotfloadrc") },
+ { kpse_t, "luaotfloadrc" },
+ { kpse_t, "luaotfload.conf" },
+}
+
+local valid_formats = tabletohash {
+ "otf", "ttc", "ttf", "dfont", "afm", "pfb", "pfa",
+}
+
+local feature_presets = {
+ arab = tabletohash {
+ "ccmp", "locl", "isol", "fina", "fin2",
+ "fin3", "medi", "med2", "init", "rlig",
+ "calt", "liga", "cswh", "mset", "curs",
+ "kern", "mark", "mkmk",
+ },
+ deva = tabletohash {
+ "ccmp", "locl", "init", "nukt", "akhn",
+ "rphf", "blwf", "half", "pstf", "vatu",
+ "pres", "blws", "abvs", "psts", "haln",
+ "calt", "blwm", "abvm", "dist", "kern",
+ "mark", "mkmk",
+ },
+ khmr = tabletohash {
+ "ccmp", "locl", "pref", "blwf", "abvf",
+ "pstf", "pres", "blws", "abvs", "psts",
+ "clig", "calt", "blwm", "abvm", "dist",
+ "kern", "mark", "mkmk",
+ },
+ thai = tabletohash {
+ "ccmp", "locl", "liga", "kern", "mark",
+ "mkmk",
+ },
+}
+
+
+
+-------------------------------------------------------------------------------
+--- DEFAULTS
+-------------------------------------------------------------------------------
+
+local default_config = {
+ db = {
+ formats = "otf,ttf,ttc,dfont",
+ scan_local = false,
+ skip_read = false,
+ strip = true,
+ update_live = true,
+ compress = true,
+ max_fonts = 2^51,
+ },
+ run = {
+ resolver = "cached",
+ definer = "patch",
+ log_level = 0,
+ color_callback = "pre_linebreak_filter",
+ },
+ misc = {
+ bisect = false,
+ version = luaotfload.version,
+ statistics = false,
+ termwidth = nil,
+ },
+ paths = {
+ names_dir = "names",
+ cache_dir = "fonts",
+ index_file = "luaotfload-names.lua",
+ lookups_file = "luaotfload-lookup-cache.lua",
+ lookup_path_lua = nil,
+ lookup_path_luc = nil,
+ index_path_lua = nil,
+ index_path_luc = nil,
+ },
+ default_features = {
+ global = { mode = "node" },
+ dflt = tabletohash {
+ "ccmp", "locl", "rlig", "liga", "clig",
+ "kern", "mark", "mkmk", 'itlc',
+ },
+
+ arab = feature_presets.arab,
+ syrc = feature_presets.arab,
+ mong = feature_presets.arab,
+ nko = feature_presets.arab,
+
+ deva = feature_presets.deva,
+ beng = feature_presets.deva,
+ guru = feature_presets.deva,
+ gujr = feature_presets.deva,
+ orya = feature_presets.deva,
+ taml = feature_presets.deva,
+ telu = feature_presets.deva,
+ knda = feature_presets.deva,
+ mlym = feature_presets.deva,
+ sinh = feature_presets.deva,
+
+ khmr = feature_presets.khmr,
+ tibt = feature_presets.khmr,
+ thai = feature_presets.thai,
+ lao = feature_presets.thai,
+
+ hang = tabletohash { "ccmp", "ljmo", "vjmo", "tjmo", },
+ },
+}
+
+-------------------------------------------------------------------------------
+--- RECONFIGURATION TASKS
+-------------------------------------------------------------------------------
+
+--[[doc--
+
+ Procedures to be executed in order to put the new configuration into effect.
+
+--doc]]--
+
+local reconf_tasks = nil
+
+local min_terminal_width = 40
+
+--- The “termwidth” value is only considered when printing
+--- short status messages, e.g. when building the database
+--- online.
+local check_termwidth = function ()
+ if config.luaotfload.misc.termwidth == nil then
+ local tw = 79
+ if not ( os.type == "windows" --- Assume broken terminal.
+ or osgetenv "TERM" == "dumb")
+ then
+ local p = iopopen "tput cols"
+ if p then
+        local result = tonumber (p:read "*all")
+ p:close ()
+ if result then
+ tw = result
+ else
+ logreport ("log", 2, "db", "tput returned non-number.")
+ end
+ else
+ logreport ("log", 2, "db", "Shell escape disabled or tput executable missing.")
+ logreport ("log", 2, "db", "Assuming 79 cols terminal width.")
+ end
+ end
+ config.luaotfload.misc.termwidth = tw
+ end
+ return true
+end
+
+local set_font_filter = function ()
+ local names = fonts.names
+ if names and names.set_font_filter then
+ local formats = config.luaotfload.db.formats
+ if not formats or formats == "" then
+ formats = default_config.db.formats
+ end
+ names.set_font_filter (formats)
+ end
+ return true
+end
+
+local set_name_resolver = function ()
+ local names = fonts.names
+ if names and names.resolve_cached then
+ --- replace the resolver from luatex-fonts
+    if config.luaotfload.run.resolver == "cached" then
+ logreport ("both", 2, "cache", "Caching of name: lookups active.")
+ names.resolvespec = names.resolve_cached
+ else
+ names.resolvespec = names.resolve_name
+ end
+ end
+ return true
+end
+
+local set_loglevel = function ()
+ log.set_loglevel (config.luaotfload.run.log_level)
+ return true
+end
+
+local build_cache_paths = function ()
+ local paths = config.luaotfload.paths
+ local prefix = getwritablepath (paths.names_dir, "")
+
+ if not prefix then
+ luaotfload.error ("Impossible to find a suitable writeable cache...")
+ return false
+ end
+
+ prefix = lpegmatch (stripslashes, prefix)
+ logreport ("log", 0, "conf", "Root cache directory is %s.", prefix)
+
+ local index_file = filejoin (prefix, paths.index_file)
+ local lookups_file = filejoin (prefix, paths.lookups_file)
+
+ paths.prefix = prefix
+ paths.index_path_lua = filereplacesuffix (index_file, "lua")
+ paths.index_path_luc = filereplacesuffix (index_file, "luc")
+ paths.lookup_path_lua = filereplacesuffix (lookups_file, "lua")
+ paths.lookup_path_luc = filereplacesuffix (lookups_file, "luc")
+ return true
+end
+
+
+local set_default_features = function ()
+ local default_features = config.luaotfload.default_features
+ luaotfload.features = luaotfload.features or {
+ global = { },
+ defaults = { },
+ }
+  local current_features = luaotfload.features
+ for var, val in next, default_features do
+ if var == "global" then
+ current_features.global = val
+ else
+ current_features.defaults[var] = val
+ end
+ end
+ return true
+end
+
+
+reconf_tasks = {
+ { "Set the log level" , set_loglevel },
+ { "Build cache paths" , build_cache_paths },
+ { "Check terminal dimensions" , check_termwidth },
+ { "Set the font filter" , set_font_filter },
+ { "Install font name resolver", set_name_resolver },
+ { "Set default features" , set_default_features },
+}
+
+-------------------------------------------------------------------------------
+--- OPTION SPECIFICATION
+-------------------------------------------------------------------------------
+
+local string_t = "string"
+local table_t = "table"
+local number_t = "number"
+local boolean_t = "boolean"
+local function_t = "function"
+
+local tointeger = function (n)
+ n = tonumber (n)
+ if n then
+ return mathfloor (n + 0.5)
+ end
+end
+
+local toarray = function (s)
+ local fields = { lpegmatch (commasplitter, s) }
+ local ret = { }
+ for i = 1, #fields do
+ local field = stringstrip (fields[i])
+ if field and field ~= "" then
+ ret[#ret + 1] = field
+ end
+ end
+ return ret
+end
+
+local tohash = function (s)
+ local result = { }
+ local fields = toarray (s)
+ for _, field in next, fields do
+ local var, val
+ if stringfind (field, "=") then
+ local tmp
+ var, tmp = lpegmatch (equalssplitter, field)
+ if tmp == "true" or tmp == "yes" then val = true else val = tmp end
+ else
+ var, val = field, true
+ end
+ result[var] = val
+ end
+ return result
+end
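+
+--- A minimal illustration (feature names hypothetical): a raw value
+--- like
+---
+--- tohash "mode=node,anum=true,liga"
+---
+--- yields { mode = "node", anum = true, liga = true }: “key=value”
+--- pairs keep their string value ("true"/"yes" being coerced to the
+--- boolean true), while bare identifiers are simply switched on.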
+
+local option_spec = {
+ db = {
+ formats = {
+ in_t = string_t,
+ out_t = string_t,
+ transform = function (f)
+ local fields = toarray (f)
+
+ --- check validity
+ if not fields then
+ logreport ("both", 0, "conf",
+ "Expected list of identifiers, got %q.", f)
+ return nil
+ end
+
+ --- strip dupes
+ local known = { }
+ local result = { }
+ for i = 1, #fields do
+ local field = fields[i]
+ if known[field] ~= true then
+ --- yet unknown, tag as seen
+ known[field] = true
+ --- include in output if valid
+ if valid_formats[field] == true then
+ result[#result + 1] = field
+ else
+ logreport ("both", 4, "conf",
+ "Invalid font format identifier %q, ignoring.",
+ field)
+ end
+ end
+ end
+ if #result == 0 then
+ --- force defaults
+ return nil
+ end
+ return tableconcat (result, ",")
+ end
+ },
+ scan_local = { in_t = boolean_t, },
+ skip_read = { in_t = boolean_t, },
+ strip = { in_t = boolean_t, },
+ update_live = { in_t = boolean_t, },
+ compress = { in_t = boolean_t, },
+ max_fonts = {
+ in_t = number_t,
+ out_t = number_t, --- TODO int_t from 5.3.x on
+ transform = tointeger,
+ },
+ },
+ run = {
+ resolver = {
+ in_t = string_t,
+ out_t = string_t,
+ transform = function (r) return r == "normal" and r or "cached" end,
+ },
+ definer = {
+ in_t = string_t,
+ out_t = string_t,
+ transform = function (d) return d == "generic" and d or "patch" end,
+ },
+ log_level = {
+ in_t = number_t,
+ out_t = number_t, --- TODO int_t from 5.3.x on
+ transform = tointeger,
+ },
+ color_callback = {
+ in_t = string_t,
+ out_t = string_t,
+ transform = function (cb)
+ --- These are the two that make sense.
+ return cb == "pre_output_filter" and cb or "pre_linebreak_filter"
+ end,
+ },
+ },
+ misc = {
+ bisect = { in_t = boolean_t, }, --- doesn’t make sense in a config file
+ version = { in_t = string_t, },
+ statistics = { in_t = boolean_t, },
+ termwidth = {
+ in_t = number_t,
+ out_t = number_t,
+ transform = function (w)
+ w = tointeger (w)
+ if w < min_terminal_width then
+ return min_terminal_width
+ end
+ return w
+ end,
+ },
+ },
+ paths = {
+ names_dir = { in_t = string_t, },
+ cache_dir = { in_t = string_t, },
+ index_file = { in_t = string_t, },
+ lookups_file = { in_t = string_t, },
+ lookup_path_lua = { in_t = string_t, },
+ lookup_path_luc = { in_t = string_t, },
+ index_path_lua = { in_t = string_t, },
+ index_path_luc = { in_t = string_t, },
+ },
+ default_features = {
+ __default = { in_t = string_t, out_t = table_t, transform = tohash, },
+ },
+}
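+
+--[[doc--
+
+ Illustration of how a parsed configuration section travels through
+ this specification (values made up; we assume the INI-style
+ “[section]” syntax read by parsers.config):
+
+ [db]
+ formats = otf,ttf,bogus
+
+ The value passes the in_t check (it is a string); the “formats”
+ transform then splits it, drops duplicates and the unknown
+ identifier “bogus”, and returns "otf,ttf", which also satisfies
+ out_t. A value of the wrong input type, or one whose transform
+ returns nil, is ignored by process_options() below and the
+ corresponding default remains in effect.
+
+--doc]]--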
+
+-------------------------------------------------------------------------------
+--- MAIN FUNCTIONALITY
+-------------------------------------------------------------------------------
+
+--[[doc--
+
+ tilde_expand -- Rudimentary tilde expansion; covers just the “substitute ‘~’
+ by the current user’s $HOME” part.
+
+--doc]]--
+
+local tilde_expand = function (p)
+ if #p > 2 then
+ if stringsub (p, 1, 2) == "~/" then
+ local homedir = osgetenv "HOME"
+ if homedir and lfsisdir (homedir) then
+ p = filejoin (homedir, stringsub (p, 3))
+ end
+ end
+ end
+ return p
+end
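+
+--- For instance (assuming HOME=/home/alice and that the directory
+--- exists):
+---
+--- tilde_expand "~/luaotfload.conf" --> "/home/alice/luaotfload.conf"
+---
+--- Anything else, including “~alice/…”-style paths, is returned
+--- unchanged.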
+
+local resolve_config_path = function ()
+ for i = 1, #config_paths do
+ local t, p = unpack (config_paths[i])
+ local fullname
+ if t == kpse_t then
+ fullname = kpse.lookup (p)
+ logreport ("both", 6, "conf", "kpse lookup: %s -> %s.", p, fullname)
+ elseif t == path_t then
+ local expanded = tilde_expand (p)
+ if lfsisfile (expanded) then
+ fullname = expanded
+ end
+ logreport ("both", 6, "conf", "path lookup: %s -> %s.", p, fullname)
+ end
+ if fullname then
+ logreport ("both", 3, "conf", "Reading configuration file at %q.", fullname)
+ return fullname
+ end
+ end
+ logreport ("both", 2, "conf", "No configuration file found.")
+ return false
+end
+
+local add_config_paths = function (t)
+ if not next (t) then
+ return
+ end
+ local result = { }
+ for i = 1, #t do
+ local path = t[i]
+ result[#result + 1] = { path_t, path }
+ end
+ config_paths = tableappend (result, config_paths)
+end
+
+local process_options = function (opts)
+ local new = { }
+ for i = 1, #opts do
+ local section = opts[i]
+ local title = section.section.title
+ local vars = section.variables
+
+ if not title then --- trigger warning: arrow code ahead
+ logreport ("both", 2, "conf", "Section %d lacks a title; skipping.", i)
+ elseif not vars then
+ logreport ("both", 2, "conf", "Section %d (%s) lacks a variable section; skipping.", i, title)
+ else
+ local spec = option_spec[title]
+ if not spec then
+ logreport ("both", 2, "conf", "Section %d (%s) unknown; skipping.", i, title)
+ else
+ local newsection = new[title]
+ if not newsection then
+ newsection = { }
+ new[title] = newsection
+ end
+
+ for var, val in next, vars do
+ local vspec = spec[var] or spec.__default
+ local t_val = type (val)
+ if not vspec then
+ logreport ("both", 2, "conf",
+ "Section %d (%s): invalid configuration variable %q (%q); ignoring.",
+ i, title,
+ var, tostring (val))
+ elseif t_val ~= vspec.in_t then
+ logreport ("both", 2, "conf",
+ "Section %d (%s): type mismatch of input value %q (%q, %s != %s); ignoring.",
+ i, title,
+ var, tostring (val), t_val, vspec.in_t)
+ else --- type matches
+ local transform = vspec.transform
+ if transform then
+ local dval
+ local t_transform = type (transform)
+ if t_transform == function_t then
+ dval = transform (val)
+ elseif t_transform == table_t then
+ dval = transform[val]
+ end
+ if dval then
+ local out_t = vspec.out_t
+ if out_t then
+ local t_dval = type (dval)
+ if t_dval == out_t then
+ newsection[var] = dval
+ else
+ logreport ("both", 2, "conf",
+ "Section %d (%s): type mismatch of derived value of %q (%q, %s != %s); ignoring.",
+ i, title,
+ var, tostring (dval), t_dval, out_t)
+ end
+ else
+ newsection[var] = dval
+ end
+ else
+ logreport ("both", 2, "conf",
+ "Section %d (%s): value of %q could not be derived via %s from input %q; ignoring.",
+ i, title, var, t_transform, tostring (val))
+ end
+ else --- insert as is
+ newsection[var] = val
+ end
+ end
+ end
+ end
+ end
+ end
+ return new
+end
+
+local apply
+apply = function (old, new)
+ if not new then
+ if not old then
+ return false
+ end
+ return tablecopy (old)
+ elseif not old then
+ return tablecopy (new)
+ end
+ local result = tablecopy (old)
+ for name, section in next, new do
+ local t_section = type (section)
+ if t_section ~= table_t then
+ logreport ("both", 1, "conf",
+ "Error applying configuration: entry %s is %s, expected table.",
+ name, t_section)
+ --- ignore
+ else
+ local currentsection = result[name]
+ for var, val in next, section do
+ currentsection[var] = val
+ end
+ end
+ end
+ result.status = luaotfloadstatus
+ return result
+end
+
+local reconfigure = function ()
+ for i = 1, #reconf_tasks do
+ local name, task = unpack (reconf_tasks[i])
+ logreport ("both", 3, "conf", "Launch post-configuration task %q.", name)
+ if not task () then
+ logreport ("both", 0, "conf", "Post-configuration task %q failed.", name)
+ return false
+ end
+ end
+ return true
+end
+
+local read = function (extra)
+ if extra then
+ add_config_paths (extra)
+ end
+
+ local readme = resolve_config_path ()
+ if readme == false then
+ logreport ("both", 2, "conf", "No configuration file.")
+ return false
+ end
+
+ local raw = ioloaddata (readme)
+ if not raw then
+ logreport ("both", 2, "conf", "Error reading the configuration file %q.", readme)
+ return false
+ end
+
+ local parsed = lpegmatch (parsers.config, raw)
+ if not parsed then
+ logreport ("both", 2, "conf", "Error parsing configuration file %q.", readme)
+ return false
+ end
+
+ local ret, msg = process_options (parsed)
+ if not ret then
+ logreport ("both", 2, "conf", "File %q is not a valid configuration file.", readme)
+ logreport ("both", 2, "conf", "Error: %s", msg)
+ return false
+ end
+ return ret
+end
+
+local apply_defaults = function ()
+ local defaults = default_config
+ local vars = read ()
+ --- Side-effects galore ...
+ config.luaotfload = apply (defaults, vars)
+ return reconfigure ()
+end
+
+-------------------------------------------------------------------------------
+--- EXPORTS
+-------------------------------------------------------------------------------
+
+luaotfload.default_config = default_config
+
+config.actions = {
+ read = read,
+ apply = apply,
+ apply_defaults = apply_defaults,
+ reconfigure = reconfigure,
+}
+
diff --git a/src/luaotfload-database.lua b/src/luaotfload-database.lua
new file mode 100644
index 0000000..7a01ca6
--- /dev/null
+++ b/src/luaotfload-database.lua
@@ -0,0 +1,3466 @@
+if not modules then modules = { } end modules ['luaotfload-database'] = {
+ version = "2.5",
+ comment = "companion to luaotfload-main.lua",
+ author = "Khaled Hosny, Elie Roux, Philipp Gesang",
+ copyright = "Luaotfload Development Team",
+ license = "GNU GPL v2.0"
+}
+
+--[[doc--
+
+ Some statistics:
+
+ a) TL 2012, mkluatexfontdb --force
+ b) v2.4, luaotfload-tool --update --force
+ c) v2.4, luaotfload-tool --update --force --formats=+afm,pfa,pfb
+ d) Context, mtxrun --script fonts --reload --force
+
+ (Keep in mind that Context does index fewer fonts since it
+ considers only the contents of the minimals tree, not the
+ TeX Live one!)
+
+ time (m:s) peak VmSize (kB)
+ a 1:19 386 018
+ b 0:37 715 797
+ c 2:27 1 017 674
+ d 0:44 1 082 313
+
+ Most of the increase in memory consumption from version 1.x to 2.2+
+ can be attributed to the move from single-pass to a multi-pass
+ approach to building the index: Information is first gathered from
+ all reachable fonts and only afterwards processed, classified and
+ discarded. Also, there is a good deal of additional stuff kept in
+ the database now: two extra tables for file names and font families
+ have been added, making font lookups more efficient while improving
+ maintainability of the code.
+
+--doc]]--
+
+local lpeg = require "lpeg"
+local P, Cc, lpegmatch = lpeg.P, lpeg.Cc, lpeg.match
+
+local parsers = luaotfload.parsers
+local read_fonts_conf = parsers.read_fonts_conf
+local stripslashes = parsers.stripslashes
+local splitcomma = parsers.splitcomma
+
+local log = luaotfload.log
+local report = log.report
+local report_status = log.names_status
+local report_status_start = log.names_status_start
+local report_status_stop = log.names_status_stop
+
+
+--- Luatex builtins
+local load = load
+local next = next
+local require = require
+local tonumber = tonumber
+local unpack = table.unpack
+
+local fontloaderinfo = fontloader.info
+local fontloaderclose = fontloader.close
+local fontloaderopen = fontloader.open
+----- fontloaderto_table = fontloader.to_table
+local gzipopen = gzip.open
+local iolines = io.lines
+local ioopen = io.open
+local iopopen = io.popen
+local kpseexpand_path = kpse.expand_path
+local kpsefind_file = kpse.find_file
+local kpselookup = kpse.lookup
+local kpsereadable_file = kpse.readable_file
+local lfsattributes = lfs.attributes
+local lfschdir = lfs.chdir
+local lfscurrentdir = lfs.currentdir
+local lfsdir = lfs.dir
+local mathabs = math.abs
+local mathmin = math.min
+local osgetenv = os.getenv
+local osgettimeofday = os.gettimeofday
+local osremove = os.remove
+local stringfind = string.find
+local stringformat = string.format
+local stringgmatch = string.gmatch
+local stringgsub = string.gsub
+local stringlower = string.lower
+local stringsub = string.sub
+local stringupper = string.upper
+local tableconcat = table.concat
+local tablesort = table.sort
+local utf8gsub = unicode.utf8.gsub
+local utf8lower = unicode.utf8.lower
+local utf8len = unicode.utf8.len
+local zlibcompress = zlib.compress
+
+--- these come from Lualibs/Context
+local filebasename = file.basename
+local filecollapsepath = file.collapsepath or file.collapse_path
+local filedirname = file.dirname
+local fileextname = file.extname
+local fileiswritable = file.iswritable
+local filejoin = file.join
+local filenameonly = file.nameonly
+local filereplacesuffix = file.replacesuffix
+local filesplitpath = file.splitpath or file.split_path
+local filesuffix = file.suffix
+local getwritablepath = caches.getwritablepath
+local lfsisdir = lfs.isdir
+local lfsisfile = lfs.isfile
+local lfsmkdirs = lfs.mkdirs
+local lpegsplitat = lpeg.splitat
+local stringis_empty = string.is_empty
+local stringsplit = string.split
+local stringstrip = string.strip
+local tableappend = table.append
+local tablecontains = table.contains
+local tablecopy = table.copy
+local tablefastcopy = table.fastcopy
+local tabletofile = table.tofile
+local tabletohash = table.tohash
+local tableserialize = table.serialize
+--- the font loader namespace is “fonts”, same as in Context
+--- we need to put some fallbacks into place for when running
+--- as a script
+fonts = fonts or { }
+fonts.names = fonts.names or { }
+fonts.definers = fonts.definers or { }
+
+local names = fonts.names
+local name_index = nil --> upvalue for names.data
+local lookup_cache = nil --> for names.lookups
+names.version = 2.51
+names.data = nil --- contains the loaded database
+names.lookups = nil --- contains the lookup cache
+
+--- string -> (string * string)
+local make_luanames = function (path)
+ return filereplacesuffix(path, "lua"),
+ filereplacesuffix(path, "luc")
+end
+
+local format_precedence = {
+ "otf", "ttc", "ttf",
+ "dfont", "afm", "pfb",
+ "pfa",
+}
+
+local location_precedence = {
+ "local", "system", "texmf",
+}
+
+local set_location_precedence = function (precedence)
+ location_precedence = precedence
+end
+
+--[[doc--
+
+ Auxiliary functions
+
+--doc]]--
+
+--- fontnames contain all kinds of garbage; as a precaution we
+--- lowercase and strip them of non alphanumerical characters
+
+--- string -> string
+
+local invalidchars = "[^%a%d]"
+
+local sanitize_fontname = function (str)
+ if str ~= nil then
+ str = utf8gsub (utf8lower (str), invalidchars, "")
+ return str
+ end
+ return nil
+end
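+
+--- For instance:
+---
+--- sanitize_fontname "TeX Gyre Pagella" --> "texgyrepagella"
+--- sanitize_fontname "LMRoman10-Regular" --> "lmroman10regular"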
+
+local sanitize_fontnames = function (rawnames)
+ local result = { }
+ for category, namedata in next, rawnames do
+
+ if type (namedata) == "string" then
+ result [category] = utf8gsub (utf8lower (namedata),
+ invalidchars,
+ "")
+ else
+ local target = { }
+ for field, name in next, namedata do
+ target [field] = utf8gsub (utf8lower (name),
+ invalidchars,
+ "")
+ end
+ result [category] = target
+ end
+ end
+ return result
+end
+
+local find_files_indeed
+find_files_indeed = function (acc, dirs, filter)
+ if not next (dirs) then --- done
+ return acc
+ end
+
+ local pwd = lfscurrentdir ()
+ local dir = dirs[#dirs]
+ dirs[#dirs] = nil
+
+ if lfschdir (dir) then
+ lfschdir (pwd)
+
+ local newfiles = { }
+ for ent in lfsdir (dir) do
+ if ent ~= "." and ent ~= ".." then
+ local fullpath = dir .. "/" .. ent
+ if filter (fullpath) == true then
+ if lfsisdir (fullpath) then
+ dirs[#dirs+1] = fullpath
+ elseif lfsisfile (fullpath) then
+ newfiles[#newfiles+1] = fullpath
+ end
+ end
+ end
+ end
+ return find_files_indeed (tableappend (acc, newfiles),
+ dirs, filter)
+ end
+ --- could not cd into, so we skip it
+ return find_files_indeed (acc, dirs, filter)
+end
+
+local dummyfilter = function () return true end
+
+--- the optional filter function receives the full path of a file
+--- system entity. A filter applies if the first value it returns is
+--- true.
+
+--- string -> function? -> string list
+local find_files = function (root, filter)
+ if lfsisdir (root) then
+ return find_files_indeed ({}, { root }, filter or dummyfilter)
+ end
+end
+
+
+--[[doc--
+This is a sketch of the luaotfload db:
+
+ type dbobj = {
+ families : familytable;
+ files : filemap;
+ status : filestatus;
+ mappings : fontentry list;
+ meta : metadata;
+ }
+ and familytable = {
+ local : (format, familyentry) hash; // specified with include dir
+ texmf : (format, familyentry) hash;
+ system : (format, familyentry) hash;
+ }
+ and familyentry = {
+ r : sizes; // regular
+ i : sizes; // italic
+ b : sizes; // bold
+ bi : sizes; // bold italic
+ }
+ and sizes = {
+ default : int; // points into mappings or names
+ optical : (int, int) list; // design size -> index entry
+ }
+ and metadata = {
+ created : string // creation time
+ formats : string list; // { "otf", "ttf", "ttc", "dfont" }
+ local : bool; (* set if local fonts were added to the db *)
+ modified : string // modification time
+ statistics : TODO; // created when built with "--stats"
+ version : float; // index version
+ }
+ and filemap = { // created by generate_filedata()
+ base : {
+ local : (string, int) hash; // basename -> idx
+ system : (string, int) hash;
+ texmf : (string, int) hash;
+ };
+ bare : {
+ local : (string, (string, int) hash) hash; // location -> (barename -> idx)
+ system : (string, (string, int) hash) hash;
+ texmf : (string, (string, int) hash) hash;
+ };
+ full : (int, string) hash; // idx -> full path
+ }
+ and fontentry = { // finalized by collect_families()
+ basename : string; // file name without path "foo.otf"
+ conflicts : { barename : int; basename : int }; // filename conflict with font at index; happens with subfonts
+ familyname : string; // sanitized name of the font family the font belongs to, usually from the names table
+ fontname : string; // sanitized name of the font
+ fontstyle_name : string; // the fontstyle_name field returned by fontloader.info()
+ format : string; // "otf" | "ttf" | "dfont" | "pfa" | "pfb" | "afm"
+ fullname : string; // sanitized full name of the font including style modifiers
+ fullpath : string; // path to font in filesystem
+ index : int; // index in the mappings table
+ italicangle : float; // italic angle; non-zero with oblique faces
+ location : string; // "texmf" | "system" | "local"
+ metafamily : string; // alternative family identifier if appropriate, sanitized
+ plainname : string; // unsanitized font name
+ prefmodifiers : string; // sanitized preferred subfamily (names table 14)
+ psname : string; // PostScript name
+ size : (false | float * float * float); // if available, size info from the size table converted from decipoints
+ splainname : string; // sanitized version of the “plainname” field
+ splitstyle : string; // style information obtained by splitting the full name at the last dash
+ subfamily : string; // sanitized subfamily (names table 2)
+ subfont : (int | bool); // integer if font is part of a TrueType collection ("ttc")
+ version : string; // font version string
+ weight : int; // usWeightClass
+ }
+ and filestatus = (string, // fullname
+ { index : int list; // pointer into mappings
+ timestamp : int; }) dict
+
+beware that this is a reconstruction and may be incomplete or out of
+date. Last update: 2014-04-06, describing version 2.51.
+
+mtx-fonts has in names.tma:
+
+ type names = {
+ cache_uuid : uuid;
+ cache_version : float;
+ datastate : uuid list;
+ fallbacks : (filetype, (basename, int) hash) hash;
+ families : (basename, int list) hash;
+ files : (filename, fullname) hash;
+ indices : (fullname, int) hash;
+ mappings : (filetype, (basename, int) hash) hash;
+ names : ? (empty hash) ?;
+ rejected : (basename, int) hash;
+ specifications: fontentry list;
+ }
+ and fontentry = {
+ designsize : int;
+ familyname : string;
+ filename : string;
+ fontname : string;
+ format : string;
+ fullname : string;
+ maxsize : int;
+ minsize : int;
+ modification : int;
+ rawname : string;
+ style : string;
+ subfamily : string;
+ variant : string;
+ weight : string;
+ width : string;
+ }
+
+--doc]]--
+
+--- string list -> string option -> dbobj
+
+local initialize_namedata = function (formats, created)
+ local now = os.date "%F %T"
+ return {
+ --families = { },
+ status = { }, -- was: status; map abspath -> mapping
+ mappings = { }, -- TODO: check if still necessary after rewrite
+ names = { },
+-- files = { }, -- created later
+ meta = {
+ created = created or now,
+ formats = formats,
+ ["local"] = false,
+ modified = now,
+ statistics = { },
+ version = names.version,
+ },
+ }
+end
+
+--[[doc--
+
+ Since Luaotfload does not depend on the lualibs anymore we
+ have to put our own small wrappers for the gzip library in
+ place.
+
+ load_gzipped -- Read and decompress an entire gzipped file.
+ Returns the uncompressed content as a string.
+
+--doc]]--
+
+local load_gzipped = function (filename)
+ local gh = gzipopen (filename,"rb")
+ if gh then
+ local data = gh:read "*all"
+ gh:close ()
+ return data
+ end
+end
+
+--[[doc--
+
+ save_gzipped -- Compress and write a string to file. The return
+ value is the number of bytes written. Zlib parameters are: best
+ compression and default strategy.
+
+--doc]]--
+
+local save_gzipped = function (filename, data)
+ local gh = gzipopen (filename, "wb9")
+ if gh then
+ gh:write (data)
+ local bytes = gh:seek ()
+ gh:close ()
+ return bytes
+ end
+end
+
+--- When loading a lua file we try its binary complement first, which
+--- is assumed to be located at an identical path, carrying the suffix
+--- .luc.
+
+--- string -> (string * table)
+local load_lua_file = function (path)
+ local foundname = filereplacesuffix (path, "luc")
+ local code = nil
+
+ local fh = ioopen (foundname, "rb") -- try bin first
+ if fh then
+ local chunk = fh:read"*all"
+ fh:close()
+ code = load (chunk, "b")
+ end
+
+ if not code then --- fall back to text file
+ foundname = filereplacesuffix (path, "lua")
+ fh = ioopen(foundname, "rb")
+ if fh then
+ local chunk = fh:read"*all"
+ fh:close()
+ code = load (chunk, "t")
+ end
+ end
+
+ if not code then --- probe gzipped file
+ foundname = filereplacesuffix (path, "lua.gz")
+ local chunk = load_gzipped (foundname)
+ if chunk then
+ code = load (chunk, "t")
+ end
+ end
+
+ if not code then return nil, nil end
+ return foundname, code ()
+end
+
+--- define locals in scope
+local access_font_index
+local collect_families
+local font_file_lookup
+local find_closest
+local flush_lookup_cache
+local generate_filedata
+local get_font_filter
+local group_modifiers
+local load_lookups
+local load_names
+local getmetadata
+local order_design_sizes
+local ot_fullinfo
+local read_blacklist
+local reload_db
+local resolve_cached
+local resolve_fullpath
+local resolve_name
+local save_lookups
+local save_names
+local set_font_filter
+local t1_fullinfo
+local update_names
+
+--- state of the database
+local fonts_reloaded = false
+
+--- limit output when approximate font matching (luaotfload-tool -F)
+local fuzzy_limit = 1 --- display closest only
+
+--- bool? -> dbobj
+load_names = function (dry_run)
+ local starttime = osgettimeofday ()
+ local foundname, data = load_lua_file (config.luaotfload.paths.index_path_lua)
+
+ if data then
+ report ("log", 0, "db",
+ "Font names database loaded from %s", foundname)
+ report ("term", 3, "db",
+ "Font names database loaded from %s", foundname)
+ report ("info", 3, "db", "Loading took %0.f ms.",
+ 1000 * (osgettimeofday () - starttime))
+
+ local db_version, names_version
+ if data.meta then
+ db_version = data.meta.version
+ else
+ --- Compatibility branch; the version info used to be
+ --- stored in the table root which is why updating from
+ --- an earlier index version broke.
+ db_version = data.version or -42 --- invalid
+ end
+ names_version = names.version
+ if db_version ~= names_version then
+ report ("both", 0, "db",
+ [[Version mismatch; expected %4.3f, got %4.3f.]],
+ names_version, db_version)
+ if not fonts_reloaded then
+ report ("both", 0, "db", [[Force rebuild.]])
+ data = update_names ({ }, true, false)
+ if not data then
+ report ("both", 0, "db",
+ "Database creation unsuccessful.")
+ end
+ end
+ end
+ else
+ report ("both", 0, "db",
+ [[Font names database not found, generating new one.]])
+ report ("both", 0, "db",
+ [[This can take several minutes; please be patient.]])
+ data = update_names (initialize_namedata (get_font_filter ()),
+ nil, dry_run)
+ if not data then
+ report ("both", 0, "db", "Database creation unsuccessful.")
+ end
+ end
+ return data
+end
+
+--[[doc--
+
+ access_font_index -- Provide a reference of the index table. Will
+ cause the index to be loaded if not present.
+
+--doc]]--
+
+access_font_index = function ()
+ if not name_index then name_index = load_names () end
+ return name_index
+end
+
+getmetadata = function ()
+ if not name_index then name_index = load_names() end
+ return tablefastcopy (name_index.meta)
+end
+
+--- unit -> unit
+load_lookups = function ( )
+ local foundname, data = load_lua_file(config.luaotfload.paths.lookup_path_lua)
+ if data then
+ report("log", 0, "cache", "Lookup cache loaded from %s.", foundname)
+ report("term", 3, "cache",
+ "Lookup cache loaded from %s.", foundname)
+ else
+ report("both", 1, "cache",
+ "No lookup cache, creating empty.")
+ data = { }
+ end
+ lookup_cache = data
+end
+
+local regular_synonym = {
+ book = "r",
+ normal = "r",
+ plain = "r",
+ regular = "r",
+ roman = "r",
+}
+
+local italic_synonym = {
+ oblique = true,
+ slanted = true,
+ italic = true,
+}
+
+local style_category = {
+ regular = "r",
+ bold = "b",
+ bolditalic = "bi",
+ italic = "i",
+ r = "regular",
+ b = "bold",
+ bi = "bolditalic",
+ i = "italic",
+}
+
+local type1_metrics = { "tfm", "ofm", }
+
+local dummy_findfile = resolvers.findfile -- from basics-gen
+
+local lookup_filename = function (filename)
+ if not name_index then name_index = load_names () end
+ local files = name_index.files
+ local basedata = files.base
+ local baredata = files.bare
+ for i = 1, #location_precedence do
+ local location = location_precedence [i]
+ local basenames = basedata [location]
+ local barenames = baredata [location]
+ local idx
+ if basenames ~= nil then
+ idx = basenames [filename]
+ if idx then
+ goto done
+ end
+ end
+ if barenames ~= nil then
+ for j = 1, #format_precedence do
+ local format = format_precedence [j]
+ local filemap = barenames [format]
+ if filemap then
+ idx = barenames [format] [filename]
+ if idx then
+ break
+ end
+ end
+ end
+ end
+::done::
+ if idx then
+ return files.full [idx]
+ end
+ end
+end
+
+--[[doc--
+
+ font_file_lookup -- The ``file:`` lookups are ultimately delegated here.
+ The lookups are kind of a blunt instrument since they try locating
+ the file using every conceivable method, which is quite
+ inefficient. Nevertheless, resolving files that way is rarely the
+ bottleneck.
+
+--doc]]--
+
+--- string -> string * string * bool
+font_file_lookup = function (filename)
+ local found = lookup_filename (filename)
+
+ if not found then
+ found = dummy_findfile(filename)
+ end
+
+ if found then
+ return found, nil, true
+ end
+
+ for i=1, #type1_metrics do
+ local format = type1_metrics[i]
+ if resolvers.findfile(filename, format) then
+ return file.addsuffix(filename, format), format, true
+ end
+ end
+
+ if not fonts_reloaded and config.luaotfload.db.update_live == true then
+ return reload_db (stringformat ("File not found: %s.", filename),
+ font_file_lookup,
+ filename)
+ end
+ return filename, nil, false
+end
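+
+--- Sketch of the behavior (file names hypothetical): a request such
+--- as
+---
+--- font_file_lookup "lmroman10-regular.otf"
+---
+--- first consults the file index, then the generic findfile fallback,
+--- and finally the Type1 metric formats; on success it returns the
+--- resolved name, possibly a format, and true, on failure the input
+--- name, nil, and false (after an optional live index rebuild when
+--- db.update_live is enabled).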
+
+--[[doc--
+
+ get_font_file -- Look up the file of an entry in the mappings
+ table. If the index is valid, pass on the name and subfont index
+ after verifying the existence of the resolved file. This
+ verification differs depending on the index entry’s ``location``
+ field:
+
+ * ``texmf`` fonts are verified using the (slow) function
+ ``kpse.lookup()``;
+ * other locations are tested by resolving the full path and
+ checking for the presence of a file there.
+
+--doc]]--
+
+--- int -> bool * (string * int) option
+local get_font_file = function (index)
+ local entry = name_index.mappings [index]
+ if not entry then
+ return false
+ end
+ local basename = entry.basename
+ if entry.location == "texmf" then
+ if kpselookup(basename) then
+ return true, basename, entry.subfont
+ end
+ else --- system, local
+ local fullname = name_index.files.full [index]
+ if lfsisfile (fullname) then
+ return true, basename, entry.subfont
+ end
+ end
+ return false
+end
+
+--[[doc--
+We need to verify whether the result of a cached lookup actually exists
+in the texmf tree or the filesystem. Again, due to the dual nature of
+font management we have to check both the system path and the texmf.
+--doc]]--
+
+local verify_font_file = function (basename)
+ local path = resolve_fullpath (basename)
+ if path and lfsisfile(path) then
+ return true
+ end
+ if kpsefind_file(basename) then
+ return true
+ end
+ return false
+end
+
+--[[doc--
+Lookups can be quite costly, more so the less specific they are.
+Even if we find a matching font eventually, the next time the
+user compiles their document they will have to sit through the delay
+again.
+Thus, some caching of results -- even between runs -- is in order.
+We’ll just store successful name: lookups in a separate cache file.
+
+type lookup_cache = (string, (string * num)) dict
+
+The spec is expected to be modified in place (ugh), so we’ll have to
+catalogue what fields actually influence its behavior.
+
+It is unclear what the “spec” resolver is for.
+
+ lookup inspects modifies
+ ---------- ----------------- ---------------------------
+ file: name forced, name
+ name:[*] name, style, sub, resolved, sub, name, forced
+ optsize, size
+ spec: name, sub resolved, sub, name, forced
+
+[*] name: contains both the name resolver from luatex-fonts and
+ resolve_name() below
+
+From my reading of font-def.lua, what a resolver does is
+basically rewrite the “name” field of the specification record
+with the resolution.
+Also, the fields “resolved”, “sub”, “force” etc. influence the outcome.
+
+--doc]]--
+
+local concat_char = "#"
+local hash_fields = {
+ --- order is important
+ "specification", "style", "sub", "optsize", "size",
+}
+local n_hash_fields = #hash_fields
+
+--- spec -> string
+local hash_request = function (specification)
+ local key = { } --- segments of the hash
+ for i=1, n_hash_fields do
+ local field = specification[hash_fields[i]]
+ if field then
+ key[#key+1] = field
+ end
+ end
+ return tableconcat(key, concat_char)
+end
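+
+--- Example (hypothetical request): a specification carrying
+--- { specification = "name:Iwona", style = "italic", size = 786432 }
+--- hashes to the key "name:Iwona#italic#786432"; absent fields
+--- (here “sub” and “optsize”) are simply left out.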
+
+--- 'a -> 'a -> table -> (string * int|boolean * boolean)
+resolve_cached = function (specification)
+ if not lookup_cache then load_lookups () end
+ local request = hash_request(specification)
+ report("both", 4, "cache", "Looking for %q in cache ...",
+ request)
+
+ local found = lookup_cache [request]
+
+ --- case 1) cache positive ----------------------------------------
+ if found then --- replay fields from cache hit
+ report("info", 4, "cache", "Found!")
+ local basename = found[1]
+ --- check the presence of the file in case it’s been removed
+ local success = verify_font_file (basename)
+ if success == true then
+ return basename, found[2], true
+ end
+ report("both", 4, "cache", "Cached file not found; resolving again.")
+ else
+ report("both", 4, "cache", "Not cached; resolving.")
+ end
+
+ --- case 2) cache negative ----------------------------------------
+ --- first we resolve normally ...
+ local filename, subfont = resolve_name (specification)
+ if not filename then
+ return nil, nil
+ end
+ --- ... then we add the fields to the cache ... ...
+ local entry = { filename, subfont }
+ report("both", 4, "cache", "New entry: %s.", request)
+ lookup_cache [request] = entry
+
+ --- obviously, the updated cache needs to be stored.
+ --- TODO this should trigger a save only once the
+ --- document is compiled (finish_pdffile callback?)
+ report("both", 5, "cache", "Saving updated cache.")
+ local success = save_lookups ()
+ if not success then --- sad, but not critical
+ report("both", 0, "cache", "Error writing cache.")
+ end
+ return filename, subfont
+end
+
+--- this used to be inlined; with the lookup cache we don’t
+--- have to be parsimonious wrt function calls anymore
+--- “found” is the match accumulator
+local add_to_match = function (found, size, face)
+
+ local continue = true
+
+ local optsize = face.size
+
+ if optsize and next (optsize) then
+ local dsnsize, maxsize, minsize
+ dsnsize = optsize[1]
+ maxsize = optsize[2]
+ minsize = optsize[3]
+
+ if size ~= nil
+ and (dsnsize == size or (size > minsize and size <= maxsize))
+ then
+ found[1] = face
+ continue = false ---> break
+ else
+ found[#found+1] = face
+ end
+ else
+ found[1] = face
+ continue = false ---> break
+ end
+
+ return found, continue
+end
+
+local choose_closest = function (distances)
+ local closest = 2^51
+ local match
+ for i = 1, #distances do
+ local d, index = unpack (distances [i])
+ if d < closest then
+ closest = d
+ match = index
+ end
+ end
+ return match
+end
+
+--[[doc--
+
+ choose_size -- Pick a font face of appropriate size from the list
+ of family members with matching style. There are four categories:
+
+ 1. exact matches: if there is a face whose design size equals
+ the asked size, it is returned immediately and no further
+ candidates are inspected.
+
+ 2. range matches: of all faces whose design range contains
+ the requested size, the one whose range midpoint is
+ closest to the requested size is returned.
+
+ 3. out-of-range matches: of all other faces (i. e. those whose
+ range lies entirely above or below the asked size) the one
+ whose boundary (upper or lower) is closest to the requested
+ size is chosen.
+
+ 4. default matches: if no design size or a design size of zero
+ is requested, the face with the default size is returned.
+
+--doc]]--
+
+--- int * int * int * int list -> int -> int
+local choose_size = function (sizes, askedsize)
+ local mappings = name_index.mappings
+ local match = sizes.default
+ local exact
+ local inrange = { } --- distance * index list
+ local norange = { } --- distance * index list
+ local fontname, subfont
+ if askedsize ~= 0 then
+ --- firstly, look for an exactly matching design size or
+ --- matching range
+ for i = 1, #sizes do
+ local dsnsize, high, low, index = unpack (sizes [i])
+ if dsnsize == askedsize then
+ --- exact match, this is what we were looking for
+ exact = index
+ goto skip
+ elseif askedsize < low then
+ --- below range, add to the norange table
+ local d = low - askedsize
+ norange [#norange + 1] = { d, index }
+ elseif askedsize > high then
+ --- beyond range, add to the norange table
+ local d = askedsize - high
+ norange [#norange + 1] = { d, index }
+ else
+ --- range match
+ local d = ((low + high) / 2) - askedsize
+ if d < 0 then
+ d = -d
+ end
+ inrange [#inrange + 1] = { d, index }
+ end
+ end
+ end
+::skip::
+ if exact then
+ match = exact
+ elseif #inrange > 0 then
+ match = choose_closest (inrange)
+ elseif #norange > 0 then
+ match = choose_closest (norange)
+ end
+ return match
+end
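+
+--- Worked example with made-up size entries of the form
+--- { designsize, high, low, index }:
+---
+--- sizes = { default = 57,
+--- { 10.0, 10.9, 8.5, 57 },
+--- { 12.0, 17.3, 11.0, 58 } }
+---
+--- choose_size (sizes, 10.0) --> 57 (exact design size match)
+--- choose_size (sizes, 11.5) --> 58 (the only face whose range
+--- contains 11.5)
+--- choose_size (sizes, 0) --> 57 (no size requested: default)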
+
+--[[doc--
+
+ resolve_familyname -- Query the families table for an entry
+ matching the specification.
+ The parameters “name” and “style” are pre-sanitized.
+
+--doc]]--
+--- spec -> string -> string -> int -> string * int
+local resolve_familyname = function (specification, name, style, askedsize)
+ local families = name_index.families
+ local mappings = name_index.mappings
+ local candidates = nil
+ --- arrow code alert
+ for i = 1, #location_precedence do
+ local location = location_precedence [i]
+ local locgroup = families [location]
+ for j = 1, #format_precedence do
+ local format = format_precedence [j]
+ local fmtgroup = locgroup [format]
+ if fmtgroup then
+ local familygroup = fmtgroup [name]
+ if familygroup then
+ local stylegroup = familygroup [style]
+ if stylegroup then --- suitable match
+ candidates = stylegroup
+ goto done
+ end
+ end
+ end
+ end
+ end
+ --- No family matched in any location/format combination; bail out.
+ --- (The “if true” wrapper is needed because a bare return may not
+ --- precede the label below.)
+ if true then
+ return nil, nil
+ end
+::done::
+ local index = choose_size (candidates, askedsize)
+ local success, resolved, subfont = get_font_file (index)
+ if not success then
+ return nil, nil
+ end
+ report ("info", 2, "db", "Match found: %s(%d).",
+ resolved, subfont or 0)
+ return resolved, subfont
+end
+
+local resolve_fontname = function (specification, name, style)
+ local mappings = name_index.mappings
+ local fallback = nil
+ local lastresort = nil
+ style = style_category [style]
+ for i = 1, #mappings do
+ local face = mappings [i]
+ local prefmodifiers = face.prefmodifiers
+ local subfamily = face.subfamily
+ if face.fontname == name
+ or face.splainname == name
+ or face.fullname == name
+ or face.psname == name
+ then
+ return face.basename, face.subfont
+ elseif face.familyname == name then
+ if prefmodifiers == style
+ or subfamily == style
+ then
+ fallback = face
+ elseif regular_synonym [prefmodifiers]
+ or regular_synonym [subfamily]
+ then
+ lastresort = face
+ end
+ elseif face.metafamily == name
+ and (regular_synonym [prefmodifiers]
+ or regular_synonym [subfamily])
+ then
+ lastresort = face
+ end
+ end
+ if fallback then
+ return fallback.basename, fallback.subfont
+ end
+ if lastresort then
+ return lastresort.basename, lastresort.subfont
+ end
+ return nil, nil
+end
+
+--[[doc--
+
+ resolve_name -- Perform a name: lookup. This first queries the
+ font families table and, if there is no match for the spec, the
+ font names table.
+ The return value is a pair consisting of the file name and the
+ subfont index if appropriate.
+
+ the request specification has the fields:
+
+ · features: table
+ · normal: set of { ccmp clig itlc kern liga locl mark mkmk rlig }
+ · ???
+ · forced: string
+ · lookup: "name"
+ · method: string
+ · name: string
+ · resolved: string
+ · size: int
+ · specification: string (== <lookup> ":" <name>)
+ · sub: string
+
+ The “size” field deserves special attention: if its value is
+ negative, then it actually specifies a scalefactor of the
+ design size of the requested font. This happens e.g. if a font is
+ requested without an explicit “at size”. If the font is part of a
+ larger collection with different design sizes, this complicates
+ matters a bit: Normally, the resolver prefers fonts that have a
+ design size as close as possible to the requested size. If no
+ size is specified, then the design size is implied. But which design
+ size should that be? XeTeX appears to pick the “normal” (unmarked)
+ size: with Adobe fonts this would be the one that is neither
+ “caption” nor “subhead” nor “display” &c ... For fonts by Adobe this
+ seems to be the one that does not receive a “prefmodifiers” field.
+ (IOW Adobe uses the “prefmodifiers” field to encode the design size
+ in more or less human readable format.) However, this is not true
+ of LM and EB Garamond. As this matters only where there are
+ multiple design sizes to a given font/style combination, we put a
+ workaround in place that chooses that unmarked version.
+
+ The first return value of “resolve_name” is the file name of the
+ requested font (string). It can be passed to the fullname resolver
+ get_font_file().
+ The second value is either “false” or an integer indicating the
+ subfont index in a TTC.
+
+--doc]]--
+
+--- table -> string * (int | bool)
+resolve_name = function (specification)
+ local resolved, subfont
+ if not name_index then name_index = load_names () end
+ local name = sanitize_fontname (specification.name)
+ local style = sanitize_fontname (specification.style) or "r"
+ local askedsize = specification.optsize
+
+ if askedsize then
+ askedsize = tonumber (askedsize)
+ else
+ askedsize = specification.size
+ if askedsize and askedsize >= 0 then
+ askedsize = askedsize / 65536
+ else
+ askedsize = 0
+ end
+ end
+
+ resolved, subfont = resolve_familyname (specification,
+ name,
+ style,
+ askedsize)
+ if not resolved then
+ resolved, subfont = resolve_fontname (specification,
+ name,
+ style)
+ end
+
+ if not resolved then
+ if not fonts_reloaded and config.luaotfload.db.update_live == true then
+ return reload_db (stringformat ("Font %s not found.",
+ specification.name or "<?>"),
+ resolve_name,
+ specification)
+ end
+ end
+ return resolved, subfont
+end
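+
+--- A rough sketch of a request as it might reach resolve_name()
+--- (field values hypothetical):
+---
+--- resolve_name { lookup = "name",
+--- name = "Iwona",
+--- style = "italic",
+--- size = 12 * 65536, --- “at 12pt”
+--- specification = "name:Iwona" }
+---
+--- Here askedsize becomes 12; without an “at” size the size field is
+--- negative and askedsize falls back to 0, i.e. the default design
+--- size of the italic face.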
+
+resolve_fullpath = function (fontname, ext) --- getfilename()
+ if not name_index then name_index = load_names () end
+ local files = name_index.files
+ local basedata = files.base
+ local baredata = files.bare
+ for i = 1, #location_precedence do
+ local location = location_precedence [i]
+ local basenames = basedata [location]
+ local idx
+ if basenames ~= nil then
+ idx = basenames [fontname]
+ end
+ if ext then
+ local barenames = baredata [location] [ext]
+ if not idx and barenames ~= nil then
+ idx = barenames [fontname]
+ end
+ end
+ if idx then
+ return files.full [idx]
+ end
+ end
+ return ""
+end
+
+--- when reload is triggered we update the database
+--- and then re-run the caller with the arg list
+
+--- string -> ('a -> 'a) -> 'a list -> 'a
+reload_db = function (why, caller, ...)
+ local namedata = name_index
+ local formats = tableconcat (namedata.meta.formats, ",")
+
+ report ("both", 0, "db",
+ "Reload initiated (formats: %s); reason: %q.",
+ formats, why)
+
+ set_font_filter (formats)
+ namedata = update_names (namedata, false, false)
+
+ if namedata then
+ fonts_reloaded = true
+ name_index = namedata
+ return caller (...)
+ end
+
+ report ("both", 0, "db", "Database update unsuccessful.")
+end
+
+--- string -> string -> int
+local iterative_levenshtein = function (s1, s2)
+
+ local costs = { }
+ local len1, len2 = #s1, #s2
+
+ for i = 0, len1 do
+ local last = i
+ for j = 0, len2 do
+ if i == 0 then
+ costs[j] = j
+ else
+ if j > 0 then
+ local current = costs[j-1]
+ if stringsub(s1, i, i) ~= stringsub(s2, j, j) then
+ current = mathmin(current, last, costs[j]) + 1
+ end
+ costs[j-1] = last
+ last = current
+ end
+ end
+ end
+ if i > 0 then costs[len2] = last end
+ end
+
+ return costs[len2] --- lower right has the distance
+end
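+
+--- For instance, iterative_levenshtein ("kitten", "sitting") --> 3
+--- (two substitutions plus one insertion).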
+
+--- string -> int -> bool
+find_closest = function (name, limit)
+ local name = sanitize_fontname (name)
+ limit = limit or fuzzy_limit
+
+ if not name_index then name_index = load_names () end
+ if not name_index or type (name_index) ~= "table" then
+ if not fonts_reloaded then
+ return reload_db("Font index missing.", find_closest, name)
+ end
+ return false
+ end
+
+ local by_distance = { } --- (int, string list) dict
+ local distances = { } --- int list
+ local cached = { } --- (string, int) dict
+ local mappings = name_index.mappings
+ local n_fonts = #mappings
+
+ for n = 1, n_fonts do
+ local current = mappings[n]
+ --[[
+ This is simplistic but surprisingly fast.
+ Matching is performed against the “fullname” field
+ of a db record in preprocessed form. We then store the
+ raw “fullname” at its edit distance.
+ We should probably do some weighting over all the
+ font name categories as well as whatever agrep
+ does.
+ --]]
+ local fullname = current.plainname
+ local sfullname = current.fullname
+ local dist = cached[sfullname]--- maybe already calculated
+
+ if not dist then
+ dist = iterative_levenshtein(name, sfullname)
+ cached[sfullname] = dist
+ end
+ local namelst = by_distance[dist]
+ if not namelst then --- first entry
+ namelst = { fullname }
+ distances[#distances+1] = dist
+ else --- append
+ namelst[#namelst+1] = fullname
+ end
+ by_distance[dist] = namelst
+ end
+
+ --- print the matches according to their distance
+ local n_distances = #distances
+ if n_distances > 0 then --- got some data
+ tablesort(distances)
+ limit = mathmin(n_distances, limit)
+ report(false, 1, "query",
+ "Displaying %d distance levels.", limit)
+
+ for i = 1, limit do
+ local dist = distances[i]
+ local namelst = by_distance[dist]
+ report(false, 0, "query",
+ "Distance from \"%s\": %s\n "
+ .. tableconcat (namelst, "\n "),
+ name, dist)
+ end
+
+ return true
+ end
+ return false
+end --- find_closest()
+
+--[[doc--
+
+ load_font_file -- Safely open a font file. See
+ <http://www.ntg.nl/pipermail/ntg-context/2013/075885.html>
+ regarding the omission of ``fontloader.close()``.
+
+ TODO -- check if fontloader.info() is ready for prime time in 0.78+
+ -- fields /tables needed:
+ -- names
+ -- postscriptname
+ -- validation_state
+ -- ..
+
+--doc]]--
+
+local load_font_file = function (filename, subfont)
+ local rawfont, _msg = fontloaderopen (filename, subfont)
+ --local rawfont, _msg = fontloaderinfo (filename, subfont)
+ if not rawfont then
+ report ("log", 1, "db", "ERROR: failed to open %s.", filename)
+ return
+ end
+ return rawfont
+end
+
+--- rawdata -> (int * int * int | bool)
+
+local get_size_info = function (metadata)
+ local design_size = metadata.design_size
+ local design_range_top = metadata.design_range_top
+ local design_range_bottom = metadata.design_range_bottom
+
+ local fallback_size = design_size ~= 0 and design_size
+ or design_range_bottom ~= 0 and design_range_bottom
+ or design_range_top ~= 0 and design_range_top
+
+ if fallback_size then
+ design_size = (design_size or fallback_size) / 10
+ design_range_top = (design_range_top or fallback_size) / 10
+ design_range_bottom = (design_range_bottom or fallback_size) / 10
+ return {
+ design_size, design_range_top, design_range_bottom,
+ }
+ end
+
+ return false
+end
+
+local get_english_names = function (metadata, basename)
+ local names = metadata.names
+ local english_names
+
+ if names then
+ --inspect(names)
+ for _, raw_namedata in next, names do
+ if raw_namedata.lang == "English (US)" then
+ return raw_namedata.names
+ end
+ end
+ end
+
+ -- no (English) names table, probably a broken font
+ report("both", 3, "db",
+ "%s: missing or broken English names table.", basename)
+ return { fontname = metadata.fontname,
+ fullname = metadata.fullname, }
+end
+
+--[[--
+ In case of broken PS names we set some dummies. However, we cannot
+ directly modify the font data as returned by fontloader.open() because
+ it is a userdata object.
+
+ For this reason we copy what is necessary whilst keeping the table
+ structure the same as in the tfmdata.
+--]]--
+local get_raw_info = function (metadata, basename)
+ local fullname
+ local fontname = metadata.fontname
+ local fullname = metadata.fullname
+ local psname
+
+ local validation_state = metadata.validation_state
+ if (validation_state and tablecontains (validation_state, "bad_ps_fontname"))
+ or not fontname
+ then
+ --- Broken names table, e.g. avkv.ttf with UTF-16 strings;
+ --- we put some dummies in place like the fontloader
+ --- (font-otf.lua) does.
+ report("both", 3, "db",
+ "%s has invalid postscript font names, using dummies.",
+ basename)
+ fontname = "bad-fontname-" .. basename
+ fullname = "bad-fullname-" .. basename
+ end
+
+ return {
+ familyname = metadata.familyname,
+ fontname = fontname,
+ fontstyle_name = metadata.fontstyle_name,
+ fullname = fullname,
+ italicangle = metadata.italicangle,
+ names = metadata.names,
+ pfminfo = metadata.pfminfo,
+ units_per_em = metadata.units_per_em,
+ version = metadata.version,
+ design_size = metadata.design_size,
+ design_range_top = metadata.design_range_top,
+ design_range_bottom = metadata.design_range_bottom,
+ }
+end
+
+local organize_namedata = function (rawinfo,
+ english_names,
+ basename,
+ info)
+ local default_name = english_names.compatfull
+ or english_names.fullname
+ or english_names.postscriptname
+ or rawinfo.fullname
+ or rawinfo.fontname
+ or info.fullname
+ or info.fontname
+ local default_family = english_names.preffamily
+ or english_names.family
+ or rawinfo.familyname
+ or info.familyname
+-- local default_modifier = english_names.prefmodifiers
+-- or english_names.subfamily
+ local fontnames = {
+ --- see
+ --- https://developer.apple.com/fonts/TTRefMan/RM06/Chap6name.html
+ --- http://www.microsoft.com/typography/OTSPEC/name.htm#NameIDs
+ english = {
+ --- where a “compatfull” field is given, the value of “fullname” is
+ --- either identical or differs by separating the style
+ --- with a hyphen and omitting spaces. (According to the
+ --- spec, “compatfull” is “Macintosh only”.)
+ --- Of the three “fullname” fields, this one appears to be the one
+ --- with the entire name given in a legible,
+ --- non-abbreviated fashion, for most fonts at any rate.
+ --- However, in some fonts (e.g. CMU) all three fields are
+ --- identical.
+ fullname = --[[ 18 ]] english_names.compatfull
+ or --[[ 4 ]] english_names.fullname
+ or default_name,
+ --- we keep both the “preferred family” and the “family”
+ --- values around since both are valid but can turn out
+ --- quite differently, e.g. with Latin Modern:
+ --- preffamily: “Latin Modern Sans”,
+ --- family: “LM Sans 10”
+ preffamily = --[[ 16 ]] english_names.preffamilyname,
+ family = --[[ 1 ]] english_names.family or default_family,
+ prefmodifiers = --[[ 17 ]] english_names.prefmodifiers,
+ subfamily = --[[ 2 ]] english_names.subfamily,
+ psname = --[[ 6 ]] english_names.postscriptname,
+ },
+
+ metadata = {
+ fullname = rawinfo.fullname,
+ fontname = rawinfo.fontname,
+ familyname = rawinfo.familyname,
+ },
+
+ info = {
+ fullname = info.fullname,
+ familyname = info.familyname,
+ fontname = info.fontname,
+ },
+ }
+
+ -- see http://www.microsoft.com/typography/OTSPEC/features_pt.htm#size
+ if rawinfo.fontstyle_name then
+ --- not present in all fonts, often differs from the preferred
+ --- subfamily as well as subfamily fields, e.g. with
+ --- LMSans10-BoldOblique:
+ --- subfamily: “Bold Italic”
+ --- prefmodifiers: “10 Bold Oblique”
+ --- fontstyle_name: “Bold Oblique”
+ for _, name in next, rawinfo.fontstyle_name do
+ if name.lang == 1033 then --- I hate magic numbers
+ fontnames.fontstyle_name = name.name
+ end
+ end
+ end
+
+ return {
+ sanitized = sanitize_fontnames (fontnames),
+ fontname = rawinfo.fontname,
+ fullname = rawinfo.fullname,
+ familyname = rawinfo.familyname,
+ }
+end
+
+
+local dashsplitter = lpegsplitat "-"
+
+local split_fontname = function (fontname)
+ --- sometimes the style hides in the latter part of the
+ --- fontname, separated by a dash, e.g. “Iwona-Regular”,
+ --- “GFSSolomos-Regular”
+ local splitted = { lpegmatch (dashsplitter, fontname) }
+ if next (splitted) then
+ return sanitize_fontname (splitted [#splitted])
+ end
+end
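+
+--- For instance:
+---
+--- split_fontname "GFSSolomos-Regular" --> "regular"
+--- split_fontname "Iwona-CondBoldItalic" --> "condbolditalic"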
+
+local organize_styledata = function (fontname,
+ metadata,
+ english_names,
+ info)
+ local pfminfo = metadata.pfminfo or { }
+ local names = metadata.names
+
+ return {
+ --- see http://www.microsoft.com/typography/OTSPEC/features_pt.htm#size
+ size = get_size_info (metadata),
+ weight = pfminfo.weight or 400,
+ split = split_fontname (fontname),
+ width = pfminfo.width,
+ italicangle = metadata.italicangle,
+ --- this is for querying, see www.ntg.nl/maps/40/07.pdf for details
+ units_per_em = metadata.units_per_em,
+ version = metadata.version,
+ }
+end
+
+--[[doc--
+The data inside an Opentype font file can be quite heterogeneous.
+Thus in order to get the relevant information, parts of the original
+table as returned by the font file reader need to be relocated.
+--doc]]--
+
+--- string -> int -> bool -> string -> fontentry
+
+ot_fullinfo = function (filename,
+ subfont,
+ location,
+ basename,
+ format,
+ info)
+
+ local metadata = load_font_file (filename, subfont)
+ if not metadata then
+ return nil
+ end
+
+ local rawinfo = get_raw_info (metadata, basename)
+ --- Closing the file manually is a tad faster and more memory
+ --- efficient than having it closed by the gc
+ fontloaderclose (metadata)
+
+ local english_names = get_english_names (rawinfo, basename)
+ local namedata = organize_namedata (rawinfo,
+ english_names,
+ basename,
+ info)
+ local style = organize_styledata (namedata.fontname,
+ rawinfo,
+ english_names,
+ info)
+
+ local res = {
+ file = { base = basename,
+ full = filename,
+ subfont = subfont,
+ location = location or "system" },
+ format = format,
+ names = namedata,
+ style = style,
+ version = rawinfo.version,
+ }
+ return res
+end
+
+--[[doc--
+
+ Type1 font inspector. In comparison with OTF, PFBs contain a good
+ deal fewer name fields, which makes it tricky in places to find a
+ meaningful representation for the database.
+
+ Good read: http://www.adobe.com/devnet/font/pdfs/5004.AFM_Spec.pdf
+
+--doc]]--
+
+--- string -> int -> bool -> string -> fontentry
+
+t1_fullinfo = function (filename, _subfont, location, basename, format)
+ local sanitized
+ local metadata = load_font_file (filename)
+ local fontname = metadata.fontname
+ local fullname = metadata.fullname
+ local familyname = metadata.familyname
+ local italicangle = metadata.italicangle
+ local splitstyle = split_fontname (fontname)
+ local style = ""
+ local weight
+
+ sanitized = sanitize_fontnames ({
+ fontname = fontname,
+ psname = fullname,
+ pfullname = fullname,
+ metafamily = familyname,
+ familyname = familyname,
+ weight = metadata.weight, --- string identifier
+ prefmodifiers = style,
+ })
+
+ weight = sanitized.weight
+
+ if weight == "bold" then
+ style = weight
+ end
+
+ if italicangle ~= 0 then
+ style = style .. "italic"
+ end
+
+ return {
+ basename = basename,
+ fullpath = filename,
+ subfont = false,
+ location = location or "system",
+ format = format,
+ fullname = sanitized.fullname,
+ fontname = sanitized.fontname,
+ familyname = sanitized.familyname,
+ plainname = fullname,
+ splainname = sanitized.fullname,
+ psname = sanitized.fontname,
+ version = metadata.version,
+ size = false,
+ splitstyle = splitstyle,
+ fontstyle_name = style ~= "" and style or weight,
+ weight = metadata.pfminfo.weight or 400,
+ italicangle = italicangle,
+ }
+end
+
+local loaders = {
+ dfont = ot_fullinfo,
+ otf = ot_fullinfo,
+ ttc = ot_fullinfo,
+ ttf = ot_fullinfo,
+
+ pfb = t1_fullinfo,
+ pfa = t1_fullinfo,
+}
+
+--- not side-effect free!
+
+local compare_timestamps = function (fullname,
+ currentstatus,
+ currententrystatus,
+ currentmappings,
+ targetstatus,
+ targetentrystatus,
+ targetmappings)
+
+ local currenttimestamp = currententrystatus
+ and currententrystatus.timestamp
+ local targettimestamp = lfsattributes (fullname, "modification")
+
+ if targetentrystatus ~= nil
+ and targetentrystatus.timestamp == targettimestamp then
+ report ("log", 3, "db", "Font %q already read.", fullname)
+ return false
+ end
+
+ targetentrystatus.timestamp = targettimestamp
+ targetentrystatus.index = targetentrystatus.index or { }
+
+ if currenttimestamp == targettimestamp
+ and not targetentrystatus.index [1]
+ then
+ --- copy old namedata into new
+
+ for _, currentindex in next, currententrystatus.index do
+
+ local targetindex = #targetentrystatus.index
+ local fullinfo = currentmappings [currentindex]
+ local location = #targetmappings + 1
+
+ targetmappings [location] = fullinfo
+ targetentrystatus.index [targetindex + 1] = location
+ end
+
+ report ("log", 3, "db", "Font %q already indexed.", fullname)
+
+ return false
+ end
+
+ return true
+end
+
+local insert_fullinfo = function (fullname,
+ basename,
+ n_font,
+ loader,
+ format,
+ location,
+ targetmappings,
+ targetentrystatus,
+ info)
+
+ local subfont
+ if n_font ~= false then
+ subfont = n_font - 1
+ else
+ subfont = false
+ n_font = 1
+ end
+
+ local fullinfo = loader (fullname, subfont,
+ location, basename,
+ format, info)
+
+ if not fullinfo then
+ return false
+ end
+
+ local index = targetentrystatus.index [n_font]
+
+ if not index then
+ index = #targetmappings + 1
+ end
+
+ targetmappings [index] = fullinfo
+ targetentrystatus.index [n_font] = index
+
+ return true
+end
+
+
+
+--- we return true if the font is new or re-indexed
+--- string -> dbobj -> dbobj -> bool
+
+local read_font_names = function (fullname,
+ currentnames,
+ targetnames,
+ location)
+
+ local targetmappings = targetnames.mappings
+ local targetstatus = targetnames.status --- by full path
+ local targetentrystatus = targetstatus [fullname]
+
+ if targetentrystatus == nil then
+ targetentrystatus = { }
+ targetstatus [fullname] = targetentrystatus
+ end
+
+ local currentmappings = currentnames.mappings
+ local currentstatus = currentnames.status
+ local currententrystatus = currentstatus [fullname]
+
+ local basename = filebasename (fullname)
+ local barename = filenameonly (fullname)
+ local entryname = fullname
+
+ if location == "texmf" then
+ entryname = basename
+ end
+
+ --- 1) skip if blacklisted
+
+ if names.blacklist[fullname] or names.blacklist[basename] then
+ report("log", 2, "db",
+ "Ignoring blacklisted font %q.", fullname)
+ return false
+ end
+
+ --- 2) skip if known with same timestamp
+
+ if not compare_timestamps (fullname,
+ currentstatus,
+ currententrystatus,
+ currentmappings,
+ targetstatus,
+ targetentrystatus,
+ targetmappings)
+ then
+ return false
+ end
+
+ --- 3) new font; choose a loader, abort if unknown
+
+ local format = stringlower (filesuffix (basename))
+ local loader = loaders [format] --- ot_fullinfo, t1_fullinfo
+
+ if not loader then
+ report ("both", 0, "db",
+ "Unknown format: %q, skipping.", format)
+ return false
+ end
+
+ --- 4) get basic info, abort if fontloader can’t read it
+
+ local info = fontloaderinfo (fullname)
+
+ if not info then
+ report ("log", 1, "db",
+ "Failed to read basic information from %q", basename)
+ return false
+ end
+
+
+ --- 5) check for subfonts and process each of them
+
+ if type (info) == "table" and #info > 1 then --- ttc
+
+ local success = false --- true if at least one subfont got read
+
+ for n_font = 1, #info do
+ if insert_fullinfo (fullname, basename, n_font,
+ loader, format, location,
+ targetmappings, targetentrystatus,
+ info)
+ then
+ success = true
+ end
+ end
+
+ return success
+ end
+
+ return insert_fullinfo (fullname, basename, false,
+ loader, format, location,
+ targetmappings, targetentrystatus,
+ info)
+end
+
+local path_normalize
+do
+ --- os.type and os.name are constants so we
+ --- choose a normalization function in advance
+ --- instead of testing with every call
+ local os_type, os_name = os.type, os.name
+ local filecollapsepath = filecollapsepath
+ local lfsreadlink = lfs.readlink
+
+ --- windows and dos
+ if os_type == "windows" or os_type == "msdos" then
+        --- MS platform specific stuff
+ path_normalize = function (path)
+ path = stringgsub(path, '\\', '/')
+ path = stringlower(path)
+ path = filecollapsepath(path)
+ return path
+ end
+--[[doc--
+ The special treatment for cygwin was removed with a patch submitted
+ by Ken Brown.
+ Reference: http://cygwin.com/ml/cygwin/2013-05/msg00006.html
+--doc]]--
+
+ else -- posix
+ path_normalize = function (path)
+ local dest = lfsreadlink(path)
+ if dest then
+ if kpsereadable_file(dest) then
+ path = dest
+ elseif kpsereadable_file(filejoin(filedirname(path), dest)) then
+                path = filejoin(filedirname(path), dest)
+ else
+ -- broken symlink?
+ end
+ end
+ path = filecollapsepath(path)
+ return path
+ end
+ end
+end
+
+fonts.path_normalize = path_normalize
+
+names.blacklist = { }
+
+local blacklist = names.blacklist
+local p_blacklist --- prefixes of dirs
+
+--- string list -> string list
+local collapse_prefixes = function (lst)
+ --- avoid redundancies in blacklist
+ if #lst < 2 then
+ return lst
+ end
+
+ tablesort(lst)
+ local cur = lst[1]
+ local result = { cur }
+ for i=2, #lst do
+ local elm = lst[i]
+ if stringsub(elm, 1, #cur) ~= cur then
+ --- different prefix
+ cur = elm
+ result[#result+1] = cur
+ end
+ end
+ return result
+end
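+
+--- Illustration (hypothetical paths): the list is sorted first, then any
+--- entry that merely extends an earlier entry is dropped:
+---
+---     collapse_prefixes { "/usr/share/fonts",
+---                         "/usr/share/fonts/truetype",
+---                         "/opt/fonts" }
+---     --> { "/opt/fonts", "/usr/share/fonts" }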
+
+--- string list -> string list -> (string, bool) hash_t
+local create_blacklist = function (blacklist, whitelist)
+ local result = { }
+ local dirs = { }
+
+ report("info", 2, "db", "Blacklisting %d files and directories.",
+ #blacklist)
+ for i=1, #blacklist do
+ local entry = blacklist[i]
+ if lfsisdir(entry) then
+ dirs[#dirs+1] = entry
+ else
+ result[blacklist[i]] = true
+ end
+ end
+
+ report("info", 2, "db", "Whitelisting %d files.", #whitelist)
+ for i=1, #whitelist do
+ result[whitelist[i]] = nil
+ end
+
+ dirs = collapse_prefixes(dirs)
+
+ --- build the disjunction of the blacklisted directories
+ for i=1, #dirs do
+ local p_dir = P(dirs[i])
+ if p_blacklist then
+ p_blacklist = p_blacklist + p_dir
+ else
+ p_blacklist = p_dir
+ end
+ end
+
+ if p_blacklist == nil then
+ --- always return false
+ p_blacklist = Cc(false)
+ end
+
+ return result
+end
+
+--- unit -> unit
+read_blacklist = function ()
+ local files = {
+ kpselookup ("luaotfload-blacklist.cnf",
+ {all=true, format="tex"})
+ }
+ local blacklist = { }
+ local whitelist = { }
+
+ if files and type(files) == "table" then
+ for _, path in next, files do
+ for line in iolines (path) do
+ line = stringstrip(line) -- to get rid of lines like " % foo"
+ local first_chr = stringsub(line, 1, 1)
+ if first_chr == "%" or stringis_empty(line) then
+ -- comment or empty line
+ elseif first_chr == "-" then
+ report ("both", 3, "db",
+ "Whitelisted file %q via %q.",
+ line, path)
+ whitelist[#whitelist+1] = stringsub(line, 2, -1)
+ else
+ local cmt = stringfind(line, "%%")
+ if cmt then
+ line = stringsub(line, 1, cmt - 1)
+ end
+ line = stringstrip(line)
+ report ("both", 3, "db",
+ "Blacklisted file %q via %q.",
+ line, path)
+ blacklist[#blacklist+1] = line
+ end
+ end
+ end
+ end
+ names.blacklist = create_blacklist(blacklist, whitelist)
+end
+
+local p_font_filter
+
+do
+ local current_formats = { }
+
+ local extension_pattern = function (list)
+ local pat
+ for i=#list, 1, -1 do
+ local e = list[i]
+ if not pat then
+ pat = P(e)
+ else
+ pat = pat + P(e)
+ end
+ end
+ pat = pat * P(-1)
+ return (1 - pat)^1 * pat
+ end
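+
+    --- A sketch of what the resulting pattern accepts (filenames made up):
+    ---     lpegmatch (extension_pattern { "otf", "ttf" }, "foo.otf") --> match
+    ---     lpegmatch (extension_pattern { "otf", "ttf" }, "foo.afm") --> nil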
+
+ --- small helper to adjust the font filter pattern (--formats
+ --- option)
+
+ set_font_filter = function (formats)
+
+ if not formats or type (formats) ~= "string" then
+ return
+ end
+
+ if stringsub (formats, 1, 1) == "+" then -- add
+ formats = lpegmatch (splitcomma, stringsub (formats, 2))
+ if formats then
+ current_formats = tableappend (current_formats, formats)
+ end
+        elseif stringsub (formats, 1, 1) == "-" then -- remove
+ formats = lpegmatch (splitcomma, stringsub (formats, 2))
+ if formats then
+ local newformats = { }
+ for i = 1, #current_formats do
+                    local fmt = current_formats[i]
+                    for j = 1, #formats do
+                        if fmt == formats[j] then
+                            goto skip
+                        end
+                    end
+                    newformats[#newformats+1] = fmt
+                    ::skip::
+ end
+ current_formats = newformats
+ end
+ else -- set
+ formats = lpegmatch (splitcomma, formats)
+ if formats then
+ current_formats = formats
+ end
+ end
+
+ p_font_filter = extension_pattern (current_formats)
+ end
+
+ get_font_filter = function (formats)
+ return tablefastcopy (current_formats)
+ end
+end
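+
+--- Sketch of how the filter is adjusted from the --formats option
+--- (the extension names are examples):
+---
+---     set_font_filter "otf,ttf"   --- scan only these formats
+---     set_font_filter "+pfb"      --- additionally accept pfb files
+---     set_font_filter "-ttc"      --- drop ttc from the current set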
+
+local process_dir_tree
+process_dir_tree = function (acc, dirs)
+ if not next (dirs) then --- done
+ return acc
+ end
+
+ local pwd = lfscurrentdir ()
+ local dir = dirs[#dirs]
+ dirs[#dirs] = nil
+
+ if lfschdir (dir) then
+ lfschdir (pwd)
+
+ local newfiles = { }
+ local blacklist = names.blacklist
+ for ent in lfsdir (dir) do
+ --- filter right away
+ if ent ~= "." and ent ~= ".." and not blacklist[ent] then
+ local fullpath = dir .. "/" .. ent
+ if lfsisdir (fullpath)
+ and not lpegmatch (p_blacklist, fullpath)
+ then
+ dirs[#dirs+1] = fullpath
+ elseif lfsisfile (fullpath) then
+ ent = stringlower (ent)
+
+ if lpegmatch (p_font_filter, ent) then
+ if filesuffix (ent) == "afm" then
+ --- fontloader.open() will load the afm
+ --- iff both files are in the same directory
+ local pfbpath = filereplacesuffix
+ (fullpath, "pfb")
+ if lfsisfile (pfbpath) then
+ newfiles[#newfiles+1] = pfbpath
+ end
+ else
+ newfiles[#newfiles+1] = fullpath
+ end
+ end
+
+ end
+ end
+ end
+ return process_dir_tree (tableappend (acc, newfiles), dirs)
+ end
+ --- cannot cd; skip
+ return process_dir_tree (acc, dirs)
+end
+
+local process_dir = function (dir)
+ local pwd = lfscurrentdir ()
+ if lfschdir (dir) then
+ lfschdir (pwd)
+
+ local files = { }
+ local blacklist = names.blacklist
+ for ent in lfsdir (dir) do
+ if ent ~= "." and ent ~= ".." and not blacklist[ent] then
+ local fullpath = dir .. "/" .. ent
+ if lfsisfile (fullpath) then
+ ent = stringlower (ent)
+ if lpegmatch (p_font_filter, ent)
+ then
+ if filesuffix (ent) == "afm" then
+ --- fontloader.open() will load the afm
+ --- iff both files are in the same
+ --- directory
+ local pfbpath = filereplacesuffix
+ (fullpath, "pfb")
+ if lfsisfile (pfbpath) then
+ files[#files+1] = pfbpath
+ end
+ else
+ files[#files+1] = fullpath
+ end
+ end
+ end
+ end
+ end
+ return files
+ end
+ return { }
+end
+
+--- string -> bool -> string list
+local find_font_files = function (root, recurse)
+ if lfsisdir (root) then
+ if recurse == true then
+ return process_dir_tree ({}, { root })
+ else --- kpathsea already delivered the necessary subdirs
+ return process_dir (root)
+ end
+ end
+end
+
+--- truncate_string -- Cut the first part of a string to fit it
+--- into a given terminal width. The parameter “restrict” (int)
+--- indicates the number of characters already consumed on the
+--- line.
+local truncate_string = function (str, restrict)
+ local tw = config.luaotfload.misc.termwidth
+ local wd = tw - restrict
+ local len = utf8len (str)
+ if wd - len < 0 then
+        --- combined length exceeds the terminal width; truncate
+ str = ".." .. stringsub(str, len - wd + 2)
+ end
+ return str
+end
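+
+--- For instance, with a configured termwidth of 80 and restrict = 32,
+--- only the trailing part of a long path is kept and prefixed with “..”
+--- so that it fits into the remaining 48 columns (values illustrative).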
+
+
+--[[doc--
+
+ collect_font_filenames_dir -- Traverse the directory root at
+ ``dirname`` looking for font files. Returns a list of {*filename*;
+ *location*} pairs.
+
+--doc]]--
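+
+--- A sketch of the result of collect_font_filenames_dir (paths are
+--- hypothetical):
+---     { { "/usr/share/fonts/foo-regular.otf", "system" },
+---       { "/usr/share/fonts/foo-italic.otf",  "system" } }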
+
+--- string -> string -> string * string list
+local collect_font_filenames_dir = function (dirname, location)
+ if lpegmatch (p_blacklist, dirname) then
+ report ("both", 4, "db",
+ "Skipping blacklisted directory %s.", dirname)
+ --- ignore
+ return { }
+ end
+ local found = find_font_files (dirname, location ~= "texmf" and location ~= "local")
+ if not found then
+ report ("both", 4, "db",
+ "No such directory: %q; skipping.", dirname)
+ return { }
+ end
+
+ local nfound = #found
+ local files = { }
+
+ report ("both", 4, "db",
+ "%d font files detected in %s.",
+ nfound, dirname)
+ for j = 1, nfound do
+ local fullname = found[j]
+ files[#files + 1] = { path_normalize (fullname), location }
+ end
+ return files
+end
+
+
+--- string list -> string list
+local filter_out_pwd = function (dirs)
+ local result = { }
+ local pwd = path_normalize (lpegmatch (stripslashes,
+ lfscurrentdir ()))
+ for i = 1, #dirs do
+ --- better safe than sorry
+ local dir = path_normalize (lpegmatch (stripslashes, dirs[i]))
+ if not (dir == "." or dir == pwd) then
+ result[#result+1] = dir
+ end
+ end
+ return result
+end
+
+local path_separator = ostype == "windows" and ";" or ":"
+
+--[[doc--
+
+ collect_font_filenames_texmf -- Scan texmf tree for font files
+ relying on the kpathsea variables $OPENTYPEFONTS and $TTFONTS of
+ texmf.cnf.
+ The current working directory comes as “.” (texlive) or absolute
+ path (miktex) and will always be filtered out.
+
+ Returns a list of { *filename*; *location* } pairs.
+
+--doc]]--
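+
+--- For illustration only (the expansion is installation dependent):
+---
+---     kpseexpand_path "$OPENTYPEFONTS"
+---     --> ".:/usr/local/texlive/texmf-dist/fonts/opentype/public/lm:..."
+---
+--- The leading “.” entry is what filter_out_pwd above removes.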
+
+--- unit -> string * string list
+local collect_font_filenames_texmf = function ()
+
+ local osfontdir = kpseexpand_path "$OSFONTDIR"
+
+ if stringis_empty (osfontdir) then
+ report ("info", 1, "db", "Scanning TEXMF for fonts...")
+ else
+ report ("info", 1, "db", "Scanning TEXMF and $OSFONTDIR for fonts...")
+ if log.get_loglevel () > 3 then
+ local osdirs = filesplitpath (osfontdir)
+ report ("info", 0, "db", "$OSFONTDIR has %d entries:", #osdirs)
+ for i = 1, #osdirs do
+ report ("info", 0, "db", "[%d] %s", i, osdirs[i])
+ end
+ end
+ end
+
+    local fontdirs = kpseexpand_path "$OPENTYPEFONTS"
+ fontdirs = fontdirs .. path_separator .. kpseexpand_path "$TTFONTS"
+ fontdirs = fontdirs .. path_separator .. kpseexpand_path "$T1FONTS"
+
+ if stringis_empty (fontdirs) then
+ return { }
+ end
+
+ local tasks = filter_out_pwd (filesplitpath (fontdirs))
+ report ("info", 3, "db",
+ "Initiating scan of %d directories.", #tasks)
+
+ local files = { }
+ for _, dir in next, tasks do
+ files = tableappend (files, collect_font_filenames_dir (dir, "texmf"))
+ end
+ report ("term", 3, "db", "Collected %d files.", #files)
+ return files
+end
+
+--- unit -> string list
+local function get_os_dirs ()
+ if os.name == 'macosx' then
+ return {
+ filejoin(kpseexpand_path('~'), "Library/Fonts"),
+ "/Library/Fonts",
+ "/System/Library/Fonts",
+ "/Network/Library/Fonts",
+ }
+ elseif os.type == "windows" or os.type == "msdos" then
+ local windir = osgetenv("WINDIR")
+ return { filejoin(windir, 'Fonts') }
+ else
+ local fonts_conves = { --- plural, much?
+ "/usr/local/etc/fonts/fonts.conf",
+ "/etc/fonts/fonts.conf",
+ }
+ local os_dirs = read_fonts_conf(fonts_conves, find_files)
+ return os_dirs
+ end
+ return {}
+end
+
+--[[doc--
+
+ retrieve_namedata -- Scan the list of collected fonts and populate
+ the list of namedata.
+
+    · files        : list of { filename; location } pairs to process
+    · currentnames : current font db object
+    · targetnames  : font db object to fill
+    · dry_run      : don’t touch anything
+
+ Returns the number of fonts that were actually added to the index.
+
+--doc]]--
+
+--- string * string list -> dbobj -> dbobj -> bool? -> int
+local retrieve_namedata = function (files, currentnames, targetnames, dry_run)
+
+ local nfiles = #files
+ local nnew = 0
+
+ report ("info", 1, "db", "Scanning %d collected font files ...", nfiles)
+
+ local bylocation = { texmf = { 0, 0 }
+ , ["local"] = { 0, 0 }
+ , system = { 0, 0 }
+ }
+ report_status_start (2, 4)
+ for i = 1, nfiles do
+ local fullname, location = unpack (files[i])
+ local count = bylocation[location]
+ count[1] = count[1] + 1
+ if dry_run == true then
+ local truncated = truncate_string (fullname, 43)
+ report ("log", 2, "db", "Would have been loading %s.", fullname)
+ report_status ("term", "db", "Would have been loading %s", truncated)
+ --- skip the read_font_names part
+ else
+ local truncated = truncate_string (fullname, 32)
+ report ("log", 2, "db", "Loading font %s.", fullname)
+ report_status ("term", "db", "Loading font %s", truncated)
+ local new = read_font_names (fullname, currentnames,
+ targetnames, location)
+ if new == true then
+ nnew = nnew + 1
+ count[2] = count[2] + 1
+ end
+ end
+ end
+ report_status_stop ("term", "db", "Scanned %d files, %d new.", nfiles, nnew)
+ for location, count in next, bylocation do
+ report ("term", 4, "db", " * %s: %d files, %d new",
+ location, count[1], count[2])
+ end
+ return nnew
+end
+
+--- unit -> string * string list
+local collect_font_filenames_system = function ()
+
+ local n_scanned, n_new = 0, 0
+ report ("info", 1, "db", "Scanning system fonts...")
+ report ("info", 2, "db",
+ "Searching in static system directories...")
+
+ local files = { }
+ for _, dir in next, get_os_dirs () do
+ tableappend (files, collect_font_filenames_dir (dir, "system"))
+ end
+ report ("term", 3, "db", "Collected %d files.", #files)
+ return files
+end
+
+--- unit -> bool
+flush_lookup_cache = function ()
+ lookup_cache = { }
+ collectgarbage "collect"
+ return true
+end
+
+--[[doc--
+
+ collect_font_filenames_local -- Scan $PWD (during a TeX run)
+ for font files.
+
+ Side effect: This sets the “local” flag in the subtable “meta” to
+ prevent the merged table from being saved to disk.
+
+ TODO the local tree could be cached in $PWD.
+
+--doc]]--
+
+--- unit -> string * string list
+local collect_font_filenames_local = function ()
+ local pwd = lfscurrentdir ()
+ report ("both", 1, "db", "Scanning for fonts in $PWD (%q) ...", pwd)
+
+ local files = collect_font_filenames_dir (pwd, "local")
+ local nfiles = #files
+ if nfiles > 0 then
+ targetnames.meta["local"] = true --- prevent saving to disk
+        report ("term", 1, "db", "Found %d files.", nfiles)
+    else
+        report ("term", 1, "db",
+                "Couldn’t find a thing here. What a waste.")
+ end
+ report ("term", 3, "db", "Collected %d files.", #files)
+ return files
+end
+
+--- dbobj -> dbobj -> int * int
+
+--- fontentry list -> filemap
+
+generate_filedata = function (mappings)
+
+ report ("both", 2, "db", "Creating filename map.")
+
+ local nmappings = #mappings
+
+ local files = {
+ bare = {
+ ["local"] = { },
+ system = { }, --- mapped to mapping format -> index in full
+ texmf = { }, --- mapped to mapping format -> “true”
+ },
+ base = {
+ ["local"] = { },
+ system = { }, --- mapped to index in “full”
+ texmf = { }, --- set; all values are “true”
+ },
+ full = { }, --- non-texmf
+ }
+
+ local base = files.base
+ local bare = files.bare
+ local full = files.full
+
+ local conflicts = {
+ basenames = 0,
+ barenames = 0,
+ }
+
+ for index = 1, nmappings do
+ local entry = mappings [index]
+
+ local filedata = entry.file
+ local format
+ local location
+ local fullpath
+ local basename
+ local barename
+ local subfont
+
+ if filedata then --- new entry
+ format = entry.format --- otf, afm, ...
+ location = filedata.location --- texmf, system, ...
+ fullpath = filedata.full
+ basename = filedata.base
+ barename = filenameonly (fullpath)
+ subfont = filedata.subfont
+ else
+ format = entry.format --- otf, afm, ...
+ location = entry.location --- texmf, system, ...
+ fullpath = entry.fullpath
+ basename = entry.basename
+ barename = filenameonly (fullpath)
+ subfont = entry.subfont
+ end
+
+ entry.index = index
+
+ --- 1) add to basename table
+
+ local inbase = base [location] --- no format since the suffix is known
+
+ if inbase then
+ local present = inbase [basename]
+ if present then
+ report ("both", 4, "db",
+ "Conflicting basename: %q already indexed \z
+ in category %s, ignoring.",
+                        basename, location)
+ conflicts.basenames = conflicts.basenames + 1
+
+ --- track conflicts per font
+ local conflictdata = entry.conflicts
+
+ if not conflictdata then
+ entry.conflicts = { basename = present }
+ else -- some conflicts already detected
+ conflictdata.basename = present
+ end
+
+ else
+ inbase [basename] = index
+ end
+ else
+            inbase = { [basename] = index }
+ base [location] = inbase
+ end
+
+ --- 2) add to barename table
+
+ local inbare = bare [location] [format]
+
+ if inbare then
+ local present = inbare [barename]
+ if present then
+ report ("both", 4, "db",
+ "Conflicting barename: %q already indexed \z
+ in category %s/%s, ignoring.",
+ barename, location, format)
+ conflicts.barenames = conflicts.barenames + 1
+
+ --- track conflicts per font
+ local conflictdata = entry.conflicts
+
+ if not conflictdata then
+ entry.conflicts = { barename = present }
+ else -- some conflicts already detected
+ conflictdata.barename = present
+ end
+
+ else
+ inbare [barename] = index
+ end
+ else
+ inbare = { [barename] = index }
+ bare [location] [format] = inbare
+ end
+
+ --- 3) add to fullpath map
+
+ full [index] = fullpath
+ end
+
+ return files
+end
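+
+--- A lookup sketch against the resulting map (names are hypothetical):
+---
+---     local files    = generate_filedata (mappings)
+---     local index    = files.base.system ["foo-regular.otf"]
+---     local fullpath = files.full [index]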
+
+local pick_style
+local check_regular
+
+do
+ local splitfontname = lpeg.splitat "-"
+
+ local choose_exact = function (field)
+ --- only clean matches, without guessing
+ if italic_synonym [field] then
+ return "i"
+ end
+
+ if field == "bold" then
+ return "b"
+ end
+
+ if field == "bolditalic" or field == "boldoblique" then
+ return "bi"
+ end
+
+ return false
+ end
+
+ pick_style = function (fontstyle_name,
+ prefmodifiers,
+ subfamily,
+ splitstyle)
+ local style
+ if fontstyle_name then
+ style = choose_exact (fontstyle_name)
+ end
+ if not style then
+ if prefmodifiers then
+ style = choose_exact (prefmodifiers)
+ elseif subfamily then
+ style = choose_exact (subfamily)
+ end
+ end
+ return style
+ end
+
+ pick_fallback_style = function (italicangle, weight)
+ --- more aggressive, but only to determine bold faces
+ if weight > 500 then --- bold spectrum matches
+ if italicangle == 0 then
+ return tostring (weight)
+ else
+ return tostring (weight) .. "i"
+ end
+ end
+ return false
+ end
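+
+    --- Sample values (illustrative):
+    ---     pick_fallback_style (0, 600)   --> "600"
+    ---     pick_fallback_style (-12, 700) --> "700i"
+    ---     pick_fallback_style (0, 400)   --> false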
+
+ --- we use only exact matches here since there are constructs
+ --- like “regularitalic” (Cabin, Bodoni Old Fashion)
+
+ check_regular = function (fontstyle_name,
+ prefmodifiers,
+ subfamily,
+ splitstyle,
+ italicangle,
+ weight)
+
+ if fontstyle_name then
+ return regular_synonym [fontstyle_name]
+ elseif prefmodifiers then
+ return regular_synonym [prefmodifiers]
+ elseif subfamily then
+ return regular_synonym [subfamily]
+ elseif splitstyle then
+ return regular_synonym [splitstyle]
+ elseif italicangle == 0 and weight == 400 then
+ return true
+ end
+
+ return nil
+ end
+end
+
+local pull_values = function (entry)
+ local file = entry.file
+ local names = entry.names
+ local style = entry.style
+ local sanitized = names.sanitized
+ local english = sanitized.english
+ local info = sanitized.info
+ local metadata = sanitized.metadata
+
+ --- pull file info ...
+ entry.basename = file.base
+ entry.fullpath = file.full
+ entry.location = file.location
+ entry.subfont = file.subfont
+
+ --- pull name info ...
+ entry.psname = english.psname
+ entry.fontname = info.fontname or metadata.fontname
+ entry.fullname = english.fullname or info.fullname
+ entry.splainname = metadata.fullname
+ entry.prefmodifiers = english.prefmodifiers
+ local metafamily = metadata.familyname
+ local familyname = english.preffamily or english.family
+ entry.familyname = familyname
+ if familyname ~= metafamily then
+ entry.metafamily = metadata.familyname
+ end
+ entry.fontstyle_name = sanitized.fontstyle_name
+ entry.plainname = names.fullname
+ entry.subfamily = english.subfamily
+
+ --- pull style info ...
+ entry.italicangle = style.italicangle
+ entry.size = style.size
+ entry.splitstyle = style.split
+ entry.weight = style.weight
+
+ if config.luaotfload.db.strip == true then
+ entry.file = nil
+ entry.names = nil
+ entry.style = nil
+ end
+end
+
+local add_family = function (name, subtable, modifier, entry)
+ if not name then --- probably borked font
+ return
+ end
+ local familytable = subtable [name]
+ if not familytable then
+ familytable = { }
+ subtable [name] = familytable
+ end
+
+ local size = entry.size
+
+ familytable [#familytable + 1] = {
+ index = entry.index,
+ modifier = modifier,
+ }
+end
+
+local get_subtable = function (families, entry)
+ local location = entry.location
+ local format = entry.format
+ local subtable = families [location] [format]
+ if not subtable then
+ subtable = { }
+ families [location] [format] = subtable
+ end
+ return subtable
+end
+
+collect_families = function (mappings)
+
+ report ("info", 2, "db", "Analyzing families.")
+
+ local families = {
+ ["local"] = { },
+ system = { },
+ texmf = { },
+ }
+
+ for i = 1, #mappings do
+
+ local entry = mappings [i]
+
+ if entry.file then
+ pull_values (entry)
+ end
+
+ local subtable = get_subtable (families, entry)
+
+ local familyname = entry.familyname
+ local metafamily = entry.metafamily
+ local fontstyle_name = entry.fontstyle_name
+ local prefmodifiers = entry.prefmodifiers
+ local subfamily = entry.subfamily
+
+ local weight = entry.weight
+ local italicangle = entry.italicangle
+ local splitstyle = entry.splitstyle
+
+ local modifier = pick_style (fontstyle_name,
+ prefmodifiers,
+ subfamily,
+ splitstyle)
+
+ if not modifier then --- regular, exact only
+ modifier = check_regular (fontstyle_name,
+ prefmodifiers,
+ subfamily,
+ splitstyle,
+ italicangle,
+ weight)
+ end
+
+ if modifier then
+ add_family (familyname, subtable, modifier, entry)
+ --- registering the metafamilies is unreliable within the
+ --- same table as identifiers might interfere with an
+ --- unmarked style that lacks a metafamily, e.g.
+ ---
+ --- iwona condensed regular ->
+ --- family: iwonacond
+ --- metafamily: iwona
+ --- iwona regular ->
+ --- family: iwona
+ --- metafamily: ø
+ ---
+            --- Both would be registered under the same family,
+ --- i.e. “iwona”, and depending on the loading order
+ --- the query “name:iwona” can resolve to the condensed
+ --- version instead of the actual unmarked one. The only
+ --- way around this would be to introduce a separate
+ --- table for metafamilies and do fallback queries on it.
+ --- At the moment this is not pressing enough to justify
+ --- further increasing the index size, maybe if need
+ --- arises from the user side.
+-- if metafamily and metafamily ~= familyname then
+-- add_family (metafamily, subtable, modifier, entry)
+-- end
+ elseif weight > 500 then -- in bold spectrum
+ modifier = pick_fallback_style (italicangle, weight)
+ if modifier then
+ add_family (familyname, subtable, modifier, entry)
+ end
+ end
+ end
+
+ collectgarbage "collect"
+ return families
+end
+
+--[[doc--
+
+ group_modifiers -- For not-quite-bold faces, determine whether
+ they can fill in for a missing bold face slot in a matching family.
+
+ Some families like Lucida do not contain real bold / bold italic
+ members. Instead, they have semibold variants at weight 600 which
+ we must add in a separate pass.
+
+--doc]]--
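+
+--- Example (hypothetical family data): if a family offers weights 400
+--- (regular) and 600 (demibold) but no 700 face, the 600 face ends up
+--- in the “b” slot because its weight is the closest one to bold (700).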
+
+local bold_spectrum_low = 501 --- 500 is medium, 900 heavy/black
+local bold_weight = 700
+local style_categories = { "r", "b", "i", "bi" }
+local bold_categories = { "b", "bi" }
+
+group_modifiers = function (mappings, families)
+ report ("info", 2, "db", "Analyzing shapes, weights, and styles.")
+ for location, location_data in next, families do
+ for format, format_data in next, location_data do
+ for familyname, collected in next, format_data do
+ local styledata = { } --- will replace the “collected” table
+ --- First, fill in the ordinary style data that
+ --- fits neatly into the four relevant modifier
+ --- categories.
+ for _, modifier in next, style_categories do
+ local entries
+ for key, info in next, collected do
+ if info.modifier == modifier then
+ if not entries then
+ entries = { }
+ end
+ local index = info.index
+ local entry = mappings [index]
+ local size = entry.size
+ if size then
+ entries [#entries + 1] = {
+ size [1],
+ size [2],
+ size [3],
+ index,
+ }
+ else
+ entries.default = index
+ end
+ collected [key] = nil
+ end
+ styledata [modifier] = entries
+ end
+ end
+
+ --- At this point the family set may still lack
+ --- entries for bold or bold italic. We will fill
+ --- those in using the modifier with the numeric
+ --- weight that is closest to bold (700).
+ if next (collected) then --- there are uncategorized entries
+ for _, modifier in next, bold_categories do
+ if not styledata [modifier] then
+ local closest
+ local minimum = 2^51
+ for key, info in next, collected do
+ local info_modifier = tonumber (info.modifier) and "b" or "bi"
+ if modifier == info_modifier then
+ local index = info.index
+ local entry = mappings [index]
+ local weight = entry.weight
+ local diff = weight < 700 and 700 - weight or weight - 700
+ if diff < minimum then
+ minimum = diff
+ closest = weight
+ end
+ end
+ end
+ if closest then
+ --- We know there is a substitute face for the modifier.
+ --- Now we scan the list again to extract the size data
+ --- in case the shape is available at multiple sizes.
+ local entries = { }
+ for key, info in next, collected do
+ local info_modifier = tonumber (info.modifier) and "b" or "bi"
+ if modifier == info_modifier then
+ local index = info.index
+ local entry = mappings [index]
+ local size = entry.size
+ if entry.weight == closest then
+ if size then
+ entries [#entries + 1] = {
+ size [1],
+ size [2],
+ size [3],
+ index,
+ }
+ else
+ entries.default = index
+ end
+ end
+ end
+ end
+ styledata [modifier] = entries
+ end
+ end
+ end
+ end
+ format_data [familyname] = styledata
+ end
+ end
+ end
+ return families
+end
+
+local cmp_sizes = function (a, b)
+ return a [1] < b [1]
+end
+
+order_design_sizes = function (families)
+
+ report ("info", 2, "db", "Ordering design sizes.")
+
+ for location, data in next, families do
+ for format, data in next, data do
+ for familyname, data in next, data do
+ for style, data in next, data do
+ tablesort (data, cmp_sizes)
+ end
+ end
+ end
+ end
+
+ return families
+end
+
+--[[doc--
+
+ collect_font_filenames -- Scan the three search path categories for
+ font files. This constitutes the first pass of the update mode.
+
+--doc]]--
+
+--- unit -> string * string list
+local collect_font_filenames = function ()
+
+ report ("info", 4, "db", "Scanning the filesystem for font files.")
+
+ local filenames = { }
+ local bisect = config.luaotfload.misc.bisect
+ local max_fonts = config.luaotfload.db.max_fonts --- XXX revisit for lua 5.3 wrt integers
+
+ tableappend (filenames, collect_font_filenames_texmf ())
+ tableappend (filenames, collect_font_filenames_system ())
+ if config.luaotfload.db.scan_local == true then
+ tableappend (filenames, collect_font_filenames_local ())
+ end
+ --- Now drop everything above max_fonts.
+ if max_fonts < #filenames then
+ filenames = { unpack (filenames, 1, max_fonts) }
+ end
+ --- And choose the requested slice if in bisect mode.
+ if bisect then
+ return { unpack (filenames, bisect[1], bisect[2]) }
+ end
+ return filenames
+end
+
+--[[doc--
+
+    nth_font_filename -- Return the filename of the nth font.
+
+--doc]]--
+
+--- int -> string
+local nth_font_filename = function (n)
+ report ("info", 4, "db", "Picking font file no. %d.", n)
+ if not p_blacklist then
+ read_blacklist ()
+ end
+ local filenames = collect_font_filenames ()
+ return filenames[n] and filenames[n][1] or "<error>"
+end
+
+--[[doc--
+
+ font_slice -- Return the fonts in the range from lo to hi.
+
+--doc]]--
+
+local font_slice = function (lo, hi)
+ report ("info", 4, "db", "Retrieving font files nos. %d--%d.", lo, hi)
+ if not p_blacklist then
+ read_blacklist ()
+ end
+ local filenames = collect_font_filenames ()
+ local result = { }
+ for i = lo, hi do
+ result[#result + 1] = filenames[i][1]
+ end
+ return result
+end
+
+--[[doc--
+
+ count_font_files -- Return the number of files found by
+ collect_font_filenames. This function is exported primarily
+ for use with luaotfload-tool.lua in bisect mode.
+
+--doc]]--
+
+--- unit -> int
+local count_font_files = function ()
+ report ("info", 4, "db", "Counting font files.")
+ if not p_blacklist then
+ read_blacklist ()
+ end
+ return #collect_font_filenames ()
+end
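+
+--[[doc--
+
+    How the bisection helpers relate (a sketch; the numbers are made up):
+
+        local n     = count_font_files ()              --- e.g. 2048
+        local file  = nth_font_filename (17)           --- a single file
+        local lower = font_slice (1, math.floor (n/2)) --- first half
+
+--doc]]--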
+
+--- dbobj -> stats
+
+local collect_statistics = function (mappings)
+ local sum_dsnsize, n_dsnsize = 0, 0
+
+ local fullname, family, families = { }, { }, { }
+ local subfamily, prefmodifiers, fontstyle_name = { }, { }, { }
+
+ local addtohash = function (hash, item)
+ if item then
+ local times = hash [item]
+ if times then
+ hash [item] = times + 1
+ else
+ hash [item] = 1
+ end
+ end
+ end
+
+ local appendtohash = function (hash, key, value)
+ if key and value then
+ local entry = hash [key]
+ if entry then
+ entry [#entry + 1] = value
+ else
+ hash [key] = { value }
+ end
+ end
+ end
+
+ local addtoset = function (hash, key, value)
+ if key and value then
+ local set = hash [key]
+ if set then
+ set [value] = true
+ else
+ hash [key] = { [value] = true }
+ end
+ end
+ end
+
+ local setsize = function (set)
+ local n = 0
+ for _, _ in next, set do
+ n = n + 1
+ end
+ return n
+ end
+
+ local hashsum = function (hash)
+ local n = 0
+ for _, m in next, hash do
+ n = n + m
+ end
+ return n
+ end
+
+ for _, entry in next, mappings do
+ local style = entry.style
+ local names = entry.names.sanitized
+ local englishnames = names.english
+
+ addtohash (fullname, englishnames.fullname)
+ addtohash (family, englishnames.family)
+ addtohash (subfamily, englishnames.subfamily)
+ addtohash (prefmodifiers, englishnames.prefmodifiers)
+ addtohash (fontstyle_name, names.fontstyle_name)
+
+ addtoset (families, englishnames.family, englishnames.fullname)
+
+ local sizeinfo = entry.style.size
+ if sizeinfo then
+ sum_dsnsize = sum_dsnsize + sizeinfo [1]
+ n_dsnsize = n_dsnsize + 1
+ end
+ end
+
+ --inspect (families)
+
+ local n_fullname = setsize (fullname)
+ local n_family = setsize (family)
+
+ if log.get_loglevel () > 1 then
+ local pprint_top = function (hash, n, set)
+
+ local freqs = { }
+ local items = { }
+
+            for item, value in next, hash do
+                local freq
+                if set then
+                    freq = setsize (value)
+                else
+                    freq = value
+                end
+ local ifreq = items [freq]
+ if ifreq then
+ ifreq [#ifreq + 1] = item
+ else
+ items [freq] = { item }
+ freqs [#freqs + 1] = freq
+ end
+ end
+
+ tablesort (freqs)
+
+ local from = #freqs
+ local to = from - (n - 1)
+ if to < 1 then
+ to = 1
+ end
+
+ for i = from, to, -1 do
+ local freq = freqs [i]
+ local itemlist = items [freq]
+
+ if type (itemlist) == "table" then
+ itemlist = tableconcat (itemlist, ", ")
+ end
+
+ report ("both", 0, "db",
+ " · %4d × %s.",
+ freq, itemlist)
+ end
+ end
+
+ report ("both", 0, "", "~~~~ font index statistics ~~~~")
+ report ("both", 0, "db",
+ " · Collected %d fonts (%d names) in %d families.",
+ #mappings, n_fullname, n_family)
+ pprint_top (families, 4, true)
+
+ report ("both", 0, "db",
+ " · %d different “subfamily” kinds.",
+ setsize (subfamily))
+ pprint_top (subfamily, 4)
+
+ report ("both", 0, "db",
+ " · %d different “prefmodifiers” kinds.",
+ setsize (prefmodifiers))
+ pprint_top (prefmodifiers, 4)
+
+ report ("both", 0, "db",
+ " · %d different “fontstyle_name” kinds.",
+ setsize (fontstyle_name))
+ pprint_top (fontstyle_name, 4)
+ end
+
+ local mean_dsnsize = 0
+ if n_dsnsize > 0 then
+ mean_dsnsize = sum_dsnsize / n_dsnsize
+ end
+
+ return {
+ mean_dsnsize = mean_dsnsize,
+ names = {
+ fullname = n_fullname,
+ families = n_family,
+ },
+-- style = {
+-- subfamily = subfamily,
+-- prefmodifiers = prefmodifiers,
+-- fontstyle_name = fontstyle_name,
+-- },
+ }
+end
+
+--- force:   rebuild the index from scratch
+--- dry_run: don’t write to the db, just scan dirs
+
+--- dbobj? -> bool? -> bool? -> dbobj
+update_names = function (currentnames, force, dry_run)
+    local targetnames
+    local n_new
+
+ if config.luaotfload.db.update_live == false then
+ report ("info", 2, "db",
+ "Skipping database update.")
+ --- skip all db updates
+ return currentnames or name_index
+ end
+
+ local starttime = osgettimeofday ()
+
+ --[[
+ The main function, scans everything
+ - “targetnames” is the final table to return
+ - force is whether we rebuild it from scratch or not
+ ]]
+ report("both", 1, "db", "Updating the font names database"
+ .. (force and " forcefully." or "."))
+
+ if config.luaotfload.db.skip_read == true then
+        --- the difference from a “dry run” is that we skip scanning
+        --- for font files entirely. We also ignore the “force”
+        --- parameter since it concerns only the font files.
+ report ("info", 2, "db",
+ "Ignoring font files, reusing old data.")
+ currentnames = load_names (false)
+ targetnames = currentnames
+ else
+ if force then
+ currentnames = initialize_namedata (get_font_filter ())
+ else
+ if not currentnames then
+ currentnames = load_names (dry_run)
+ end
+ if currentnames.meta.version ~= names.version then
+ report ("both", 1, "db", "No font names database or old "
+ .. "one found; generating new one.")
+ currentnames = initialize_namedata (get_font_filter ())
+ end
+ end
+
+ targetnames = initialize_namedata (get_font_filter (),
+ currentnames.meta and currentnames.meta.created)
+
+ read_blacklist ()
+
+ --- pass 1: Collect the names of all fonts we are going to process.
+ local font_filenames = collect_font_filenames ()
+
+ --- pass 2: read font files (normal case) or reuse information
+ --- present in index
+
+ n_new = retrieve_namedata (font_filenames,
+ currentnames,
+ targetnames,
+ dry_run)
+ report ("info", 3, "db",
+ "Found %d font files; %d new entries.",
+ #font_filenames, n_new)
+ end
+
+ --- pass 3 (optional): collect some stats about the raw font info
+ if config.luaotfload.misc.statistics == true then
+ targetnames.meta.statistics = collect_statistics
+ (targetnames.mappings)
+ end
+
+ --- we always generate the file lookup tables because
+ --- non-texmf entries are redirected there and the mapping
+ --- needs to be 100% consistent
+
+ --- pass 4: build filename table
+ targetnames.files = generate_filedata (targetnames.mappings)
+
+ --- pass 5: build family lookup table
+ targetnames.families = collect_families (targetnames.mappings)
+
+ --- pass 6: arrange style and size info
+ targetnames.families = group_modifiers (targetnames.mappings,
+ targetnames.families)
+
+ --- pass 7: order design size tables
+ targetnames.families = order_design_sizes (targetnames.families)
+
+
+ report ("info", 3, "db",
+            "Rebuilt in %.0f ms.",
+ 1000 * (osgettimeofday () - starttime))
+ name_index = targetnames
+
+ if dry_run ~= true then
+
+ if n_new == 0 then
+ report ("info", 2, "db", "No new fonts found, skip saving to disk.")
+ else
+ local success, reason = save_names ()
+ if not success then
+ report ("both", 0, "db",
+ "Failed to save database to disk: %s",
+ reason)
+ end
+ end
+
+ if flush_lookup_cache () and save_lookups () then
+ report ("both", 2, "cache", "Lookup cache emptied.")
+ return targetnames
+ end
+ end
+ return targetnames
+end
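+
+--- Typical invocations (a sketch; the function is exported as
+--- fonts.names.update below):
+---
+---     update_names ()                 --- incremental update
+---     update_names (nil, true)        --- force rebuild from scratch
+---     update_names (nil, false, true) --- dry run, only scan directories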
+
+--- unit -> bool
+save_lookups = function ( )
+ local paths = config.luaotfload.paths
+ local luaname, lucname = paths.lookup_path_lua, paths.lookup_path_luc
+ if fileiswritable (luaname) and fileiswritable (lucname) then
+ tabletofile (luaname, lookup_cache, true)
+ osremove (lucname)
+ caches.compile (lookup_cache, luaname, lucname)
+ --- double check ...
+ if lfsisfile (luaname) and lfsisfile (lucname) then
+ report ("both", 3, "cache", "Lookup cache saved.")
+ return true
+ end
+ report ("info", 0, "cache", "Could not compile lookup cache.")
+ return false
+ end
+ report ("info", 0, "cache", "Lookup cache file not writable.")
+ if not fileiswritable (luaname) then
+ report ("info", 0, "cache", "Failed to write %s.", luaname)
+ end
+ if not fileiswritable (lucname) then
+ report ("info", 0, "cache", "Failed to write %s.", lucname)
+ end
+ return false
+end
+
+--- save_names() is usually called without the argument
+--- dbobj? -> bool * string option
+save_names = function (currentnames)
+ if not currentnames then
+ currentnames = name_index
+ end
+ if not currentnames or type (currentnames) ~= "table" then
+ return false, "invalid names table"
+ elseif currentnames.meta and currentnames.meta["local"] then
+ return false, "table contains local entries"
+ end
+ local paths = config.luaotfload.paths
+ local luaname, lucname = paths.index_path_lua, paths.index_path_luc
+ if fileiswritable (luaname) and fileiswritable (lucname) then
+ osremove (lucname)
+ local gzname = luaname .. ".gz"
+ if config.luaotfload.db.compress then
+ local serialized = tableserialize (currentnames, true)
+ save_gzipped (gzname, serialized)
+ caches.compile (currentnames, "", lucname)
+ else
+ tabletofile (luaname, currentnames, true)
+ caches.compile (currentnames, luaname, lucname)
+ end
+ report ("info", 2, "db", "Font index saved at ...")
+ local success = false
+ if lfsisfile (luaname) then
+ report ("info", 2, "db", "Text: " .. luaname)
+ success = true
+ end
+ if lfsisfile (gzname) then
+ report ("info", 2, "db", "Gzip: " .. gzname)
+ success = true
+ end
+ if lfsisfile (lucname) then
+ report ("info", 2, "db", "Byte: " .. lucname)
+ success = true
+ end
+ if success then
+ return true
+ else
+ report ("info", 0, "db", "Could not compile font index.")
+ return false
+ end
+ end
+    report ("info", 0, "db", "Index file not writable.")
+ if not fileiswritable (luaname) then
+ report ("info", 0, "db", "Failed to write %s.", luaname)
+ end
+ if not fileiswritable (lucname) then
+ report ("info", 0, "db", "Failed to write %s.", lucname)
+ end
+ return false
+end
+
+--[[doc--
+
+    The set of functions below is modeled after mtx-cache.
+
+--doc]]--
+
+--- string -> string -> string list -> string list -> string list -> unit
+local print_cache = function (category, path, luanames, lucnames, rest)
+ local report_indeed = function (...)
+ report("info", 0, "cache", ...)
+ end
+ report_indeed("Luaotfload cache: %s", category)
+ report_indeed("location: %s", path)
+ report_indeed("[raw] %4i", #luanames)
+ report_indeed("[compiled] %4i", #lucnames)
+ report_indeed("[other] %4i", #rest)
+ report_indeed("[total] %4i", #luanames + #lucnames + #rest)
+end
+
+--- string -> string -> string list -> bool -> bool
+local purge_from_cache = function (category, path, list, all)
+ report("info", 1, "cache", "Luaotfload cache: %s %s",
+ (all and "erase" or "purge"), category)
+ report("info", 1, "cache", "location: %s",path)
+ local n = 0
+ for i=1,#list do
+ local filename = list[i]
+ if stringfind(filename,"luatex%-cache") then -- safeguard
+ if all then
+ report("info", 5, "cache", "Removing %s.", filename)
+ osremove(filename)
+ n = n + 1
+ else
+ local suffix = filesuffix(filename)
+ if suffix == "lua" then
+ local checkname = file.replacesuffix(
+ filename, "lua", "luc")
+ if lfsisfile(checkname) then
+ report("info", 5, "cache", "Removing %s.", filename)
+ osremove(filename)
+ n = n + 1
+ end
+ end
+ end
+ end
+ end
+ report("info", 1, "cache", "Removed lua files : %i", n)
+ return true
+end
+
+--- string -> string list -> int -> string list -> string list -> string list ->
+--- (string list * string list * string list * string list)
+local collect_cache
+collect_cache = function (path, all, n, luanames,
+                          lucnames, rest)
+ if not all then
+ local all = find_files (path)
+
+ local luanames, lucnames, rest = { }, { }, { }
+ return collect_cache(nil, all, 1, luanames, lucnames, rest)
+ end
+
+ local filename = all[n]
+ if filename then
+ local suffix = filesuffix(filename)
+ if suffix == "lua" then
+ luanames[#luanames+1] = filename
+ elseif suffix == "luc" then
+ lucnames[#lucnames+1] = filename
+ else
+ rest[#rest+1] = filename
+ end
+ return collect_cache(nil, all, n+1, luanames, lucnames, rest)
+ end
+ return luanames, lucnames, rest, all
+end
+
+local getwritablecachepath = function ( )
+ --- fonts.handlers.otf doesn’t exist outside a Luatex run,
+ --- so we have to improvise
+ local writable = getwritablepath (config.luaotfload.paths.cache_dir)
+ if writable then
+ return writable
+ end
+end
+
+local getreadablecachepaths = function ( )
+ local readables = caches.getreadablepaths
+ (config.luaotfload.paths.cache_dir)
+ local result = { }
+ if readables then
+ for i=1, #readables do
+ local readable = readables[i]
+ if lfsisdir (readable) then
+ result[#result+1] = readable
+ end
+ end
+ end
+ return result
+end
+
+--- unit -> unit
+local purge_cache = function ( )
+ local writable_path = getwritablecachepath ()
+ local luanames, lucnames, rest = collect_cache(writable_path)
+ if log.get_loglevel() > 1 then
+ print_cache("writable path", writable_path, luanames, lucnames, rest)
+ end
+ local success = purge_from_cache("writable path", writable_path, luanames, false)
+ return success
+end
+
+--- unit -> unit
+local erase_cache = function ( )
+ local writable_path = getwritablecachepath ()
+ local luanames, lucnames, rest, all = collect_cache(writable_path)
+ if log.get_loglevel() > 1 then
+ print_cache("writable path", writable_path, luanames, lucnames, rest)
+ end
+ local success = purge_from_cache("writable path", writable_path, all, true)
+ return success
+end
+
+local separator = function ( )
+ report("info", 0, string.rep("-", 67))
+end
+
+--- unit -> unit
+local show_cache = function ( )
+ local readable_paths = getreadablecachepaths ()
+ local writable_path = getwritablecachepath ()
+ local luanames, lucnames, rest = collect_cache(writable_path)
+
+ separator ()
+ print_cache ("writable path", writable_path,
+ luanames, lucnames, rest)
+ texio.write_nl""
+ for i=1,#readable_paths do
+ local readable_path = readable_paths[i]
+ if readable_path ~= writable_path then
+            local luanames, lucnames, rest = collect_cache (readable_path)
+ print_cache ("readable path",
+ readable_path, luanames, lucnames, rest)
+ end
+ end
+ separator()
+ return true
+end
+
+-----------------------------------------------------------------------
+--- export functionality to the namespace “fonts.names”
+-----------------------------------------------------------------------
+
+names.set_font_filter = set_font_filter
+names.flush_lookup_cache = flush_lookup_cache
+names.save_lookups = save_lookups
+names.load = load_names
+names.access_font_index = access_font_index
+names.data = function () return name_index end
+names.save = save_names
+names.update = update_names
+names.font_file_lookup = font_file_lookup
+names.read_blacklist = read_blacklist
+names.sanitize_fontname = sanitize_fontname
+names.getfilename = resolve_fullpath
+names.getmetadata = getmetadata
+names.set_location_precedence = set_location_precedence
+names.count_font_files = count_font_files
+names.nth_font_filename = nth_font_filename
+names.font_slice = font_slice
+names.resolve_cached = resolve_cached
+names.resolve_name = resolve_name
+
+--- font cache
+names.purge_cache = purge_cache
+names.erase_cache = erase_cache
+names.show_cache = show_cache
+
+names.find_closest = find_closest
+
+-- for testing purpose
+names.read_fonts_conf = read_fonts_conf
+
+-- vim:tw=71:sw=4:ts=4:expandtab
diff --git a/src/luaotfload-diagnostics.lua b/src/luaotfload-diagnostics.lua
new file mode 100644
index 0000000..d9b13f5
--- /dev/null
+++ b/src/luaotfload-diagnostics.lua
@@ -0,0 +1,684 @@
+#!/usr/bin/env texlua
+-----------------------------------------------------------------------
+-- FILE: luaotfload-diagnostics.lua
+-- DESCRIPTION: functionality accessible by the --diagnose option
+-- REQUIREMENTS: luaotfload-tool.lua
+-- AUTHOR: Philipp Gesang (Phg), <phg42.2a@gmail.com>
+-- VERSION: 2.5
+-- MODIFIED: 2014-01-02 21:23:06+0100
+-----------------------------------------------------------------------
+--
+local names = fonts.names
+
+local kpse = require "kpse"
+local kpseexpand_path = kpse.expand_path
+local kpseexpand_var = kpse.expand_var
+local kpsefind_file = kpse.find_file
+
+local lfs = require "lfs"
+local lfsattributes = lfs.attributes
+local lfsisfile = lfs.isfile
+local lfsreadlink = lfs.readlink
+
+local md5 = require "md5"
+local md5sumhexa = md5.sumhexa
+
+local ioopen = io.open
+
+local osdate = os.date
+local osgetenv = os.getenv
+local osname = os.name
+local osremove = os.remove
+local ostype = os.type
+local stringformat = string.format
+local stringlower = string.lower
+local stringsub = string.sub
+
+local fileisreadable = file.isreadable
+local fileiswritable = file.iswritable
+local filesplitpath = file.splitpath
+local filesuffix = file.suffix
+local ioloaddata = io.loaddata
+local lua_of_json = utilities.json.tolua
+local tableconcat = table.concat
+local tablesortedkeys = table.sortedkeys
+local tabletohash = table.tohash
+
+local lpeg = require "lpeg"
+local C, Cg, Ct = lpeg.C, lpeg.Cg, lpeg.Ct
+local lpegmatch = lpeg.match
+
+local report = luaotfload.log.report
+local out = function (...)
+ report (false, 0, "diagnose", ...)
+end
+
+local parsers = luaotfload.parsers
+local stripslashes = parsers.stripslashes
+local splitcomma = parsers.splitcomma
+
+local check_index = function (errcnt)
+
+ out "================= font names =================="
+ local namedata = names.data()
+
+ if not namedata then
+ namedata = names.load ()
+ end
+
+    if not (namedata and namedata.meta
+            and namedata.meta.formats
+            and namedata.meta.version) then
+        out "Database corrupt."
+        return errcnt + 1
+    end
+
+    local mappings = namedata.mappings
+
+ out ("Database version: %.3f.", namedata.meta.version)
+ out ("Font formats indexed: %s.",
+ tableconcat (namedata.meta.formats, ", "))
+ out ("%d font files indexed.", #mappings)
+
+ local by_format = { }
+ for i = 1, #mappings do
+ local record = mappings[i]
+ local format = stringlower (filesuffix (record.filename))
+ local count = by_format[format]
+ if count then
+ by_format[format] = count + 1
+ else
+ by_format[format] = 1
+ end
+ end
+
+ local formats = tablesortedkeys (by_format)
+ for i = 1, #formats do
+ local format = formats[i]
+ out ("%20s: %5d", format, by_format[format])
+ end
+ return errcnt
+end
+
+local verify_files = function (errcnt)
+ out "================ verify files ================="
+ local status = config.luaotfload.status
+ local hashes = status.hashes
+ local notes = status.notes
+ if not hashes or #hashes == 0 then
+ out ("FAILED: cannot read checksums from %s.", status_file)
+ return 1/0
+ elseif not notes then
+ out ("FAILED: cannot read commit metadata from %s.",
+ status_file)
+ return 1/0
+ end
+
+ out ("Luaotfload revision %s.", notes.revision)
+ out ("Committed by %s.", notes.committer)
+ out ("Timestamp %s.", notes.timestamp)
+
+ local nhashes = #hashes
+ out ("Testing %d files for integrity.", nhashes)
+ for i = 1, nhashes do
+ local fname, canonicalsum = unpack (hashes[i])
+ local location = kpsefind_file (fname)
+ or kpsefind_file (fname, "texmfscripts")
+ if not location then
+ errcnt = errcnt + 1
+ out ("FAILED: file %s missing.", fname)
+ else
+ out ("File: %s.", location)
+ local raw = ioloaddata (location)
+ if not raw then
+ errcnt = errcnt + 1
+                out ("FAILED: file %s not readable.", fname)
+ else
+ local sum = md5sumhexa (raw)
+ if sum ~= canonicalsum then
+ errcnt = errcnt + 1
+ out ("FAILED: checksum mismatch for file %s.",
+ fname)
+ out ("Expected %s.", canonicalsum)
+ out ("Got %s.", sum)
+ else
+ out ("Ok, %s passed.", fname)
+ end
+ end
+ end
+ end
+ return errcnt
+end
+
+local get_tentative_attributes = function (file)
+ if not lfsisfile (file) then
+ local chan = ioopen (file, "w")
+ if chan then
+ chan:close ()
+ local attributes = lfsattributes (file)
+ os.remove (file)
+ return attributes
+ end
+ end
+end
+
+local p_permissions = Ct(Cg(Ct(C(1) * C(1) * C(1)), "u")
+ * Cg(Ct(C(1) * C(1) * C(1)), "g")
+ * Cg(Ct(C(1) * C(1) * C(1)), "o"))
+
+local analyze_permissions = function (raw)
+ return lpegmatch (p_permissions, raw)
+end
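+
+--- Sketch of the captured structure for a typical lfs permission string:
+---     analyze_permissions "rw-r--r--"
+---     --> { u = { "r", "w", "-" },
+---           g = { "r", "-", "-" },
+---           o = { "r", "-", "-" } }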
+
+local get_permissions = function (t, location)
+ if stringsub (location, #location) == "/" then
+ --- strip trailing slashes (lfs idiosyncrasy on Win)
+ location = lpegmatch (stripslashes, location)
+ end
+ local attributes = lfsattributes (location)
+
+ if not attributes and t == "f" then
+ attributes = get_tentative_attributes (location)
+ if not attributes then
+ return false
+ end
+ end
+
+ local permissions
+
+ if fileisreadable (location) then
+ --- link handling appears to be unnecessary because
+ --- lfs.attributes() will return the information on
+ --- the link target.
+        if attributes.mode == "link" then --- follow and repeat
+ location = lfsreadlink (location)
+ attributes = lfsattributes (location)
+ end
+ end
+
+ permissions = analyze_permissions (attributes.permissions)
+
+ return {
+ location = location,
+ mode = attributes.mode,
+ owner = attributes.uid, --- useless on windows
+ permissions = permissions,
+ attributes = attributes,
+ }
+end
+
+local check_conformance = function (spec, permissions, errcnt)
+ local uid = permissions.attributes.uid
+ local gid = permissions.attributes.gid
+ local raw = permissions.attributes.permissions
+
+ out ("Owner: %d, group %d, permissions %s.", uid, gid, raw)
+ if ostype == "unix" then
+ if uid == 0 or gid == 0 then
+ out "Owned by the superuser, permission conflict likely."
+ errcnt = errcnt + 1
+ end
+ end
+
+ local user = permissions.permissions.u
+ if spec.r == true then
+ if user[1] == "r" then
+ out "Readable: ok."
+ else
+ out "Not readable: permissions need fixing."
+ errcnt = errcnt + 1
+ end
+ end
+
+ if spec.w == true then
+ if user[2] == "w"
+ or fileiswritable (permissions.location) then
+ out "Writable: ok."
+ else
+ out "Not writable: permissions need fixing."
+ errcnt = errcnt + 1
+ end
+ end
+
+ return errcnt
+end
+
+local desired_permissions
+local init_desired_permissions = function ()
+    --inspect (config.luaotfload.paths)
+ local paths = config.luaotfload.paths
+ desired_permissions = {
+ { "d", {"r","w"}, function () return caches.getwritablepath () end },
+ { "d", {"r","w"}, paths.prefix },
+ { "f", {"r","w"}, paths.index_path_lua .. ".gz" },
+ { "f", {"r","w"}, paths.index_path_luc },
+ { "f", {"r","w"}, paths.lookup_path_lua },
+ { "f", {"r","w"}, paths.lookup_path_luc },
+ }
+end
+
+local check_permissions = function (errcnt)
+ out [[=============== file permissions ==============]]
+ if not desired_permissions then init_desired_permissions () end
+ for i = 1, #desired_permissions do
+ local t, spec, path = unpack (desired_permissions[i])
+ if type (path) == "function" then
+ path = path ()
+ end
+
+ spec = tabletohash (spec)
+
+ out ("Checking permissions of %s.", path)
+
+ local permissions = get_permissions (t, path)
+ if permissions then
+ --inspect (permissions)
+ errcnt = check_conformance (spec, permissions, errcnt)
+ else
+ errcnt = errcnt + 1
+ end
+ end
+ return errcnt
+end
+
+local check_upstream
+
+if kpsefind_file ("https.lua", "lua") == nil then
+ check_upstream = function (errcnt)
+ out [[============= upstream repository =============
+ WARNING: Cannot retrieve repository data.
+ Github API access requires the luasec library.
+ Grab it from <https://github.com/brunoos/luasec>
+ and retry.]]
+ return errcnt
+ end
+else
+--- github api stuff begin
+ local https = require "ssl.https"
+
+ local gh_api_root = [[https://api.github.com]]
+ local release_url = [[https://github.com/lualatex/luaotfload/releases]]
+ local luaotfload_repo = [[lualatex/luaotfload]]
+ local user_agent = [[lualatex/luaotfload integrity check]]
+ local shortbytes = 8
+
+ local gh_shortrevision = function (rev)
+ return stringsub (rev, 1, shortbytes)
+ end
+
+ local gh_encode_parameters = function (parameters)
+ local acc = {}
+ for field, value in next, parameters do
+ --- unsafe, non-urlencoded coz it’s all ascii chars
+ acc[#acc+1] = field .. "=" .. value
+ end
+ return "?" .. tableconcat (acc, "&")
+ end
+
+ local gh_make_url = function (components, parameters)
+ local url = tableconcat ({ gh_api_root,
+ unpack (components) },
+ "/")
+ if parameters then
+ url = url .. gh_encode_parameters (parameters)
+ end
+ return url
+ end
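+
+    --- For illustration (the query parameter is made up):
+    ---     gh_make_url ({ "repos", luaotfload_repo, "tags" },
+    ---                  { per_page = 100 })
+    ---     --> "https://api.github.com/repos/lualatex/luaotfload/tags?per_page=100"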
+
+ local alright = [[HTTP/1.1 200 OK]]
+
+ local gh_api_request = function (...)
+ local args = {...}
+ local nargs = #args
+ local final = args[nargs]
+ local request = {
+ url = "",
+ headers = { ["user-agent"] = user_agent },
+ }
+ if type (final) == "table" then
+ args[nargs] = nil
+ request = gh_make_url (args, final)
+ else
+ request = gh_make_url (args)
+ end
+
+ out ("Requesting <%s>.", request)
+ local response, code, headers, status = https.request (request)
+ if status ~= alright then
+ out "Request failed!"
+ return false
+ end
+ return response
+ end
+
+ local gh_api_checklimit = function (headers)
+ local rawlimit = gh_api_request "rate_limit"
+ local limitdata = lua_of_json (rawlimit)
+        if not (limitdata and limitdata.rate) then
+ out "Cannot parse API rate limit."
+ return false
+ end
+ limitdata = limitdata.rate
+
+ local limit = tonumber (limitdata.limit)
+ local left = tonumber (limitdata.remaining)
+ local reset = tonumber (limitdata.reset)
+
+ out ("%d of %d Github API requests left.", left, limit)
+ if left == 0 then
+ out ("Cannot make any more API requests.")
+ if ostype == "unix" then
+ out ("Try again later at %s.", osdate ("%F %T", reset))
+ else --- windows doesn’t C99
+ out ("Try again later at %s.",
+                     osdate ("%Y-%m-%d %H:%M:%S", reset))
+ end
+ end
+ return true
+ end
+
+ local gh_tags = function ()
+ out "Fetching tags from repository, please stand by."
+ local rawtags = gh_api_request ("repos",
+ luaotfload_repo,
+ "tags")
+ local taglist = lua_of_json (rawtags)
+ if not taglist or #taglist == 0 then
+ out "Cannot parse response."
+ return false
+ end
+
+ local ntags = #taglist
+ out ("Repository contains %d tags.", ntags)
+ local _idx, latest = next (taglist)
+ out ("The most recent release is %s (revision %s).",
+ latest.name,
+ gh_shortrevision (latest.commit.sha))
+ return latest
+ end
+
+ local gh_compare = function (head, base)
+ if base == nil then
+ base = "HEAD"
+ end
+ out ("Fetching comparison between %s and %s, \z
+ please stand by.",
+ gh_shortrevision (head),
+ gh_shortrevision (base))
+ local comparison = base .. "..." .. head
+ local rawstatus = gh_api_request ("repos",
+ luaotfload_repo,
+ "compare",
+ comparison)
+ local status = lua_of_json (rawstatus)
+ if not status then
+ out "Cannot parse response for status request."
+ return false
+ end
+ return status
+ end
+
+ local gh_news = function (since)
+ local compared = gh_compare (since)
+ if not compared then
+ return false
+ end
+ local behind_by = compared.behind_by
+ local ahead_by = compared.ahead_by
+ local status = compared.status
+ out ("Comparison state: %s.", status)
+ if behind_by > 0 then
+ out ("Your Luaotfload is %d \z
+ revisions behind upstream.",
+ behind_by)
+ return behind_by
+ elseif status == "ahead" then
+ out "Since you are obviously from the future \z
+ I assume you already know the repository state."
+ else
+ out "Everything up to date. \z
+ Luaotfload is in sync with upstream."
+ end
+ return false
+ end
+
+ local gh_catchup = function (current, latest)
+ local compared = gh_compare (latest, current)
+ local ahead_by = tonumber (compared.ahead_by)
+ if ahead_by > 0 then
+ local permalink_url = compared.permalink_url
+ out ("Your Luaotfload is %d revisions \z
+ behind the most recent release.",
+ ahead_by)
+ out ("To view the commit log, visit <%s>.",
+ permalink_url)
+ out ("You can grab an up to date tarball at <%s>.",
+ release_url)
+ return true
+ else
+ out "There weren't any new releases in the meantime."
+ out "Luaotfload is up to date."
+ end
+ return false
+ end
+
+ check_upstream = function (current)
+ out "============= upstream repository ============="
+ local _succ = gh_api_checklimit ()
+ local behind = gh_news (current)
+ if behind then
+ local latest = gh_tags ()
+ local _behind = gh_catchup (current,
+ latest.commit.sha,
+ latest.name)
+ end
+ end
+
+ --- trivium: diff since the first revision as pushed by Élie
+ --- in 2009
+ --- local firstrevision = "c3ccb3ee07e0a67171c24960966ae974e0dd8e98"
+ --- check_upstream (firstrevision)
+end
+--- github api stuff end
+
+local print_envvar = function (var)
+ local val = osgetenv (var)
+ if val then
+ out ("%20s: %q", stringformat ("$%s", var), val)
+ return val
+ else
+ out ("%20s: <unset>", stringformat ("$%s", var))
+ end
+end
+
+local print_path = function (var)
+ local val = osgetenv (var)
+ if val then
+ local paths = filesplitpath (val)
+ if paths then
+ local npaths = #paths
+ if npaths == 1 then
+ out ("%20s: %q", stringformat ("$%s", var), val)
+ elseif npaths > 1 then
+ out ("%20s: <%d items>", stringformat ("$%s", var), npaths)
+ for i = 1, npaths do
+ out (" +: %q", paths[i])
+ end
+ else
+ out ("%20s: <empty>", stringformat ("$%s", var))
+ end
+ end
+ else
+ out ("%20s: <unset>", stringformat ("$%s", var))
+ end
+end
+
+local print_kpsevar = function (var)
+ var = "$" .. var
+ local val = kpseexpand_var (var)
+ if val and val ~= var then
+ out ("%20s: %q", var, val)
+ return val
+ else
+ out ("%20s: <empty or unset>", var)
+ end
+end
+
+local print_kpsepath = function (var)
+ var = "$" .. var
+ local val = kpseexpand_path (var)
+ if val and val ~= "" then
+ local paths = filesplitpath (val)
+ if paths then
+ local npaths = #paths
+ if npaths == 1 then
+ out ("%20s: %q", var, paths[1])
+ elseif npaths > 1 then
+ out ("%20s: <%d items>", var, npaths)
+ for i = 1, npaths do
+ out (" +: %q", paths[i])
+ end
+ else
+ out ("%20s: <empty>", var)
+ end
+ end
+ else
+ out ("%20s: <empty or unset>", var)
+ end
+end
+
+--- this first tests whether a variable is set and only then expands
+--- the paths; this is necessary because expand-path returns the
+--- empty string both when the variable is unset and when the
+--- directory it points to does not exist
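+---
+--- a minimal sketch of that ambiguity (the variable name is only an
+--- example):
+---
+---     kpseexpand_path ("$OSFONTDIR") --> "" if unset *or* missing
+---     kpseexpand_var ("$OSFONTDIR") --> "$OSFONTDIR" only if unset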
+
+local print_kpsepathvar = function (var)
+ local vvar = "$" .. var
+ local val = kpseexpand_var (vvar)
+ if val and val ~= vvar then
+ out ("%20s: %q", vvar, val)
+ print_kpsepath (var)
+ else
+ out ("%20s: <empty or unset>", vvar)
+ end
+end
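+
+--- for reference, the kind of report these helpers emit (the values
+--- shown are purely illustrative):
+---
+---     $OPENTYPEFONTS: <2 items>
+---                  +: "/usr/share/fonts"
+---                  +: "/usr/local/share/fonts"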
+
+local check_environment = function (errcnt)
+ out "============ environment settings ============="
+ out ("system: %s/%s", ostype, osname)
+ if ostype == "unix" and io.popen then
+ local chan = io.popen ("uname -a", "r")
+ if chan then
+ out ("info: %s", chan:read "*all")
+ chan:close ()
+ end
+ end
+
+ out "1) *shell environment*"
+ print_envvar "SHELL"
+ print_path "PATH"
+ print_path "OSFONTDIR"
+ print_envvar "USER"
+ if ostype == "windows" then
+ print_envvar "WINDIR"
+ print_envvar "CD"
+ print_path "TEMP"
+ elseif ostype == "unix" then
+ print_envvar "HOME"
+ print_envvar "PWD"
+ print_path "TMPDIR"
+ end
+
+ out "2) *kpathsea*"
+ print_kpsepathvar "OPENTYPEFONTS"
+ print_kpsepathvar "TTFONTS"
+
+ print_kpsepathvar "TEXMFCACHE"
+ print_kpsepathvar "TEXMFVAR"
+
+ --- the expansion of these can be quite large; as they aren’t
+ --- usually essential to luaotfload, we won’t dump every single
+ --- path
+ print_kpsevar "LUAINPUTS"
+ print_kpsevar "CLUAINPUTS"
+
+ return errcnt
+end
+
+local anamneses = {
+ "environment",
+ "files",
+ "index",
+ "repository",
+ "permissions"
+}
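+
+--- The --diagnose option takes either “all”/“thorough” or a comma
+--- separated subset of the anamneses above; an illustrative
+--- invocation:
+---
+---     luaotfload-tool --diagnose=environment,permissions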
+
+local diagnose = function (job)
+ local errcnt = 0
+ local asked = job.asked_diagnostics
+ if asked == "all" or asked == "thorough" then
+ asked = tabletohash (anamneses, true)
+ else
+ asked = lpegmatch (splitcomma, asked)
+ asked = tabletohash (asked, true)
+ end
+
+ if asked.index == true then
+ errcnt = check_index (errcnt)
+ asked.index = nil
+ end
+
+ if asked.environment == true then
+ errcnt = check_environment (errcnt)
+ asked.environment = nil
+ end
+
+ if asked.files == true then
+ errcnt = verify_files (errcnt)
+ asked.files = nil
+ end
+
+ if asked.permissions == true then
+ errcnt = check_permissions (errcnt)
+ asked.permissions = nil
+ end
+
+ if asked.repository == true then
+ check_upstream (status.notes.revision)
+ asked.repository = nil
+ end
+
+ local rest = next (asked)
+ if rest ~= nil then --> something unknown
+ out ("Unknown diagnostic %q.", rest)
+ end
+ if errcnt == 0 then --> success
+ out ("Everything appears to be in order, \z
+ you may sleep well.")
+ return true, false
+ end
+ out ( [[===============================================
+ WARNING
+ ===============================================
+
+ The diagnostic detected %d errors.
+
+ This version of luaotfload may have been
+ tampered with. Modified versions of the
+ luaotfload source are unsupported. Read the log
+ carefully and get a clean version from CTAN or
+ github:
+
+ × http://ctan.org/tex-archive/macros/luatex/generic/luaotfload
+ × https://github.com/lualatex/luaotfload/releases
+
+ If you are uncertain as to how to proceed, then
+ ask on the lualatex mailing list:
+
+ http://www.tug.org/mailman/listinfo/lualatex-dev
+
+ ===============================================
+]], errcnt)
+ return true, false
+end
+
+return diagnose
+
+-- vim:tw=71:sw=4:ts=4:expandtab
diff --git a/src/luaotfload-features.lua b/src/luaotfload-features.lua
new file mode 100644
index 0000000..1fb6d7c
--- /dev/null
+++ b/src/luaotfload-features.lua
@@ -0,0 +1,1238 @@
+if not modules then modules = { } end modules ["features"] = {
+ version = "2.5",
+ comment = "companion to luaotfload-main.lua",
+ author = "Hans Hagen, Khaled Hosny, Elie Roux, Philipp Gesang",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local type = type
+local next = next
+local tonumber = tonumber
+local tostring = tostring
+
+local lpeg = require "lpeg"
+local lpegmatch = lpeg.match
+local P = lpeg.P
+local R = lpeg.R
+local C = lpeg.C
+
+local table = table
+local tabletohash = table.tohash
+local setmetatableindex = table.setmetatableindex
+local insert = table.insert
+
+---[[ begin included font-ltx.lua ]]
+--- this appears to be based in part on luatex-fonts-def.lua
+
+local fonts = fonts
+local definers = fonts.definers
+local handlers = fonts.handlers
+
+local as_script, normalize
+
+if handlers then
+ normalize = handlers.otf.features.normalize
+else
+ normalize = function () end
+ as_script = true
+end
+
+
+--HH A bit of tuning for definitions.
+
+if fonts.constructors then
+ fonts.constructors.namemode = "specification" -- somehow latex needs this (changed name!) => will change into an overload
+end
+
+--[[HH--
+ tricky: we sort of bypass the parser and directly feed all into
+ the sub parser
+--HH]]--
+
+function fonts.definers.getspecification(str)
+ return "", str, "", ":", str
+end
+
+local log = luaotfload.log
+local report = log.report
+
+local stringfind = string.find
+local stringlower = string.lower
+local stringgsub = string.gsub
+local stringsub = string.sub
+local stringformat = string.format
+local stringis_empty = string.is_empty
+local mathceil = math.ceil
+
+
+---[[ begin excerpt from font-ott.lua ]]
+
+local scripts = {
+ ['arab'] = 'arabic',
+ ['armn'] = 'armenian',
+ ['bali'] = 'balinese',
+ ['beng'] = 'bengali',
+ ['bopo'] = 'bopomofo',
+ ['brai'] = 'braille',
+ ['bugi'] = 'buginese',
+ ['buhd'] = 'buhid',
+ ['byzm'] = 'byzantine music',
+ ['cans'] = 'canadian syllabics',
+ ['cher'] = 'cherokee',
+ ['copt'] = 'coptic',
+ ['cprt'] = 'cypriot syllabary',
+ ['cyrl'] = 'cyrillic',
+ ['deva'] = 'devanagari',
+ ['dsrt'] = 'deseret',
+ ['ethi'] = 'ethiopic',
+ ['geor'] = 'georgian',
+ ['glag'] = 'glagolitic',
+ ['goth'] = 'gothic',
+ ['grek'] = 'greek',
+ ['gujr'] = 'gujarati',
+ ['guru'] = 'gurmukhi',
+ ['hang'] = 'hangul',
+ ['hani'] = 'cjk ideographic',
+ ['hano'] = 'hanunoo',
+ ['hebr'] = 'hebrew',
+ ['ital'] = 'old italic',
+ ['jamo'] = 'hangul jamo',
+ ['java'] = 'javanese',
+ ['kana'] = 'hiragana and katakana',
+ ['khar'] = 'kharosthi',
+ ['khmr'] = 'khmer',
+ ['knda'] = 'kannada',
+ ['lao' ] = 'lao',
+ ['latn'] = 'latin',
+ ['limb'] = 'limbu',
+ ['linb'] = 'linear b',
+ ['math'] = 'mathematical alphanumeric symbols',
+ ['mlym'] = 'malayalam',
+ ['mong'] = 'mongolian',
+ ['musc'] = 'musical symbols',
+ ['mymr'] = 'myanmar',
+ ['nko' ] = "n'ko",
+ ['ogam'] = 'ogham',
+ ['orya'] = 'oriya',
+ ['osma'] = 'osmanya',
+ ['phag'] = 'phags-pa',
+ ['phnx'] = 'phoenician',
+ ['runr'] = 'runic',
+ ['shaw'] = 'shavian',
+ ['sinh'] = 'sinhala',
+ ['sylo'] = 'syloti nagri',
+ ['syrc'] = 'syriac',
+ ['tagb'] = 'tagbanwa',
+ ['tale'] = 'tai le',
+ ['talu'] = 'tai lu',
+ ['taml'] = 'tamil',
+ ['telu'] = 'telugu',
+ ['tfng'] = 'tifinagh',
+ ['tglg'] = 'tagalog',
+ ['thaa'] = 'thaana',
+ ['thai'] = 'thai',
+ ['tibt'] = 'tibetan',
+ ['ugar'] = 'ugaritic cuneiform',
+ ['xpeo'] = 'old persian cuneiform',
+ ['xsux'] = 'sumero-akkadian cuneiform',
+ ['yi' ] = 'yi',
+}
+
+local languages = {
+ ['aba'] = 'abaza',
+ ['abk'] = 'abkhazian',
+ ['ady'] = 'adyghe',
+ ['afk'] = 'afrikaans',
+ ['afr'] = 'afar',
+ ['agw'] = 'agaw',
+ ['als'] = 'alsatian',
+ ['alt'] = 'altai',
+ ['amh'] = 'amharic',
+ ['ara'] = 'arabic',
+ ['ari'] = 'aari',
+ ['ark'] = 'arakanese',
+ ['asm'] = 'assamese',
+ ['ath'] = 'athapaskan',
+ ['avr'] = 'avar',
+ ['awa'] = 'awadhi',
+ ['aym'] = 'aymara',
+ ['aze'] = 'azeri',
+ ['bad'] = 'badaga',
+ ['bag'] = 'baghelkhandi',
+ ['bal'] = 'balkar',
+ ['bau'] = 'baule',
+ ['bbr'] = 'berber',
+ ['bch'] = 'bench',
+ ['bcr'] = 'bible cree',
+ ['bel'] = 'belarussian',
+ ['bem'] = 'bemba',
+ ['ben'] = 'bengali',
+ ['bgr'] = 'bulgarian',
+ ['bhi'] = 'bhili',
+ ['bho'] = 'bhojpuri',
+ ['bik'] = 'bikol',
+ ['bil'] = 'bilen',
+ ['bkf'] = 'blackfoot',
+ ['bli'] = 'balochi',
+ ['bln'] = 'balante',
+ ['blt'] = 'balti',
+ ['bmb'] = 'bambara',
+ ['bml'] = 'bamileke',
+ ['bos'] = 'bosnian',
+ ['bre'] = 'breton',
+ ['brh'] = 'brahui',
+ ['bri'] = 'braj bhasha',
+ ['brm'] = 'burmese',
+ ['bsh'] = 'bashkir',
+ ['bti'] = 'beti',
+ ['cat'] = 'catalan',
+ ['ceb'] = 'cebuano',
+ ['che'] = 'chechen',
+ ['chg'] = 'chaha gurage',
+ ['chh'] = 'chattisgarhi',
+ ['chi'] = 'chichewa',
+ ['chk'] = 'chukchi',
+ ['chp'] = 'chipewyan',
+ ['chr'] = 'cherokee',
+ ['chu'] = 'chuvash',
+ ['cmr'] = 'comorian',
+ ['cop'] = 'coptic',
+ ['cos'] = 'corsican',
+ ['cre'] = 'cree',
+ ['crr'] = 'carrier',
+ ['crt'] = 'crimean tatar',
+ ['csl'] = 'church slavonic',
+ ['csy'] = 'czech',
+ ['dan'] = 'danish',
+ ['dar'] = 'dargwa',
+ ['dcr'] = 'woods cree',
+ ['deu'] = 'german',
+ ['dgr'] = 'dogri',
+ ['div'] = 'divehi',
+ ['djr'] = 'djerma',
+ ['dng'] = 'dangme',
+ ['dnk'] = 'dinka',
+ ['dri'] = 'dari',
+ ['dun'] = 'dungan',
+ ['dzn'] = 'dzongkha',
+ ['ebi'] = 'ebira',
+ ['ecr'] = 'eastern cree',
+ ['edo'] = 'edo',
+ ['efi'] = 'efik',
+ ['ell'] = 'greek',
+ ['eng'] = 'english',
+ ['erz'] = 'erzya',
+ ['esp'] = 'spanish',
+ ['eti'] = 'estonian',
+ ['euq'] = 'basque',
+ ['evk'] = 'evenki',
+ ['evn'] = 'even',
+ ['ewe'] = 'ewe',
+ ['fan'] = 'french antillean',
+ ['far'] = 'farsi',
+ ['fin'] = 'finnish',
+ ['fji'] = 'fijian',
+ ['fle'] = 'flemish',
+ ['fne'] = 'forest nenets',
+ ['fon'] = 'fon',
+ ['fos'] = 'faroese',
+ ['fra'] = 'french',
+ ['fri'] = 'frisian',
+ ['frl'] = 'friulian',
+ ['fta'] = 'futa',
+ ['ful'] = 'fulani',
+ ['gad'] = 'ga',
+ ['gae'] = 'gaelic',
+ ['gag'] = 'gagauz',
+ ['gal'] = 'galician',
+ ['gar'] = 'garshuni',
+ ['gaw'] = 'garhwali',
+ ['gez'] = "ge'ez",
+ ['gil'] = 'gilyak',
+ ['gmz'] = 'gumuz',
+ ['gon'] = 'gondi',
+ ['grn'] = 'greenlandic',
+ ['gro'] = 'garo',
+ ['gua'] = 'guarani',
+ ['guj'] = 'gujarati',
+ ['hai'] = 'haitian',
+ ['hal'] = 'halam',
+ ['har'] = 'harauti',
+ ['hau'] = 'hausa',
+ ['haw'] = 'hawaiian',
+ ['hbn'] = 'hammer-banna',
+ ['hil'] = 'hiligaynon',
+ ['hin'] = 'hindi',
+ ['hma'] = 'high mari',
+ ['hnd'] = 'hindko',
+ ['ho'] = 'ho',
+ ['hri'] = 'harari',
+ ['hrv'] = 'croatian',
+ ['hun'] = 'hungarian',
+ ['hye'] = 'armenian',
+ ['ibo'] = 'igbo',
+ ['ijo'] = 'ijo',
+ ['ilo'] = 'ilokano',
+ ['ind'] = 'indonesian',
+ ['ing'] = 'ingush',
+ ['inu'] = 'inuktitut',
+ ['iri'] = 'irish',
+ ['irt'] = 'irish traditional',
+ ['isl'] = 'icelandic',
+ ['ism'] = 'inari sami',
+ ['ita'] = 'italian',
+ ['iwr'] = 'hebrew',
+ ['jan'] = 'japanese',
+ ['jav'] = 'javanese',
+ ['jii'] = 'yiddish',
+ ['jud'] = 'judezmo',
+ ['jul'] = 'jula',
+ ['kab'] = 'kabardian',
+ ['kac'] = 'kachchi',
+ ['kal'] = 'kalenjin',
+ ['kan'] = 'kannada',
+ ['kar'] = 'karachay',
+ ['kat'] = 'georgian',
+ ['kaz'] = 'kazakh',
+ ['keb'] = 'kebena',
+ ['kge'] = 'khutsuri georgian',
+ ['kha'] = 'khakass',
+ ['khk'] = 'khanty-kazim',
+ ['khm'] = 'khmer',
+ ['khs'] = 'khanty-shurishkar',
+ ['khv'] = 'khanty-vakhi',
+ ['khw'] = 'khowar',
+ ['kik'] = 'kikuyu',
+ ['kir'] = 'kirghiz',
+ ['kis'] = 'kisii',
+ ['kkn'] = 'kokni',
+ ['klm'] = 'kalmyk',
+ ['kmb'] = 'kamba',
+ ['kmn'] = 'kumaoni',
+ ['kmo'] = 'komo',
+ ['kms'] = 'komso',
+ ['knr'] = 'kanuri',
+ ['kod'] = 'kodagu',
+ ['koh'] = 'korean old hangul',
+ ['kok'] = 'konkani',
+ ['kon'] = 'kikongo',
+ ['kop'] = 'komi-permyak',
+ ['kor'] = 'korean',
+ ['koz'] = 'komi-zyrian',
+ ['kpl'] = 'kpelle',
+ ['kri'] = 'krio',
+ ['krk'] = 'karakalpak',
+ ['krl'] = 'karelian',
+ ['krm'] = 'karaim',
+ ['krn'] = 'karen',
+ ['krt'] = 'koorete',
+ ['ksh'] = 'kashmiri',
+ ['ksi'] = 'khasi',
+ ['ksm'] = 'kildin sami',
+ ['kui'] = 'kui',
+ ['kul'] = 'kulvi',
+ ['kum'] = 'kumyk',
+ ['kur'] = 'kurdish',
+ ['kuu'] = 'kurukh',
+ ['kuy'] = 'kuy',
+ ['kyk'] = 'koryak',
+ ['lad'] = 'ladin',
+ ['lah'] = 'lahuli',
+ ['lak'] = 'lak',
+ ['lam'] = 'lambani',
+ ['lao'] = 'lao',
+ ['lat'] = 'latin',
+ ['laz'] = 'laz',
+ ['lcr'] = 'l-cree',
+ ['ldk'] = 'ladakhi',
+ ['lez'] = 'lezgi',
+ ['lin'] = 'lingala',
+ ['lma'] = 'low mari',
+ ['lmb'] = 'limbu',
+ ['lmw'] = 'lomwe',
+ ['lsb'] = 'lower sorbian',
+ ['lsm'] = 'lule sami',
+ ['lth'] = 'lithuanian',
+ ['ltz'] = 'luxembourgish',
+ ['lub'] = 'luba',
+ ['lug'] = 'luganda',
+ ['luh'] = 'luhya',
+ ['luo'] = 'luo',
+ ['lvi'] = 'latvian',
+ ['maj'] = 'majang',
+ ['mak'] = 'makua',
+ ['mal'] = 'malayalam traditional',
+ ['man'] = 'mansi',
+ ['map'] = 'mapudungun',
+ ['mar'] = 'marathi',
+ ['maw'] = 'marwari',
+ ['mbn'] = 'mbundu',
+ ['mch'] = 'manchu',
+ ['mcr'] = 'moose cree',
+ ['mde'] = 'mende',
+ ['men'] = "me'en",
+ ['miz'] = 'mizo',
+ ['mkd'] = 'macedonian',
+ ['mle'] = 'male',
+ ['mlg'] = 'malagasy',
+ ['mln'] = 'malinke',
+ ['mlr'] = 'malayalam reformed',
+ ['mly'] = 'malay',
+ ['mnd'] = 'mandinka',
+ ['mng'] = 'mongolian',
+ ['mni'] = 'manipuri',
+ ['mnk'] = 'maninka',
+ ['mnx'] = 'manx gaelic',
+ ['moh'] = 'mohawk',
+ ['mok'] = 'moksha',
+ ['mol'] = 'moldavian',
+ ['mon'] = 'mon',
+ ['mor'] = 'moroccan',
+ ['mri'] = 'maori',
+ ['mth'] = 'maithili',
+ ['mts'] = 'maltese',
+ ['mun'] = 'mundari',
+ ['nag'] = 'naga-assamese',
+ ['nan'] = 'nanai',
+ ['nas'] = 'naskapi',
+ ['ncr'] = 'n-cree',
+ ['ndb'] = 'ndebele',
+ ['ndg'] = 'ndonga',
+ ['nep'] = 'nepali',
+ ['new'] = 'newari',
+ ['ngr'] = 'nagari',
+ ['nhc'] = 'norway house cree',
+ ['nis'] = 'nisi',
+ ['niu'] = 'niuean',
+ ['nkl'] = 'nkole',
+ ['nko'] = "n'ko",
+ ['nld'] = 'dutch',
+ ['nog'] = 'nogai',
+ ['nor'] = 'norwegian',
+ ['nsm'] = 'northern sami',
+ ['nta'] = 'northern tai',
+ ['nto'] = 'esperanto',
+ ['nyn'] = 'nynorsk',
+ ['oci'] = 'occitan',
+ ['ocr'] = 'oji-cree',
+ ['ojb'] = 'ojibway',
+ ['ori'] = 'oriya',
+ ['oro'] = 'oromo',
+ ['oss'] = 'ossetian',
+ ['paa'] = 'palestinian aramaic',
+ ['pal'] = 'pali',
+ ['pan'] = 'punjabi',
+ ['pap'] = 'palpa',
+ ['pas'] = 'pashto',
+ ['pgr'] = 'polytonic greek',
+ ['pil'] = 'pilipino',
+ ['plg'] = 'palaung',
+ ['plk'] = 'polish',
+ ['pro'] = 'provencal',
+ ['ptg'] = 'portuguese',
+ ['qin'] = 'chin',
+ ['raj'] = 'rajasthani',
+ ['rbu'] = 'russian buriat',
+ ['rcr'] = 'r-cree',
+ ['ria'] = 'riang',
+ ['rms'] = 'rhaeto-romanic',
+ ['rom'] = 'romanian',
+ ['roy'] = 'romany',
+ ['rsy'] = 'rusyn',
+ ['rua'] = 'ruanda',
+ ['rus'] = 'russian',
+ ['sad'] = 'sadri',
+ ['san'] = 'sanskrit',
+ ['sat'] = 'santali',
+ ['say'] = 'sayisi',
+ ['sek'] = 'sekota',
+ ['sel'] = 'selkup',
+ ['sgo'] = 'sango',
+ ['shn'] = 'shan',
+ ['sib'] = 'sibe',
+ ['sid'] = 'sidamo',
+ ['sig'] = 'silte gurage',
+ ['sks'] = 'skolt sami',
+ ['sky'] = 'slovak',
+ ['sla'] = 'slavey',
+ ['slv'] = 'slovenian',
+ ['sml'] = 'somali',
+ ['smo'] = 'samoan',
+ ['sna'] = 'sena',
+ ['snd'] = 'sindhi',
+ ['snh'] = 'sinhalese',
+ ['snk'] = 'soninke',
+ ['sog'] = 'sodo gurage',
+ ['sot'] = 'sotho',
+ ['sqi'] = 'albanian',
+ ['srb'] = 'serbian',
+ ['srk'] = 'saraiki',
+ ['srr'] = 'serer',
+ ['ssl'] = 'south slavey',
+ ['ssm'] = 'southern sami',
+ ['sur'] = 'suri',
+ ['sva'] = 'svan',
+ ['sve'] = 'swedish',
+ ['swa'] = 'swadaya aramaic',
+ ['swk'] = 'swahili',
+ ['swz'] = 'swazi',
+ ['sxt'] = 'sutu',
+ ['syr'] = 'syriac',
+ ['tab'] = 'tabasaran',
+ ['taj'] = 'tajiki',
+ ['tam'] = 'tamil',
+ ['tat'] = 'tatar',
+ ['tcr'] = 'th-cree',
+ ['tel'] = 'telugu',
+ ['tgn'] = 'tongan',
+ ['tgr'] = 'tigre',
+ ['tgy'] = 'tigrinya',
+ ['tha'] = 'thai',
+ ['tht'] = 'tahitian',
+ ['tib'] = 'tibetan',
+ ['tkm'] = 'turkmen',
+ ['tmn'] = 'temne',
+ ['tna'] = 'tswana',
+ ['tne'] = 'tundra nenets',
+ ['tng'] = 'tonga',
+ ['tod'] = 'todo',
+ ['trk'] = 'turkish',
+ ['tsg'] = 'tsonga',
+ ['tua'] = 'turoyo aramaic',
+ ['tul'] = 'tulu',
+ ['tuv'] = 'tuvin',
+ ['twi'] = 'twi',
+ ['udm'] = 'udmurt',
+ ['ukr'] = 'ukrainian',
+ ['urd'] = 'urdu',
+ ['usb'] = 'upper sorbian',
+ ['uyg'] = 'uyghur',
+ ['uzb'] = 'uzbek',
+ ['ven'] = 'venda',
+ ['vit'] = 'vietnamese',
+ ['wa' ] = 'wa',
+ ['wag'] = 'wagdi',
+ ['wcr'] = 'west-cree',
+ ['wel'] = 'welsh',
+ ['wlf'] = 'wolof',
+ ['xbd'] = 'tai lue',
+ ['xhs'] = 'xhosa',
+ ['yak'] = 'yakut',
+ ['yba'] = 'yoruba',
+ ['ycr'] = 'y-cree',
+ ['yic'] = 'yi classic',
+ ['yim'] = 'yi modern',
+ ['zhh'] = 'chinese hong kong',
+ ['zhp'] = 'chinese phonetic',
+ ['zhs'] = 'chinese simplified',
+ ['zht'] = 'chinese traditional',
+ ['znd'] = 'zande',
+ ['zul'] = 'zulu'
+}
+
+local features = {
+ ['aalt'] = 'access all alternates',
+ ['abvf'] = 'above-base forms',
+ ['abvm'] = 'above-base mark positioning',
+ ['abvs'] = 'above-base substitutions',
+ ['afrc'] = 'alternative fractions',
+ ['akhn'] = 'akhands',
+ ['blwf'] = 'below-base forms',
+ ['blwm'] = 'below-base mark positioning',
+ ['blws'] = 'below-base substitutions',
+ ['c2pc'] = 'petite capitals from capitals',
+ ['c2sc'] = 'small capitals from capitals',
+ ['calt'] = 'contextual alternates',
+ ['case'] = 'case-sensitive forms',
+ ['ccmp'] = 'glyph composition/decomposition',
+ ['cjct'] = 'conjunct forms',
+ ['clig'] = 'contextual ligatures',
+ ['cpsp'] = 'capital spacing',
+ ['cswh'] = 'contextual swash',
+ ['curs'] = 'cursive positioning',
+ ['dflt'] = 'default processing',
+ ['dist'] = 'distances',
+ ['dlig'] = 'discretionary ligatures',
+ ['dnom'] = 'denominators',
+ ['dtls'] = 'dotless forms', -- math
+ ['expt'] = 'expert forms',
+ ['falt'] = 'final glyph alternates',
+ ['fin2'] = 'terminal forms #2',
+ ['fin3'] = 'terminal forms #3',
+ ['fina'] = 'terminal forms',
+ ['flac'] = 'flattened accents over capitals', -- math
+ ['frac'] = 'fractions',
+ ['fwid'] = 'full width',
+ ['half'] = 'half forms',
+ ['haln'] = 'halant forms',
+ ['halt'] = 'alternate half width',
+ ['hist'] = 'historical forms',
+ ['hkna'] = 'horizontal kana alternates',
+ ['hlig'] = 'historical ligatures',
+ ['hngl'] = 'hangul',
+ ['hojo'] = 'hojo kanji forms',
+ ['hwid'] = 'half width',
+ ['init'] = 'initial forms',
+ ['isol'] = 'isolated forms',
+ ['ital'] = 'italics',
+ ['jalt'] = 'justification alternatives',
+ ['jp04'] = 'jis2004 forms',
+ ['jp78'] = 'jis78 forms',
+ ['jp83'] = 'jis83 forms',
+ ['jp90'] = 'jis90 forms',
+ ['kern'] = 'kerning',
+ ['lfbd'] = 'left bounds',
+ ['liga'] = 'standard ligatures',
+ ['ljmo'] = 'leading jamo forms',
+ ['lnum'] = 'lining figures',
+ ['locl'] = 'localized forms',
+ ['mark'] = 'mark positioning',
+ ['med2'] = 'medial forms #2',
+ ['medi'] = 'medial forms',
+ ['mgrk'] = 'mathematical greek',
+ ['mkmk'] = 'mark to mark positioning',
+ ['mset'] = 'mark positioning via substitution',
+ ['nalt'] = 'alternate annotation forms',
+ ['nlck'] = 'nlc kanji forms',
+ ['nukt'] = 'nukta forms',
+ ['numr'] = 'numerators',
+ ['onum'] = 'old style figures',
+ ['opbd'] = 'optical bounds',
+ ['ordn'] = 'ordinals',
+ ['ornm'] = 'ornaments',
+ ['palt'] = 'proportional alternate width',
+ ['pcap'] = 'petite capitals',
+ ['pnum'] = 'proportional figures',
+ ['pref'] = 'pre-base forms',
+ ['pres'] = 'pre-base substitutions',
+ ['pstf'] = 'post-base forms',
+ ['psts'] = 'post-base substitutions',
+ ['pwid'] = 'proportional widths',
+ ['qwid'] = 'quarter widths',
+ ['rand'] = 'randomize',
+ ['rkrf'] = 'rakar forms',
+ ['rlig'] = 'required ligatures',
+ ['rphf'] = 'reph form',
+ ['rtbd'] = 'right bounds',
+ ['rtla'] = 'right-to-left alternates',
+ ['rtlm'] = 'right to left math', -- math
+ ['ruby'] = 'ruby notation forms',
+ ['salt'] = 'stylistic alternates',
+ ['sinf'] = 'scientific inferiors',
+ ['size'] = 'optical size',
+ ['smcp'] = 'small capitals',
+ ['smpl'] = 'simplified forms',
+ -- ['ss01'] = 'stylistic set 1',
+ -- ['ss02'] = 'stylistic set 2',
+ -- ['ss03'] = 'stylistic set 3',
+ -- ['ss04'] = 'stylistic set 4',
+ -- ['ss05'] = 'stylistic set 5',
+ -- ['ss06'] = 'stylistic set 6',
+ -- ['ss07'] = 'stylistic set 7',
+ -- ['ss08'] = 'stylistic set 8',
+ -- ['ss09'] = 'stylistic set 9',
+ -- ['ss10'] = 'stylistic set 10',
+ -- ['ss11'] = 'stylistic set 11',
+ -- ['ss12'] = 'stylistic set 12',
+ -- ['ss13'] = 'stylistic set 13',
+ -- ['ss14'] = 'stylistic set 14',
+ -- ['ss15'] = 'stylistic set 15',
+ -- ['ss16'] = 'stylistic set 16',
+ -- ['ss17'] = 'stylistic set 17',
+ -- ['ss18'] = 'stylistic set 18',
+ -- ['ss19'] = 'stylistic set 19',
+ -- ['ss20'] = 'stylistic set 20',
+ ['ssty'] = 'script style', -- math
+ ['subs'] = 'subscript',
+ ['sups'] = 'superscript',
+ ['swsh'] = 'swash',
+ ['titl'] = 'titling',
+ ['tjmo'] = 'trailing jamo forms',
+ ['tnam'] = 'traditional name forms',
+ ['tnum'] = 'tabular figures',
+ ['trad'] = 'traditional forms',
+ ['twid'] = 'third widths',
+ ['unic'] = 'unicase',
+ ['valt'] = 'alternate vertical metrics',
+ ['vatu'] = 'vattu variants',
+ ['vert'] = 'vertical writing',
+ ['vhal'] = 'alternate vertical half metrics',
+ ['vjmo'] = 'vowel jamo forms',
+ ['vkna'] = 'vertical kana alternates',
+ ['vkrn'] = 'vertical kerning',
+ ['vpal'] = 'proportional alternate vertical metrics',
+ ['vrt2'] = 'vertical rotation',
+ ['zero'] = 'slashed zero',
+
+ ['trep'] = 'traditional tex replacements',
+ ['tlig'] = 'traditional tex ligatures',
+
+ ['ss..'] = 'stylistic set ..',
+ ['cv..'] = 'character variant ..',
+ ['js..'] = 'justification ..',
+
+ ["dv.."] = "devanagari ..",
+}
+
+local baselines = {
+ ['hang'] = 'hanging baseline',
+ ['icfb'] = 'ideographic character face bottom edge baseline',
+ ['icft'] = 'ideographic character face top edge baseline',
+ ['ideo'] = 'ideographic em-box bottom edge baseline',
+ ['idtp'] = 'ideographic em-box top edge baseline',
+ ['math'] = 'mathematical centered baseline',
+ ['romn'] = 'roman baseline'
+}
+
+local swapped = function (h)
+ local r = { }
+ for k, v in next, h do
+ r[stringgsub(v,"[^a-z0-9]","")] = k -- is already lower
+ end
+ return r
+end
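+
+--- sketch of what “swapped” produces (the entries shown are examples
+--- only): swapped { latn = "latin", nko = "n'ko" }
+--- --> { latin = "latn", nko = "nko" }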
+
+local verbosescripts = swapped(scripts )
+local verboselanguages = swapped(languages)
+local verbosefeatures = swapped(features )
+local verbosebaselines = swapped(baselines)
+
+---[[ end excerpt from font-ott.lua ]]
+
+--[[doc--
+
+ As discussed, we will issue a warning because of incomplete support
+ when one of the scripts below is requested.
+
+ Reference: https://github.com/lualatex/luaotfload/issues/31
+
+--doc]]--
+
+local support_incomplete = tabletohash({
+ "deva", "beng", "guru", "gujr",
+ "orya", "taml", "telu", "knda",
+ "mlym", "sinh",
+}, true)
+
+--[[doc--
+
+ Which features are active by default depends on the script
+ requested.
+
+--doc]]--
+
+--- (string, string) dict -> (string, string) dict
+local set_default_features = function (speclist)
+ local default_features = luaotfload.features
+
+ speclist = speclist or { }
+ speclist[""] = nil --- invalid options stub
+
+ --- handle language tag
+ local language = speclist.language
+ if language then --- already lowercase at this point
+ language = stringgsub(language, "[^a-z0-9]", "")
+ language = rawget(verboselanguages, language) -- srsly, rawget?
+ or (languages[language] and language)
+ or "dflt"
+ else
+ language = "dflt"
+ end
+ speclist.language = language
+
+ --- handle script tag
+ local script = speclist.script
+ if script then
+ script = stringgsub(script, "[^a-z0-9]","")
+ script = rawget(verbosescripts, script)
+ or (scripts[script] and script)
+ or "dflt"
+ if support_incomplete[script] then
+ report("log", 0, "load",
+ "Support for the requested script: "
+ .. "%q may be incomplete.", script)
+ end
+ else
+ script = "dflt"
+ end
+ speclist.script = script
+
+ report("log", 1, "load",
+ "Auto-selecting default features for script: %s.",
+ script)
+
+ local requested = default_features.defaults[script]
+ if not requested then
+ report("log", 1, "load",
+ "No default features for script %q, falling back to \"dflt\".",
+ script)
+ requested = default_features.defaults.dflt
+ end
+
+ for feat, state in next, requested do
+ if not speclist[feat] then speclist[feat] = state end
+ end
+
+ for feat, state in next, default_features.global do
+ --- This is primarily intended for setting node
+ --- mode unless “base” is requested, as stated
+ --- in the manual.
+ if not speclist[feat] then speclist[feat] = state end
+ end
+ return speclist
+end
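+
+--[[doc--
+
+ A sketch of the effect (the concrete defaults come from
+ luaotfload.features.defaults and may differ):
+
+ set_default_features { script = "latin", language = "eng" }
+ --> { script = "latn", language = "eng" } plus the default
+ feature assignments registered for the “latn” script
+
+--doc]]--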
+
+local import_values = {
+ --- That’s what the 1.x parser did, not quite as graciously,
+ --- with an array of branch expressions.
+ -- "style", "optsize",--> from slashed notation; handled otherwise
+ { "lookup", false },
+ { "sub", false },
+ { "mode", true },
+}
+
+local lookup_types = { "anon", "file", "kpse", "my", "name", "path" }
+
+local select_lookup = function (request)
+ for i=1, #lookup_types do
+ local lookup = lookup_types[i]
+ local value = request[lookup]
+ if value then
+ return lookup, value
+ end
+ end
+end
+
+local supported = {
+ b = "b",
+ i = "i",
+ bi = "bi",
+ aat = false,
+ icu = false,
+ gr = false,
+}
+
+--- (string | (string * string) | bool) list -> (string * number)
+local handle_slashed = function (modifiers)
+ local style, optsize
+ for i=1, #modifiers do
+ local mod = modifiers[i]
+ if type(mod) == "table" and mod[1] == "optsize" then --> optical size
+ optsize = tonumber(mod[2])
+ elseif mod == false then
+ --- ignore
+ report("log", 0,
+ "load", "unsupported font option: %s", v)
+ elseif supported[mod] then
+ style = supported[mod]
+ elseif not stringis_empty(mod) then
+ style = stringgsub(mod, "[^%a%d]", "")
+ end
+ end
+ return style, optsize
+end
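+
+--- usage sketch (values are examples only):
+--- handle_slashed { "b", { "optsize", "11" } } --> "b", 11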
+
+local extract_subfont
+do
+ local eof = P(-1)
+ local digit = R"09"
+ --- Theoretically a valid subfont address can be up to ten
+ --- digits long.
+ local sub_expr = P"(" * C(digit^1) * P")" * eof
+ local full_path = C(P(1 - sub_expr)^1)
+ extract_subfont = full_path * sub_expr
+end
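+
+--- usage sketch: lpegmatch (extract_subfont, "foo/bar.ttc(2)")
+--- yields "foo/bar.ttc", "2"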
+
+--- spec -> spec
+local handle_request = function (specification)
+ local request = lpegmatch(luaotfload.parsers.font_request,
+ specification.specification)
+ if not request then
+ --- happens when called with an absolute path
+ --- in an anonymous lookup;
+ --- we try to behave as friendly as possible
+ --- and just go with it ...
+ report("log", 1, "load", "invalid request %q of type anon",
+ specification.specification)
+ report("log", 1, "load",
+ "use square bracket syntax or consult the documentation.")
+ --- The result of \fontname must be re-feedable into \font
+ --- which is expected by the Latex font mechanism. Now this
+ --- is complicated with TTC fonts that need to pass the
+ --- number of the requested subfont along with the file name.
+ --- Thus we test whether the request is a bare path only or
+ --- ends in a subfont expression (decimal digits inside
+ --- parentheses).
+ --- https://github.com/lualatex/luaotfload/issues/57
+ local fullpath, sub = lpegmatch(extract_subfont,
+ specification.specification)
+ if fullpath and sub then
+ specification.sub = tonumber(sub)
+ specification.name = fullpath
+ else
+ specification.name = specification.specification
+ end
+ specification.lookup = "path"
+ return specification
+ end
+ local lookup, name = select_lookup(request)
+ request.features = set_default_features(request.features)
+
+ if name then
+ specification.name = name
+ specification.lookup = lookup or specification.lookup
+ end
+
+ if request.modifiers then
+ local style, optsize = handle_slashed(request.modifiers)
+ specification.style, specification.optsize = style, optsize
+ end
+
+ for n=1, #import_values do
+ local feat = import_values[n][1]
+ local keep = import_values[n][2]
+ local newvalue = request.features[feat]
+ if newvalue then
+ specification[feat] = request.features[feat]
+ if not keep then
+ request.features[feat] = nil
+ end
+ end
+ end
+
+ --- The next line sets the “rand” feature to “random”; I haven’t
+ --- investigated it any further (luatex-fonts-ext), so it will
+ --- just stay here.
+ specification.features.normal = normalize (request.features)
+ return specification
+end
+
+if as_script == true then --- skip the remainder of the file
+ fonts.names.handle_request = handle_request
+ report ("log", 5, "load",
+ "Exiting early from luaotfload-features.lua.")
+ return
+else
+ local registersplit = definers.registersplit
+ registersplit (":", handle_request, "cryptic")
+ registersplit ("", handle_request, "more cryptic") -- catches \font\text=[names]
+end
+
+---[[ end included font-ltx.lua ]]
+
+--[[doc--
+This uses the code from luatex-fonts-merged (<- font-otc.lua) instead
+of the removed luaotfload-font-otc.lua.
+
+TODO find out how far we get setting features without these lines,
+relying on luatex-fonts only (it *does* handle features somehow, after
+all).
+--doc]]--
+
+-- we assume that the other otf stuff is loaded already
+
+---[[ begin snippet from font-otc.lua ]]
+local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
+local report_otf = logs.reporter("fonts","otf loading")
+
+local otf = fonts.handlers.otf
+local registerotffeature = otf.features.register
+
+--[[HH--
+
+ In the userdata interface we can no longer tweak the loaded font as
+ conveniently as before. For instance, instead of pushing extra data
+ into the table using the original structure, we now have to operate on
+ the mkiv representation. And as the fontloader interface is modelled
+ after fontforge we cannot change that one too much either.
+
+--HH]]--
+
+local types = {
+ substitution = "gsub_single",
+ ligature = "gsub_ligature",
+ alternate = "gsub_alternate",
+}
+
+setmetatableindex(types, function(t,k) t[k] = k return k end) -- "key"
+
+local everywhere = { ["*"] = { ["*"] = true } } -- or: { ["*"] = { "*" } }
+local noflags = { }
+
+local function addfeature(data,feature,specifications)
+ local descriptions = data.descriptions
+ local resources = data.resources
+ local lookups = resources.lookups
+ local gsubfeatures = resources.features.gsub
+ if gsubfeatures and gsubfeatures[feature] then
+ -- already present
+ else
+ local sequences = resources.sequences
+ local fontfeatures = resources.features
+ local unicodes = resources.unicodes
+ local lookuptypes = resources.lookuptypes
+ local splitter = lpeg.splitter(" ",unicodes)
+ local done = 0
+ local skip = 0
+ if not specifications[1] then
+ -- so we accept a one entry specification
+ specifications = { specifications }
+ end
+ -- subtables are tables themselves but we also accept flattened singular subtables
+ for s=1,#specifications do
+ local specification = specifications[s]
+ local valid = specification.valid
+ if not valid or valid(data,specification,feature) then
+ local initialize = specification.initialize
+ if initialize then
+ -- when false is returned we initialize only once
+ specification.initialize = initialize(specification) and initialize or nil
+ end
+ local askedfeatures = specification.features or everywhere
+ local subtables = specification.subtables or { specification.data } or { }
+ local featuretype = types[specification.type or "substitution"]
+ local featureflags = specification.flags or noflags
+ local featureorder = specification.order or { feature }
+ local added = false
+ local featurename = stringformat("ctx_%s_%s",feature,s)
+ local st = { }
+ for t=1,#subtables do
+ local list = subtables[t]
+ local full = stringformat("%s_%s",featurename,t)
+ st[t] = full
+ if featuretype == "gsub_ligature" then
+ lookuptypes[full] = "ligature"
+ for code, ligature in next, list do
+ local unicode = tonumber(code) or unicodes[code]
+ local description = descriptions[unicode]
+ if description then
+ local slookups = description.slookups
+ if type(ligature) == "string" then
+ ligature = { lpegmatch(splitter,ligature) }
+ end
+ local present = true
+ for i=1,#ligature do
+ if not descriptions[ligature[i]] then
+ present = false
+ break
+ end
+ end
+ if present then
+ if slookups then
+ slookups[full] = ligature
+ else
+ description.slookups = { [full] = ligature }
+ end
+ done, added = done + 1, true
+ else
+ skip = skip + 1
+ end
+ end
+ end
+ elseif featuretype == "gsub_single" then
+ lookuptypes[full] = "substitution"
+ for code, replacement in next, list do
+ local unicode = tonumber(code) or unicodes[code]
+ local description = descriptions[unicode]
+ if description then
+ local slookups = description.slookups
+ replacement = tonumber(replacement) or unicodes[replacement]
+ if descriptions[replacement] then
+ if slookups then
+ slookups[full] = replacement
+ else
+ description.slookups = { [full] = replacement }
+ end
+ done, added = done + 1, true
+ end
+ end
+ end
+ end
+ end
+ if added then
+ -- script = { lang1, lang2, lang3 } or script = { lang1 = true, ... }
+ for k, v in next, askedfeatures do
+ if v[1] then
+ askedfeatures[k] = tabletohash(v)
+ end
+ end
+ local sequence = {
+ chain = 0,
+ features = { [feature] = askedfeatures },
+ flags = featureflags,
+ name = featurename,
+ order = featureorder,
+ subtables = st,
+ type = featuretype,
+ }
+ if specification.prepend then
+ insert(sequences,1,sequence)
+ else
+ insert(sequences,sequence)
+ end
+ -- register in metadata (merge as there can be a few)
+ if not gsubfeatures then
+ gsubfeatures = { }
+ fontfeatures.gsub = gsubfeatures
+ end
+ local k = gsubfeatures[feature]
+ if not k then
+ k = { }
+ gsubfeatures[feature] = k
+ end
+ for script, languages in next, askedfeatures do
+ local kk = k[script]
+ if not kk then
+ kk = { }
+ k[script] = kk
+ end
+ for language, value in next, languages do
+ kk[language] = value
+ end
+ end
+ end
+ end
+ end
+ if trace_loading then
+ report_otf("registering feature %a, affected glyphs %a, skipped glyphs %a",feature,done,skip)
+ end
+ end
+end
+
+
+otf.enhancers.addfeature = addfeature
+
+local extrafeatures = { }
+
+function otf.addfeature(name,specification)
+ extrafeatures[name] = specification
+end
+
+local function enhance(data,filename,raw)
+ for feature, specification in next, extrafeatures do
+ addfeature(data,feature,specification)
+ end
+end
+
+otf.enhancers.register("check extra features",enhance)
+
+---[[ end snippet from font-otc.lua ]]
+
+local tlig = {
+ {
+ type = "substitution",
+ features = everywhere,
+ data = {
+ [0x0022] = 0x201D, -- quotedblright
+ [0x0027] = 0x2019, -- quoteright
+ [0x0060] = 0x2018, -- quoteleft
+ },
+ flags = { },
+ order = { "tlig" },
+ prepend = true,
+ },
+ {
+ type = "ligature",
+ features = everywhere,
+ data = {
+ [0x2013] = {0x002D, 0x002D}, -- endash
+ [0x2014] = {0x002D, 0x002D, 0x002D}, -- emdash
+ [0x201C] = {0x2018, 0x2018}, -- quotedblleft
+ [0x201D] = {0x2019, 0x2019}, -- quotedblright
+ [0x00A1] = {0x0021, 0x2018}, -- exclamdown
+ [0x00BF] = {0x003F, 0x2018}, -- questiondown
+ --- next three originate in T1 encoding; Xetex applies
+ --- them too
+ [0x201E] = {0x002C, 0x002C}, -- quotedblbase
+ [0x00AB] = {0x003C, 0x003C}, -- LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
+ [0x00BB] = {0x003E, 0x003E}, -- RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
+ },
+ flags = { },
+ order = { "tlig" },
+ prepend = true,
+ },
+ {
+ type = "ligature",
+ features = everywhere,
+ data = {
+ [0x201C] = {0x0060, 0x0060}, -- quotedblleft
+ [0x201D] = {0x0027, 0x0027}, -- quotedblright
+ [0x00A1] = {0x0021, 0x0060}, -- exclamdown
+ [0x00BF] = {0x003F, 0x0060}, -- questiondown
+ },
+ flags = { },
+ order = { "tlig" },
+ prepend = true,
+ },
+}
+
+otf.addfeature ("tlig", tlig)
+otf.addfeature ("trep", { })
+
+local anum_arabic = { --- these are the same as in font-otc
+ [0x0030] = 0x0660,
+ [0x0031] = 0x0661,
+ [0x0032] = 0x0662,
+ [0x0033] = 0x0663,
+ [0x0034] = 0x0664,
+ [0x0035] = 0x0665,
+ [0x0036] = 0x0666,
+ [0x0037] = 0x0667,
+ [0x0038] = 0x0668,
+ [0x0039] = 0x0669,
+}
+
+local anum_persian = {--- these are the same as in font-otc
+ [0x0030] = 0x06F0,
+ [0x0031] = 0x06F1,
+ [0x0032] = 0x06F2,
+ [0x0033] = 0x06F3,
+ [0x0034] = 0x06F4,
+ [0x0035] = 0x06F5,
+ [0x0036] = 0x06F6,
+ [0x0037] = 0x06F7,
+ [0x0038] = 0x06F8,
+ [0x0039] = 0x06F9,
+}
+
+local function valid(data)
+ local features = data.resources.features
+ if features then
+ for k, v in next, features do
+ for k, v in next, v do
+ if v.arab then
+ return true
+ end
+ end
+ end
+ end
+end
+
+local anum_specification = {
+ {
+ type = "substitution",
+ features = { arab = { far = true, urd = true, snd = true } },
+ data = anum_persian,
+ flags = { },
+ order = { "anum" },
+ valid = valid,
+ },
+ {
+ type = "substitution",
+ features = { arab = { ["*"] = true } },
+ data = anum_arabic,
+ flags = { },
+ order = { "anum" },
+ valid = valid,
+ },
+}
+
+otf.addfeature ("anum", anum_specification)
+
+registerotffeature {
+ name = "anum",
+ description = "arabic digits",
+}
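+
+--[[doc--
+
+ Sketch of the intended behavior: with “anum=true” on a font using
+ the Arabic script, the ASCII digits 0-9 are replaced by
+ Arabic-Indic digits; if the language is Farsi, Urdu, or Sindhi
+ (far/urd/snd), the Persian variants are substituted instead.
+
+--doc]]--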
+
+-- vim:tw=71:sw=4:ts=4:expandtab
diff --git a/src/luaotfload-fontloader.lua b/src/luaotfload-fontloader.lua
new file mode 100644
index 0000000..2f26be7
--- /dev/null
+++ b/src/luaotfload-fontloader.lua
@@ -0,0 +1,13910 @@
+-- merged file : luatex-fonts-merged.lua
+-- parent file : luatex-fonts.lua
+-- merge date : 07/06/14 22:50:12
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['l-lua']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local major,minor=string.match(_VERSION,"^[^%d]+(%d+)%.(%d+).*$")
+_MAJORVERSION=tonumber(major) or 5
+_MINORVERSION=tonumber(minor) or 1
+_LUAVERSION=_MAJORVERSION+_MINORVERSION/10
+if not lpeg then
+ lpeg=require("lpeg")
+end
+if loadstring then
+ local loadnormal=load
+ function load(first,...)
+ if type(first)=="string" then
+ return loadstring(first,...)
+ else
+ return loadnormal(first,...)
+ end
+ end
+else
+ loadstring=load
+end
+if not ipairs then
+ local function iterate(a,i)
+ i=i+1
+ local v=a[i]
+ if v~=nil then
+ return i,v
+ end
+ end
+ function ipairs(a)
+ return iterate,a,0
+ end
+end
+if not pairs then
+ function pairs(t)
+ return next,t
+ end
+end
+if not table.unpack then
+ table.unpack=_G.unpack
+elseif not unpack then
+ _G.unpack=table.unpack
+end
+if not package.loaders then
+ package.loaders=package.searchers
+end
+local print,select,tostring=print,select,tostring
+local inspectors={}
+function setinspector(inspector)
+ inspectors[#inspectors+1]=inspector
+end
+function inspect(...)
+ for s=1,select("#",...) do
+ local value=select(s,...)
+ local done=false
+ for i=1,#inspectors do
+ done=inspectors[i](value)
+ if done then
+ break
+ end
+ end
+ if not done then
+ print(tostring(value))
+ end
+ end
+end
+local dummy=function() end
+function optionalrequire(...)
+ local ok,result=xpcall(require,dummy,...)
+ if ok then
+ return result
+ end
+end
+if lua then
+ lua.mask=load([[τεχ = 1]]) and "utf" or "ascii"
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['l-lpeg']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+lpeg=require("lpeg")
+if not lpeg.print then function lpeg.print(...) print(lpeg.pcode(...)) end end
+local type,next,tostring=type,next,tostring
+local byte,char,gmatch,format=string.byte,string.char,string.gmatch,string.format
+local floor=math.floor
+local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt
+local lpegtype,lpegmatch,lpegprint=lpeg.type,lpeg.match,lpeg.print
+if setinspector then
+ setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
+end
+lpeg.patterns=lpeg.patterns or {}
+local patterns=lpeg.patterns
+local anything=P(1)
+local endofstring=P(-1)
+local alwaysmatched=P(true)
+patterns.anything=anything
+patterns.endofstring=endofstring
+patterns.beginofstring=alwaysmatched
+patterns.alwaysmatched=alwaysmatched
+local sign=S('+-')
+local zero=P('0')
+local digit=R('09')
+local octdigit=R("07")
+local lowercase=R("az")
+local uppercase=R("AZ")
+local underscore=P("_")
+local hexdigit=digit+lowercase+uppercase
+local cr,lf,crlf=P("\r"),P("\n"),P("\r\n")
+local newline=P("\r")*(P("\n")+P(true))+P("\n")
+local escaped=P("\\")*anything
+local squote=P("'")
+local dquote=P('"')
+local space=P(" ")
+local period=P(".")
+local comma=P(",")
+local utfbom_32_be=P('\000\000\254\255')
+local utfbom_32_le=P('\255\254\000\000')
+local utfbom_16_be=P('\254\255')
+local utfbom_16_le=P('\255\254')
+local utfbom_8=P('\239\187\191')
+local utfbom=utfbom_32_be+utfbom_32_le+utfbom_16_be+utfbom_16_le+utfbom_8
+local utftype=utfbom_32_be*Cc("utf-32-be")+utfbom_32_le*Cc("utf-32-le")+utfbom_16_be*Cc("utf-16-be")+utfbom_16_le*Cc("utf-16-le")+utfbom_8*Cc("utf-8")+alwaysmatched*Cc("utf-8")
+local utfstricttype=utfbom_32_be*Cc("utf-32-be")+utfbom_32_le*Cc("utf-32-le")+utfbom_16_be*Cc("utf-16-be")+utfbom_16_le*Cc("utf-16-le")+utfbom_8*Cc("utf-8")
+local utfoffset=utfbom_32_be*Cc(4)+utfbom_32_le*Cc(4)+utfbom_16_be*Cc(2)+utfbom_16_le*Cc(2)+utfbom_8*Cc(3)+Cc(0)
+local utf8next=R("\128\191")
+patterns.utfbom_32_be=utfbom_32_be
+patterns.utfbom_32_le=utfbom_32_le
+patterns.utfbom_16_be=utfbom_16_be
+patterns.utfbom_16_le=utfbom_16_le
+patterns.utfbom_8=utfbom_8
+patterns.utf_16_be_nl=P("\000\r\000\n")+P("\000\r")+P("\000\n")
+patterns.utf_16_le_nl=P("\r\000\n\000")+P("\r\000")+P("\n\000")
+patterns.utf8one=R("\000\127")
+patterns.utf8two=R("\194\223")*utf8next
+patterns.utf8three=R("\224\239")*utf8next*utf8next
+patterns.utf8four=R("\240\244")*utf8next*utf8next*utf8next
+patterns.utfbom=utfbom
+patterns.utftype=utftype
+patterns.utfstricttype=utfstricttype
+patterns.utfoffset=utfoffset
+local utf8char=patterns.utf8one+patterns.utf8two+patterns.utf8three+patterns.utf8four
+local validutf8char=utf8char^0*endofstring*Cc(true)+Cc(false)
+local utf8character=P(1)*R("\128\191")^0
+patterns.utf8=utf8char
+patterns.utf8char=utf8char
+patterns.utf8character=utf8character
+patterns.validutf8=validutf8char
+patterns.validutf8char=validutf8char
+local eol=S("\n\r")
+local spacer=S(" \t\f\v")
+local whitespace=eol+spacer
+local nonspacer=1-spacer
+local nonwhitespace=1-whitespace
+patterns.eol=eol
+patterns.spacer=spacer
+patterns.whitespace=whitespace
+patterns.nonspacer=nonspacer
+patterns.nonwhitespace=nonwhitespace
+local stripper=spacer^0*C((spacer^0*nonspacer^1)^0)
+local fullstripper=whitespace^0*C((whitespace^0*nonwhitespace^1)^0)
+local collapser=Cs(spacer^0/""*nonspacer^0*((spacer^0/" "*nonspacer^1)^0))
+patterns.stripper=stripper
+patterns.fullstripper=fullstripper
+patterns.collapser=collapser
+patterns.lowercase=lowercase
+patterns.uppercase=uppercase
+patterns.letter=patterns.lowercase+patterns.uppercase
+patterns.space=space
+patterns.tab=P("\t")
+patterns.spaceortab=patterns.space+patterns.tab
+patterns.newline=newline
+patterns.emptyline=newline^1
+patterns.equal=P("=")
+patterns.comma=comma
+patterns.commaspacer=comma*spacer^0
+patterns.period=period
+patterns.colon=P(":")
+patterns.semicolon=P(";")
+patterns.underscore=underscore
+patterns.escaped=escaped
+patterns.squote=squote
+patterns.dquote=dquote
+patterns.nosquote=(escaped+(1-squote))^0
+patterns.nodquote=(escaped+(1-dquote))^0
+patterns.unsingle=(squote/"")*patterns.nosquote*(squote/"")
+patterns.undouble=(dquote/"")*patterns.nodquote*(dquote/"")
+patterns.unquoted=patterns.undouble+patterns.unsingle
+patterns.unspacer=((patterns.spacer^1)/"")^0
+patterns.singlequoted=squote*patterns.nosquote*squote
+patterns.doublequoted=dquote*patterns.nodquote*dquote
+patterns.quoted=patterns.doublequoted+patterns.singlequoted
+patterns.digit=digit
+patterns.octdigit=octdigit
+patterns.hexdigit=hexdigit
+patterns.sign=sign
+patterns.cardinal=digit^1
+patterns.integer=sign^-1*digit^1
+patterns.unsigned=digit^0*period*digit^1
+patterns.float=sign^-1*patterns.unsigned
+patterns.cunsigned=digit^0*comma*digit^1
+patterns.cpunsigned=digit^0*(period+comma)*digit^1
+patterns.cfloat=sign^-1*patterns.cunsigned
+patterns.cpfloat=sign^-1*patterns.cpunsigned
+patterns.number=patterns.float+patterns.integer
+patterns.cnumber=patterns.cfloat+patterns.integer
+patterns.cpnumber=patterns.cpfloat+patterns.integer
+patterns.oct=zero*octdigit^1
+patterns.octal=patterns.oct
+patterns.HEX=zero*P("X")*(digit+uppercase)^1
+patterns.hex=zero*P("x")*(digit+lowercase)^1
+patterns.hexadecimal=zero*S("xX")*hexdigit^1
+patterns.hexafloat=sign^-1*zero*S("xX")*(hexdigit^0*period*hexdigit^1+hexdigit^1*period*hexdigit^0+hexdigit^1)*(S("pP")*sign^-1*hexdigit^1)^-1
+patterns.decafloat=sign^-1*(digit^0*period*digit^1+digit^1*period*digit^0+digit^1)*S("eE")*sign^-1*digit^1
+patterns.propername=(uppercase+lowercase+underscore)*(uppercase+lowercase+underscore+digit)^0*endofstring
+patterns.somecontent=(anything-newline-space)^1
+patterns.beginline=#(1-newline)
+patterns.longtostring=Cs(whitespace^0/""*((patterns.quoted+nonwhitespace^1+whitespace^1/""*(P(-1)+Cc(" ")))^0))
+local function anywhere(pattern)
+ return P { P(pattern)+1*V(1) }
+end
+lpeg.anywhere=anywhere
+function lpeg.instringchecker(p)
+ p=anywhere(p)
+ return function(str)
+ return lpegmatch(p,str) and true or false
+ end
+end
+function lpeg.splitter(pattern,action)
+ return (((1-P(pattern))^1)/action+1)^0
+end
+function lpeg.tsplitter(pattern,action)
+ return Ct((((1-P(pattern))^1)/action+1)^0)
+end
+local splitters_s,splitters_m,splitters_t={},{},{}
+local function splitat(separator,single)
+ local splitter=(single and splitters_s[separator]) or splitters_m[separator]
+ if not splitter then
+ separator=P(separator)
+ local other=C((1-separator)^0)
+ if single then
+ local any=anything
+ splitter=other*(separator*C(any^0)+"")
+ splitters_s[separator]=splitter
+ else
+ splitter=other*(separator*other)^0
+ splitters_m[separator]=splitter
+ end
+ end
+ return splitter
+end
+local function tsplitat(separator)
+ local splitter=splitters_t[separator]
+ if not splitter then
+ splitter=Ct(splitat(separator))
+ splitters_t[separator]=splitter
+ end
+ return splitter
+end
+lpeg.splitat=splitat
+lpeg.tsplitat=tsplitat
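+-- usage sketch: lpegmatch(splitat(","),"a,b,c") yields "a","b","c"
+-- while lpegmatch(tsplitat(","),"a,b,c") yields { "a","b","c" }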
+function string.splitup(str,separator)
+ if not separator then
+ separator=","
+ end
+ return lpegmatch(splitters_m[separator] or splitat(separator),str)
+end
+local cache={}
+function lpeg.split(separator,str)
+ local c=cache[separator]
+ if not c then
+ c=tsplitat(separator)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
+end
+function string.split(str,separator)
+ if separator then
+ local c=cache[separator]
+ if not c then
+ c=tsplitat(separator)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
+ else
+ return { str }
+ end
+end
+local spacing=patterns.spacer^0*newline
+local empty=spacing*Cc("")
+local nonempty=Cs((1-spacing)^1)*spacing^-1
+local content=(empty+nonempty)^1
+patterns.textline=content
+local linesplitter=tsplitat(newline)
+patterns.linesplitter=linesplitter
+function string.splitlines(str)
+ return lpegmatch(linesplitter,str)
+end
+local cache={}
+function lpeg.checkedsplit(separator,str)
+ local c=cache[separator]
+ if not c then
+ separator=P(separator)
+ local other=C((1-separator)^1)
+ c=Ct(separator^0*other*(separator^1*other)^0)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
+end
+function string.checkedsplit(str,separator)
+ local c=cache[separator]
+ if not c then
+ separator=P(separator)
+ local other=C((1-separator)^1)
+ c=Ct(separator^0*other*(separator^1*other)^0)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
+end
+local function f2(s) local c1,c2=byte(s,1,2) return c1*64+c2-12416 end
+local function f3(s) local c1,c2,c3=byte(s,1,3) return (c1*64+c2)*64+c3-925824 end
+local function f4(s) local c1,c2,c3,c4=byte(s,1,4) return ((c1*64+c2)*64+c3)*64+c4-63447168 end
+local utf8byte=patterns.utf8one/byte+patterns.utf8two/f2+patterns.utf8three/f3+patterns.utf8four/f4
+patterns.utf8byte=utf8byte
+local cache={}
+function lpeg.stripper(str)
+ if type(str)=="string" then
+ local s=cache[str]
+ if not s then
+ s=Cs(((S(str)^1)/""+1)^0)
+ cache[str]=s
+ end
+ return s
+ else
+ return Cs(((str^1)/""+1)^0)
+ end
+end
+local cache={}
+function lpeg.keeper(str)
+ if type(str)=="string" then
+ local s=cache[str]
+ if not s then
+ s=Cs((((1-S(str))^1)/""+1)^0)
+ cache[str]=s
+ end
+ return s
+ else
+ return Cs((((1-str)^1)/""+1)^0)
+ end
+end
+function lpeg.frontstripper(str)
+ return (P(str)+P(true))*Cs(anything^0)
+end
+function lpeg.endstripper(str)
+ return Cs((1-P(str)*endofstring)^0)
+end
+function lpeg.replacer(one,two,makefunction,isutf)
+ local pattern
+ local u=isutf and utf8char or 1
+ if type(one)=="table" then
+ local no=#one
+ local p=P(false)
+ if no==0 then
+ for k,v in next,one do
+ p=p+P(k)/v
+ end
+ pattern=Cs((p+u)^0)
+ elseif no==1 then
+ local o=one[1]
+ one,two=P(o[1]),o[2]
+ pattern=Cs((one/two+u)^0)
+ else
+ for i=1,no do
+ local o=one[i]
+ p=p+P(o[1])/o[2]
+ end
+ pattern=Cs((p+u)^0)
+ end
+ else
+ pattern=Cs((P(one)/(two or "")+u)^0)
+ end
+ if makefunction then
+ return function(str)
+ return lpegmatch(pattern,str)
+ end
+ else
+ return pattern
+ end
+end
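+-- usage sketch: lpegmatch(lpeg.replacer("an","AN"),"banana") --> "bANANa"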
+function lpeg.finder(lst,makefunction,isutf)
+ local pattern
+ if type(lst)=="table" then
+ pattern=P(false)
+ if #lst==0 then
+ for k,v in next,lst do
+ pattern=pattern+P(k)
+ end
+ else
+ for i=1,#lst do
+ pattern=pattern+P(lst[i])
+ end
+ end
+ else
+ pattern=P(lst)
+ end
+ if isutf then
+ pattern=((utf8char or 1)-pattern)^0*pattern
+ else
+ pattern=(1-pattern)^0*pattern
+ end
+ if makefunction then
+ return function(str)
+ return lpegmatch(pattern,str)
+ end
+ else
+ return pattern
+ end
+end
+local splitters_f,splitters_s={},{}
+function lpeg.firstofsplit(separator)
+ local splitter=splitters_f[separator]
+ if not splitter then
+ local pattern=P(separator)
+ splitter=C((1-pattern)^0)
+ splitters_f[separator]=splitter
+ end
+ return splitter
+end
+function lpeg.secondofsplit(separator)
+ local splitter=splitters_s[separator]
+ if not splitter then
+ local pattern=P(separator)
+ splitter=(1-pattern)^0*pattern*C(anything^0)
+ splitters_s[separator]=splitter
+ end
+ return splitter
+end
+local splitters_s,splitters_p={},{}
+function lpeg.beforesuffix(separator)
+ local splitter=splitters_s[separator]
+ if not splitter then
+ local pattern=P(separator)
+ splitter=C((1-pattern)^0)*pattern*endofstring
+ splitters_s[separator]=splitter
+ end
+ return splitter
+end
+function lpeg.afterprefix(separator)
+ local splitter=splitters_p[separator]
+ if not splitter then
+ local pattern=P(separator)
+ splitter=pattern*C(anything^0)
+ splitters_p[separator]=splitter
+ end
+ return splitter
+end
+function lpeg.balancer(left,right)
+ left,right=P(left),P(right)
+ return P { left*((1-left-right)+V(1))^0*right }
+end
+local nany=utf8char/""
+function lpeg.counter(pattern)
+ pattern=Cs((P(pattern)/" "+nany)^0)
+ return function(str)
+ return #lpegmatch(pattern,str)
+ end
+end
+utf=utf or (unicode and unicode.utf8) or {}
+local utfcharacters=utf and utf.characters or string.utfcharacters
+local utfgmatch=utf and utf.gmatch
+local utfchar=utf and utf.char
+lpeg.UP=lpeg.P
+if utfcharacters then
+ function lpeg.US(str)
+ local p=P(false)
+ for uc in utfcharacters(str) do
+ p=p+P(uc)
+ end
+ return p
+ end
+elseif utfgmatch then
+ function lpeg.US(str)
+ local p=P(false)
+ for uc in utfgmatch(str,".") do
+ p=p+P(uc)
+ end
+ return p
+ end
+else
+ function lpeg.US(str)
+ local p=P(false)
+ local f=function(uc)
+ p=p+P(uc)
+ end
+ lpegmatch((utf8char/f)^0,str)
+ return p
+ end
+end
+local range=utf8byte*utf8byte+Cc(false)
+function lpeg.UR(str,more)
+ local first,last
+ if type(str)=="number" then
+ first=str
+ last=more or first
+ else
+ first,last=lpegmatch(range,str)
+ if not last then
+ return P(str)
+ end
+ end
+ if first==last then
+ return P(str)
+ elseif utfchar and (last-first<8) then
+ local p=P(false)
+ for i=first,last do
+ p=p+P(utfchar(i))
+ end
+ return p
+ else
+ local f=function(b)
+ return b>=first and b<=last
+ end
+ return utf8byte/f
+ end
+end
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p)=="pattern"
+end
+function lpeg.oneof(list,...)
+ if type(list)~="table" then
+ list={ list,... }
+ end
+ local p=P(list[1])
+ for l=2,#list do
+ p=p+P(list[l])
+ end
+ return p
+end
+local sort=table.sort
+local function copyindexed(old)
+ local new={}
+ for i=1,#old do
+ new[i]=old[i]
+ end
+ return new
+end
+local function sortedkeys(tab)
+ local keys,s={},0
+ for key,_ in next,tab do
+ s=s+1
+ keys[s]=key
+ end
+ sort(keys)
+ return keys
+end
+function lpeg.append(list,pp,delayed,checked)
+ local p=pp
+ if #list>0 then
+ local keys=copyindexed(list)
+ sort(keys)
+ for i=#keys,1,-1 do
+ local k=keys[i]
+ if p then
+ p=P(k)+p
+ else
+ p=P(k)
+ end
+ end
+ elseif delayed then
+ local keys=sortedkeys(list)
+ if p then
+ for i=1,#keys,1 do
+ local k=keys[i]
+ local v=list[k]
+ p=P(k)/list+p
+ end
+ else
+ for i=1,#keys do
+ local k=keys[i]
+ local v=list[k]
+ if p then
+ p=P(k)+p
+ else
+ p=P(k)
+ end
+ end
+ if p then
+ p=p/list
+ end
+ end
+ elseif checked then
+ local keys=sortedkeys(list)
+ for i=1,#keys do
+ local k=keys[i]
+ local v=list[k]
+ if p then
+ if k==v then
+ p=P(k)+p
+ else
+ p=P(k)/v+p
+ end
+ else
+ if k==v then
+ p=P(k)
+ else
+ p=P(k)/v
+ end
+ end
+ end
+ else
+ local keys=sortedkeys(list)
+ for i=1,#keys do
+ local k=keys[i]
+ local v=list[k]
+ if p then
+ p=P(k)/v+p
+ else
+ p=P(k)/v
+ end
+ end
+ end
+ return p
+end
+local function make(t,hash)
+ local p=P(false)
+ local keys=sortedkeys(t)
+ for i=1,#keys do
+ local k=keys[i]
+ local v=t[k]
+ local h=hash[v]
+ if h then
+ if next(v) then
+ p=p+P(k)*(make(v,hash)+P(true))
+ else
+ p=p+P(k)*P(true)
+ end
+ else
+ if next(v) then
+ p=p+P(k)*make(v,hash)
+ else
+ p=p+P(k)
+ end
+ end
+ end
+ return p
+end
+function lpeg.utfchartabletopattern(list)
+ local tree={}
+ local hash={}
+ local n=#list
+ if n==0 then
+ for s in next,list do
+ local t=tree
+ for c in gmatch(s,".") do
+ local tc=t[c]
+ if not tc then
+ tc={}
+ t[c]=tc
+ end
+ t=tc
+ end
+ hash[t]=s
+ end
+ else
+ for i=1,n do
+ local t=tree
+ local s=list[i]
+ for c in gmatch(s,".") do
+ local tc=t[c]
+ if not tc then
+ tc={}
+ t[c]=tc
+ end
+ t=tc
+ end
+ hash[t]=s
+ end
+ end
+ return make(tree,hash)
+end
+patterns.containseol=lpeg.finder(eol)
+local function nextstep(n,step,result)
+ local m=n%step
+ local d=floor(n/step)
+ if d>0 then
+ local v=V(tostring(step))
+ local s=result.start
+ for i=1,d do
+ if s then
+ s=v*s
+ else
+ s=v
+ end
+ end
+ result.start=s
+ end
+ if step>1 and result.start then
+ local v=V(tostring(step/2))
+ result[tostring(step)]=v*v
+ end
+ if step>0 then
+ return nextstep(m,step/2,result)
+ else
+ return result
+ end
+end
+function lpeg.times(pattern,n)
+ return P(nextstep(n,2^16,{ "start",["1"]=pattern }))
+end
+local trailingzeros=zero^0*-digit
+local case_1=period*trailingzeros/""
+local case_2=period*(digit-trailingzeros)^1*(trailingzeros/"")
+local number=digit^1*(case_1+case_2)
+local stripper=Cs((number+1)^0)
+lpeg.patterns.stripzeros=stripper
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['l-functions']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+functions=functions or {}
+function functions.dummy() end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['l-string']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local string=string
+local sub,gmatch,format,char,byte,rep,lower=string.sub,string.gmatch,string.format,string.char,string.byte,string.rep,string.lower
+local lpegmatch,patterns=lpeg.match,lpeg.patterns
+local P,S,C,Ct,Cc,Cs=lpeg.P,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cc,lpeg.Cs
+local unquoted=patterns.squote*C(patterns.nosquote)*patterns.squote+patterns.dquote*C(patterns.nodquote)*patterns.dquote
+function string.unquoted(str)
+ return lpegmatch(unquoted,str) or str
+end
+function string.quoted(str)
+ return format("%q",str)
+end
+function string.count(str,pattern)
+ local n=0
+ for _ in gmatch(str,pattern) do
+ n=n+1
+ end
+ return n
+end
+function string.limit(str,n,sentinel)
+ if #str>n then
+ sentinel=sentinel or "..."
+ return sub(str,1,(n-#sentinel))..sentinel
+ else
+ return str
+ end
+end
+local stripper=patterns.stripper
+local fullstripper=patterns.fullstripper
+local collapser=patterns.collapser
+local longtostring=patterns.longtostring
+function string.strip(str)
+ return lpegmatch(stripper,str) or ""
+end
+function string.fullstrip(str)
+ return lpegmatch(fullstripper,str) or ""
+end
+function string.collapsespaces(str)
+ return lpegmatch(collapser,str) or ""
+end
+function string.longtostring(str)
+ return lpegmatch(longtostring,str) or ""
+end
+local pattern=P(" ")^0*P(-1)
+function string.is_empty(str)
+ if str=="" then
+ return true
+ else
+ return lpegmatch(pattern,str) and true or false
+ end
+end
+local anything=patterns.anything
+local allescapes=Cc("%")*S(".-+%?()[]*")
+local someescapes=Cc("%")*S(".-+%()[]")
+local matchescapes=Cc(".")*S("*?")
+local pattern_a=Cs ((allescapes+anything )^0 )
+local pattern_b=Cs ((someescapes+matchescapes+anything )^0 )
+local pattern_c=Cs (Cc("^")*(someescapes+matchescapes+anything )^0*Cc("$") )
+function string.escapedpattern(str,simple)
+ return lpegmatch(simple and pattern_b or pattern_a,str)
+end
+function string.topattern(str,lowercase,strict)
+ if str=="" or type(str)~="string" then
+ return ".*"
+ elseif strict then
+ str=lpegmatch(pattern_c,str)
+ else
+ str=lpegmatch(pattern_b,str)
+ end
+ if lowercase then
+ return lower(str)
+ else
+ return str
+ end
+end
+function string.valid(str,default)
+ return (type(str)=="string" and str~="" and str) or default or nil
+end
+string.itself=function(s) return s end
+local pattern=Ct(C(1)^0)
+function string.totable(str)
+ return lpegmatch(pattern,str)
+end
+local replacer=lpeg.replacer("@","%%")
+function string.tformat(fmt,...)
+ return format(lpegmatch(replacer,fmt),...)
+end
+string.quote=string.quoted
+string.unquote=string.unquoted
+
+end -- closure
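+-- Guarded usage sketch for the string helpers defined above; the sample
+-- strings are illustrative only.
+if false then
+ print(string.strip("  luaotfload  "))     -- "luaotfload": surrounding spaces removed
+ print(string.limit("a very long name",6)) -- "a v...": truncated with a sentinel
+ print(string.is_empty("   "))             -- true: a string of spaces counts as empty
+ print(string.topattern("foo-bar.lua"))    -- "foo%-bar%.lua": magic characters escaped
+end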
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['l-table']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type,next,tostring,tonumber,ipairs,select=type,next,tostring,tonumber,ipairs,select
+local table,string=table,string
+local concat,sort,insert,remove=table.concat,table.sort,table.insert,table.remove
+local format,lower,dump=string.format,string.lower,string.dump
+local getmetatable,setmetatable=getmetatable,setmetatable
+local getinfo=debug.getinfo
+local lpegmatch,patterns=lpeg.match,lpeg.patterns
+local floor=math.floor
+local stripper=patterns.stripper
+function table.strip(tab)
+ local lst,l={},0
+ for i=1,#tab do
+ local s=lpegmatch(stripper,tab[i]) or ""
+ if s=="" then
+ else
+ l=l+1
+ lst[l]=s
+ end
+ end
+ return lst
+end
+function table.keys(t)
+ if t then
+ local keys,k={},0
+ for key,_ in next,t do
+ k=k+1
+ keys[k]=key
+ end
+ return keys
+ else
+ return {}
+ end
+end
+local function compare(a,b)
+ local ta,tb=type(a),type(b)
+ if ta==tb then
+ return a<b
+ else
+ return tostring(a)<tostring(b)
+ end
+end
+local function sortedkeys(tab)
+ if tab then
+ local srt,category,s={},0,0
+ for key,_ in next,tab do
+ s=s+1
+ srt[s]=key
+ if category==3 then
+ else
+ local tkey=type(key)
+ if tkey=="string" then
+ category=(category==2 and 3) or 1
+ elseif tkey=="number" then
+ category=(category==1 and 3) or 2
+ else
+ category=3
+ end
+ end
+ end
+ if category==0 or category==3 then
+ sort(srt,compare)
+ else
+ sort(srt)
+ end
+ return srt
+ else
+ return {}
+ end
+end
+local function sortedhashonly(tab)
+ if tab then
+ local srt,s={},0
+ for key,_ in next,tab do
+ if type(key)=="string" then
+ s=s+1
+ srt[s]=key
+ end
+ end
+ sort(srt)
+ return srt
+ else
+ return {}
+ end
+end
+local function sortedindexonly(tab)
+ if tab then
+ local srt,s={},0
+ for key,_ in next,tab do
+ if type(key)=="number" then
+ s=s+1
+ srt[s]=key
+ end
+ end
+ sort(srt)
+ return srt
+ else
+ return {}
+ end
+end
+local function sortedhashkeys(tab,cmp)
+ if tab then
+ local srt,s={},0
+ for key,_ in next,tab do
+ if key then
+ s=s+1
+ srt[s]=key
+ end
+ end
+ sort(srt,cmp)
+ return srt
+ else
+ return {}
+ end
+end
+function table.allkeys(t)
+ local keys={}
+ for k,v in next,t do
+ for k,v in next,v do
+ keys[k]=true
+ end
+ end
+ return sortedkeys(keys)
+end
+table.sortedkeys=sortedkeys
+table.sortedhashonly=sortedhashonly
+table.sortedindexonly=sortedindexonly
+table.sortedhashkeys=sortedhashkeys
+local function nothing() end
+local function sortedhash(t,cmp)
+ if t then
+ local s
+ if cmp then
+ s=sortedhashkeys(t,function(a,b) return cmp(t,a,b) end)
+ else
+ s=sortedkeys(t)
+ end
+ local n=0
+ local m=#s
+ local function kv()
+ if n<m then
+ n=n+1
+ local k=s[n]
+ return k,t[k]
+ end
+ end
+ return kv
+ else
+ return nothing
+ end
+end
+table.sortedhash=sortedhash
+table.sortedpairs=sortedhash
+function table.append(t,list)
+ local n=#t
+ for i=1,#list do
+ n=n+1
+ t[n]=list[i]
+ end
+ return t
+end
+function table.prepend(t,list)
+ local nl=#list
+ local nt=nl+#t
+ for i=#t,1,-1 do
+ t[nt]=t[i]
+ nt=nt-1
+ end
+ for i=1,#list do
+ t[i]=list[i]
+ end
+ return t
+end
+function table.merge(t,...)
+ t=t or {}
+ for i=1,select("#",...) do
+ for k,v in next,(select(i,...)) do
+ t[k]=v
+ end
+ end
+ return t
+end
+function table.merged(...)
+ local t={}
+ for i=1,select("#",...) do
+ for k,v in next,(select(i,...)) do
+ t[k]=v
+ end
+ end
+ return t
+end
+function table.imerge(t,...)
+ local nt=#t
+ for i=1,select("#",...) do
+ local nst=select(i,...)
+ for j=1,#nst do
+ nt=nt+1
+ t[nt]=nst[j]
+ end
+ end
+ return t
+end
+function table.imerged(...)
+ local tmp,ntmp={},0
+ for i=1,select("#",...) do
+ local nst=select(i,...)
+ for j=1,#nst do
+ ntmp=ntmp+1
+ tmp[ntmp]=nst[j]
+ end
+ end
+ return tmp
+end
+local function fastcopy(old,metatabletoo)
+ if old then
+ local new={}
+ for k,v in next,old do
+ if type(v)=="table" then
+ new[k]=fastcopy(v,metatabletoo)
+ else
+ new[k]=v
+ end
+ end
+ if metatabletoo then
+ local mt=getmetatable(old)
+ if mt then
+ setmetatable(new,mt)
+ end
+ end
+ return new
+ else
+ return {}
+ end
+end
+local function copy(t,tables)
+ tables=tables or {}
+ local tcopy={}
+ if not tables[t] then
+ tables[t]=tcopy
+ end
+ for i,v in next,t do
+ if type(i)=="table" then
+ if tables[i] then
+ i=tables[i]
+ else
+ i=copy(i,tables)
+ end
+ end
+ if type(v)~="table" then
+ tcopy[i]=v
+ elseif tables[v] then
+ tcopy[i]=tables[v]
+ else
+ tcopy[i]=copy(v,tables)
+ end
+ end
+ local mt=getmetatable(t)
+ if mt then
+ setmetatable(tcopy,mt)
+ end
+ return tcopy
+end
+table.fastcopy=fastcopy
+table.copy=copy
+function table.derive(parent)
+ local child={}
+ if parent then
+ setmetatable(child,{ __index=parent })
+ end
+ return child
+end
+function table.tohash(t,value)
+ local h={}
+ if t then
+ if value==nil then value=true end
+ for _,v in next,t do
+ h[v]=value
+ end
+ end
+ return h
+end
+function table.fromhash(t)
+ local hsh,h={},0
+ for k,v in next,t do
+ if v then
+ h=h+1
+ hsh[h]=k
+ end
+ end
+ return hsh
+end
+local noquotes,hexify,handle,reduce,compact,inline,functions
+local reserved=table.tohash {
+ 'and','break','do','else','elseif','end','false','for','function','if',
+ 'in','local','nil','not','or','repeat','return','then','true','until','while',
+ 'NaN','goto',
+}
+local function simple_table(t)
+ if #t>0 then
+ local n=0
+ for _,v in next,t do
+ n=n+1
+ end
+ if n==#t then
+ local tt,nt={},0
+ for i=1,#t do
+ local v=t[i]
+ local tv=type(v)
+ if tv=="number" then
+ nt=nt+1
+ if hexify then
+ tt[nt]=format("0x%X",v)
+ else
+ tt[nt]=tostring(v)
+ end
+ elseif tv=="string" then
+ nt=nt+1
+ tt[nt]=format("%q",v)
+ elseif tv=="boolean" then
+ nt=nt+1
+ tt[nt]=v and "true" or "false"
+ else
+ tt=nil
+ break
+ end
+ end
+ return tt
+ end
+ end
+ return nil
+end
+local propername=patterns.propername
+local function dummy() end
+local function do_serialize(root,name,depth,level,indexed)
+ if level>0 then
+ depth=depth.." "
+ if indexed then
+ handle(format("%s{",depth))
+ else
+ local tn=type(name)
+ if tn=="number" then
+ if hexify then
+ handle(format("%s[0x%X]={",depth,name))
+ else
+ handle(format("%s[%s]={",depth,name))
+ end
+ elseif tn=="string" then
+ if noquotes and not reserved[name] and lpegmatch(propername,name) then
+ handle(format("%s%s={",depth,name))
+ else
+ handle(format("%s[%q]={",depth,name))
+ end
+ elseif tn=="boolean" then
+ handle(format("%s[%s]={",depth,name and "true" or "false"))
+ else
+ handle(format("%s{",depth))
+ end
+ end
+ end
+ if root and next(root) then
+ local first,last=nil,0
+ if compact then
+ last=#root
+ for k=1,last do
+ if root[k]==nil then
+ last=k-1
+ break
+ end
+ end
+ if last>0 then
+ first=1
+ end
+ end
+ local sk=sortedkeys(root)
+ for i=1,#sk do
+ local k=sk[i]
+ local v=root[k]
+ local tv,tk=type(v),type(k)
+ if compact and first and tk=="number" and k>=first and k<=last then
+ if tv=="number" then
+ if hexify then
+ handle(format("%s 0x%X,",depth,v))
+ else
+ handle(format("%s %s,",depth,v))
+ end
+ elseif tv=="string" then
+ if reduce and tonumber(v) then
+ handle(format("%s %s,",depth,v))
+ else
+ handle(format("%s %q,",depth,v))
+ end
+ elseif tv=="table" then
+ if not next(v) then
+ handle(format("%s {},",depth))
+ elseif inline then
+ local st=simple_table(v)
+ if st then
+ handle(format("%s { %s },",depth,concat(st,", ")))
+ else
+ do_serialize(v,k,depth,level+1,true)
+ end
+ else
+ do_serialize(v,k,depth,level+1,true)
+ end
+ elseif tv=="boolean" then
+ handle(format("%s %s,",depth,v and "true" or "false"))
+ elseif tv=="function" then
+ if functions then
+ handle(format('%s load(%q),',depth,dump(v)))
+ else
+ handle(format('%s "function",',depth))
+ end
+ else
+ handle(format("%s %q,",depth,tostring(v)))
+ end
+ elseif k=="__p__" then
+ if false then
+ handle(format("%s __p__=nil,",depth))
+ end
+ elseif tv=="number" then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%X]=0x%X,",depth,k,v))
+ else
+ handle(format("%s [%s]=%s,",depth,k,v))
+ end
+ elseif tk=="boolean" then
+ if hexify then
+ handle(format("%s [%s]=0x%X,",depth,k and "true" or "false",v))
+ else
+ handle(format("%s [%s]=%s,",depth,k and "true" or "false",v))
+ end
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ if hexify then
+ handle(format("%s %s=0x%X,",depth,k,v))
+ else
+ handle(format("%s %s=%s,",depth,k,v))
+ end
+ else
+ if hexify then
+ handle(format("%s [%q]=0x%X,",depth,k,v))
+ else
+ handle(format("%s [%q]=%s,",depth,k,v))
+ end
+ end
+ elseif tv=="string" then
+ if reduce and tonumber(v) then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%X]=%s,",depth,k,v))
+ else
+ handle(format("%s [%s]=%s,",depth,k,v))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%s,",depth,k and "true" or "false",v))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%s,",depth,k,v))
+ else
+ handle(format("%s [%q]=%s,",depth,k,v))
+ end
+ else
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%X]=%q,",depth,k,v))
+ else
+ handle(format("%s [%s]=%q,",depth,k,v))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%q,",depth,k and "true" or "false",v))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%q,",depth,k,v))
+ else
+ handle(format("%s [%q]=%q,",depth,k,v))
+ end
+ end
+ elseif tv=="table" then
+ if not next(v) then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%X]={},",depth,k))
+ else
+ handle(format("%s [%s]={},",depth,k))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]={},",depth,k and "true" or "false"))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s={},",depth,k))
+ else
+ handle(format("%s [%q]={},",depth,k))
+ end
+ elseif inline then
+ local st=simple_table(v)
+ if st then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%X]={ %s },",depth,k,concat(st,", ")))
+ else
+ handle(format("%s [%s]={ %s },",depth,k,concat(st,", ")))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]={ %s },",depth,k and "true" or "false",concat(st,", ")))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s={ %s },",depth,k,concat(st,", ")))
+ else
+ handle(format("%s [%q]={ %s },",depth,k,concat(st,", ")))
+ end
+ else
+ do_serialize(v,k,depth,level+1)
+ end
+ else
+ do_serialize(v,k,depth,level+1)
+ end
+ elseif tv=="boolean" then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%X]=%s,",depth,k,v and "true" or "false"))
+ else
+ handle(format("%s [%s]=%s,",depth,k,v and "true" or "false"))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%s,",depth,tostring(k),v and "true" or "false"))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%s,",depth,k,v and "true" or "false"))
+ else
+ handle(format("%s [%q]=%s,",depth,k,v and "true" or "false"))
+ end
+ elseif tv=="function" then
+ if functions then
+ local f=getinfo(v).what=="C" and dump(dummy) or dump(v)
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%X]=load(%q),",depth,k,f))
+ else
+ handle(format("%s [%s]=load(%q),",depth,k,f))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=load(%q),",depth,k and "true" or "false",f))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=load(%q),",depth,k,f))
+ else
+ handle(format("%s [%q]=load(%q),",depth,k,f))
+ end
+ end
+ else
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%X]=%q,",depth,k,tostring(v)))
+ else
+ handle(format("%s [%s]=%q,",depth,k,tostring(v)))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%q,",depth,k and "true" or "false",tostring(v)))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%q,",depth,k,tostring(v)))
+ else
+ handle(format("%s [%q]=%q,",depth,k,tostring(v)))
+ end
+ end
+ end
+ end
+ if level>0 then
+ handle(format("%s},",depth))
+ end
+end
+local function serialize(_handle,root,name,specification)
+ local tname=type(name)
+ if type(specification)=="table" then
+ noquotes=specification.noquotes
+ hexify=specification.hexify
+ handle=_handle or specification.handle or print
+ reduce=specification.reduce or false
+ functions=specification.functions
+ compact=specification.compact
+ inline=specification.inline and compact
+ if functions==nil then
+ functions=true
+ end
+ if compact==nil then
+ compact=true
+ end
+ if inline==nil then
+ inline=compact
+ end
+ else
+ noquotes=false
+ hexify=false
+ handle=_handle or print
+ reduce=false
+ compact=true
+ inline=true
+ functions=true
+ end
+ if tname=="string" then
+ if name=="return" then
+ handle("return {")
+ else
+ handle(name.."={")
+ end
+ elseif tname=="number" then
+ if hexify then
+ handle(format("[0x%X]={",name))
+ else
+ handle("["..name.."]={")
+ end
+ elseif tname=="boolean" then
+ if name then
+ handle("return {")
+ else
+ handle("{")
+ end
+ else
+ handle("t={")
+ end
+ if root then
+ if getmetatable(root) then
+ local dummy=root._w_h_a_t_e_v_e_r_
+ root._w_h_a_t_e_v_e_r_=nil
+ end
+ if next(root) then
+ do_serialize(root,name,"",0)
+ end
+ end
+ handle("}")
+end
+function table.serialize(root,name,specification)
+ local t,n={},0
+ local function flush(s)
+ n=n+1
+ t[n]=s
+ end
+ serialize(flush,root,name,specification)
+ return concat(t,"\n")
+end
+table.tohandle=serialize
+local maxtab=2*1024
+function table.tofile(filename,root,name,specification)
+ local f=io.open(filename,'w')
+ if f then
+ if maxtab>1 then
+ local t,n={},0
+ local function flush(s)
+ n=n+1
+ t[n]=s
+ if n>maxtab then
+ f:write(concat(t,"\n"),"\n")
+ t,n={},0
+ end
+ end
+ serialize(flush,root,name,specification)
+ f:write(concat(t,"\n"),"\n")
+ else
+ local function flush(s)
+ f:write(s,"\n")
+ end
+ serialize(flush,root,name,specification)
+ end
+ f:close()
+ io.flush()
+ end
+end
+local function flattened(t,f,depth)
+ if f==nil then
+ f={}
+ depth=0xFFFF
+ elseif tonumber(f) then
+ depth=f
+ f={}
+ elseif not depth then
+ depth=0xFFFF
+ end
+ for k,v in next,t do
+ if type(k)~="number" then
+ if depth>0 and type(v)=="table" then
+ flattened(v,f,depth-1)
+ else
+ f[#f+1]=v
+ end
+ end
+ end
+ for k=1,#t do
+ local v=t[k]
+ if depth>0 and type(v)=="table" then
+ flattened(v,f,depth-1)
+ else
+ f[#f+1]=v
+ end
+ end
+ return f
+end
+table.flattened=flattened
+local function unnest(t,f)
+ if not f then
+ f={}
+ end
+ for i=1,#t do
+ local v=t[i]
+ if type(v)=="table" then
+ if type(v[1])=="table" then
+ unnest(v,f)
+ else
+ f[#f+1]=v
+ end
+ else
+ f[#f+1]=v
+ end
+ end
+ return f
+end
+function table.unnest(t)
+ return unnest(t)
+end
+local function are_equal(a,b,n,m)
+ if a and b and #a==#b then
+ n=n or 1
+ m=m or #a
+ for i=n,m do
+ local ai,bi=a[i],b[i]
+ if ai==bi then
+ elseif type(ai)=="table" and type(bi)=="table" then
+ if not are_equal(ai,bi) then
+ return false
+ end
+ else
+ return false
+ end
+ end
+ return true
+ else
+ return false
+ end
+end
+local function identical(a,b)
+ for ka,va in next,a do
+ local vb=b[ka]
+ if va==vb then
+ elseif type(va)=="table" and type(vb)=="table" then
+ if not identical(va,vb) then
+ return false
+ end
+ else
+ return false
+ end
+ end
+ return true
+end
+table.identical=identical
+table.are_equal=are_equal
+function table.compact(t)
+ if t then
+ for k,v in next,t do
+ if not next(v) then
+ t[k]=nil
+ end
+ end
+ end
+end
+function table.contains(t,v)
+ if t then
+ for i=1,#t do
+ if t[i]==v then
+ return i
+ end
+ end
+ end
+ return false
+end
+function table.count(t)
+ local n=0
+ for k,v in next,t do
+ n=n+1
+ end
+ return n
+end
+function table.swapped(t,s)
+ local n={}
+ if s then
+ for k,v in next,s do
+ n[k]=v
+ end
+ end
+ for k,v in next,t do
+ n[v]=k
+ end
+ return n
+end
+function table.mirrored(t)
+ local n={}
+ for k,v in next,t do
+ n[v]=k
+ n[k]=v
+ end
+ return n
+end
+function table.reversed(t)
+ if t then
+ local tt,tn={},#t
+ if tn>0 then
+ local ttn=0
+ for i=tn,1,-1 do
+ ttn=ttn+1
+ tt[ttn]=t[i]
+ end
+ end
+ return tt
+ end
+end
+function table.reverse(t)
+ if t then
+ local n=#t
+ for i=1,floor(n/2) do
+ local j=n-i+1
+ t[i],t[j]=t[j],t[i]
+ end
+ return t
+ end
+end
+function table.sequenced(t,sep,simple)
+ if not t then
+ return ""
+ end
+ local n=#t
+ local s={}
+ if n>0 then
+ for i=1,n do
+ s[i]=tostring(t[i])
+ end
+ else
+ n=0
+ for k,v in sortedhash(t) do
+ if simple then
+ if v==true then
+ n=n+1
+ s[n]=k
+ elseif v and v~="" then
+ n=n+1
+ s[n]=k.."="..tostring(v)
+ end
+ else
+ n=n+1
+ s[n]=k.."="..tostring(v)
+ end
+ end
+ end
+ return concat(s,sep or " | ")
+end
+function table.print(t,...)
+ if type(t)~="table" then
+ print(tostring(t))
+ else
+ serialize(print,t,...)
+ end
+end
+if setinspector then
+ setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end)
+end
+function table.sub(t,i,j)
+ return { unpack(t,i,j) }
+end
+function table.is_empty(t)
+ return not t or not next(t)
+end
+function table.has_one_entry(t)
+ return t and not next(t,next(t))
+end
+function table.loweredkeys(t)
+ local l={}
+ for k,v in next,t do
+ l[lower(k)]=v
+ end
+ return l
+end
+function table.unique(old)
+ local hash={}
+ local new={}
+ local n=0
+ for i=1,#old do
+ local oi=old[i]
+ if not hash[oi] then
+ n=n+1
+ new[n]=oi
+ hash[oi]=true
+ end
+ end
+ return new
+end
+function table.sorted(t,...)
+ sort(t,...)
+ return t
+end
+function table.values(t,s)
+ if t then
+ local values,keys,v={},{},0
+ for key,value in next,t do
+ if not keys[value] then
+ v=v+1
+ values[v]=value
+ keys[value]=key -- remember values already seen
+ end
+ end
+ if s then
+ sort(values)
+ end
+ return values
+ else
+ return {}
+ end
+end
+function table.filtered(t,pattern,sort,cmp)
+ if t and type(pattern)=="string" then
+ if sort then
+ local s
+ if cmp then
+ s=sortedhashkeys(t,function(a,b) return cmp(t,a,b) end)
+ else
+ s=sortedkeys(t)
+ end
+ local n=0
+ local m=#s
+ local function kv(s)
+ while n<m do
+ n=n+1
+ local k=s[n]
+ if find(k,pattern) then
+ return k,t[k]
+ end
+ end
+ end
+ return kv,s
+ else
+ local n=next(t)
+ local function iterator()
+ while n do
+ local k=n
+ n=next(t,k)
+ if find(k,pattern) then
+ return k,t[k]
+ end
+ end
+ end
+ return iterator,t
+ end
+ else
+ return nothing
+ end
+end
+
+end -- closure
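+-- Guarded usage sketch for the table helpers defined above; the table
+-- contents are illustrative only. table.sortedhash iterates in a
+-- deterministic order and table.serialize produces loadable Lua source.
+if false then
+ local t={ mode="node",features={ kern=true,liga=true } }
+ for k,v in table.sortedhash(t) do
+  print(k,v) -- "features" then "mode", values unchanged
+ end
+ print(table.serialize(t,"return")) -- "return {" .. nested fields .. "}"
+end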
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['l-io']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local io=io
+local byte,find,gsub,format=string.byte,string.find,string.gsub,string.format
+local concat=table.concat
+local floor=math.floor
+local type=type
+if string.find(os.getenv("PATH"),";",1,true) then
+ io.fileseparator,io.pathseparator="\\",";"
+else
+ io.fileseparator,io.pathseparator="/",":"
+end
+local function readall(f)
+ return f:read("*all")
+end
+local function readall(f)
+ local size=f:seek("end")
+ if size==0 then
+ return ""
+ elseif size<1024*1024 then
+ f:seek("set",0)
+ return f:read('*all')
+ else
+ local done=f:seek("set",0)
+ local step
+ if size<1024*1024 then
+ step=1024*1024
+ elseif size>16*1024*1024 then
+ step=16*1024*1024
+ else
+ step=floor(size/(1024*1024))*1024*1024/8
+ end
+ local data={}
+ while true do
+ local r=f:read(step)
+ if not r then
+ return concat(data)
+ else
+ data[#data+1]=r
+ end
+ end
+ end
+end
+io.readall=readall
+function io.loaddata(filename,textmode)
+ local f=io.open(filename,(textmode and 'r') or 'rb')
+ if f then
+ local data=readall(f)
+ f:close()
+ if #data>0 then
+ return data
+ end
+ end
+end
+function io.savedata(filename,data,joiner)
+ local f=io.open(filename,"wb")
+ if f then
+ if type(data)=="table" then
+ f:write(concat(data,joiner or ""))
+ elseif type(data)=="function" then
+ data(f)
+ else
+ f:write(data or "")
+ end
+ f:close()
+ io.flush()
+ return true
+ else
+ return false
+ end
+end
+function io.loadlines(filename,n)
+ local f=io.open(filename,'r')
+ if not f then
+ elseif n then
+ local lines={}
+ for i=1,n do
+ local line=f:read("*lines")
+ if line then
+ lines[#lines+1]=line
+ else
+ break
+ end
+ end
+ f:close()
+ lines=concat(lines,"\n")
+ if #lines>0 then
+ return lines
+ end
+ else
+ local line=f:read("*line") or ""
+ f:close()
+ if #line>0 then
+ return line
+ end
+ end
+end
+function io.loadchunk(filename,n)
+ local f=io.open(filename,'rb')
+ if f then
+ local data=f:read(n or 1024)
+ f:close()
+ if #data>0 then
+ return data
+ end
+ end
+end
+function io.exists(filename)
+ local f=io.open(filename)
+ if f==nil then
+ return false
+ else
+ f:close()
+ return true
+ end
+end
+function io.size(filename)
+ local f=io.open(filename)
+ if f==nil then
+ return 0
+ else
+ local s=f:seek("end")
+ f:close()
+ return s
+ end
+end
+function io.noflines(f)
+ if type(f)=="string" then
+ local f=io.open(f) -- here f is a file name
+ if f then
+ local n=f and io.noflines(f) or 0
+ f:close()
+ return n
+ else
+ return 0
+ end
+ else
+ local n=0
+ for _ in f:lines() do
+ n=n+1
+ end
+ f:seek('set',0)
+ return n
+ end
+end
+local nextchar={
+ [ 4]=function(f)
+ return f:read(1,1,1,1)
+ end,
+ [ 2]=function(f)
+ return f:read(1,1)
+ end,
+ [ 1]=function(f)
+ return f:read(1)
+ end,
+ [-2]=function(f)
+ local a,b=f:read(1,1)
+ return b,a
+ end,
+ [-4]=function(f)
+ local a,b,c,d=f:read(1,1,1,1)
+ return d,c,b,a
+ end
+}
+function io.characters(f,n)
+ if f then
+ return nextchar[n or 1],f
+ end
+end
+local nextbyte={
+ [4]=function(f)
+ local a,b,c,d=f:read(1,1,1,1)
+ if d then
+ return byte(a),byte(b),byte(c),byte(d)
+ end
+ end,
+ [3]=function(f)
+ local a,b,c=f:read(1,1,1)
+ if b then
+ return byte(a),byte(b),byte(c)
+ end
+ end,
+ [2]=function(f)
+ local a,b=f:read(1,1)
+ if b then
+ return byte(a),byte(b)
+ end
+ end,
+ [1]=function (f)
+ local a=f:read(1)
+ if a then
+ return byte(a)
+ end
+ end,
+ [-2]=function (f)
+ local a,b=f:read(1,1)
+ if b then
+ return byte(b),byte(a)
+ end
+ end,
+ [-3]=function(f)
+ local a,b,c=f:read(1,1,1)
+ if b then
+ return byte(c),byte(b),byte(a)
+ end
+ end,
+ [-4]=function(f)
+ local a,b,c,d=f:read(1,1,1,1)
+ if d then
+ return byte(d),byte(c),byte(b),byte(a)
+ end
+ end
+}
+function io.bytes(f,n)
+ if f then
+ return nextbyte[n or 1],f
+ else
+ return nil,nil
+ end
+end
+function io.ask(question,default,options)
+ while true do
+ io.write(question)
+ if options then
+ io.write(format(" [%s]",concat(options,"|")))
+ end
+ if default then
+ io.write(format(" [%s]",default))
+ end
+ io.write(format(" "))
+ io.flush()
+ local answer=io.read()
+ answer=gsub(answer,"^%s*(.*)%s*$","%1")
+ if answer=="" and default then
+ return default
+ elseif not options then
+ return answer
+ else
+ for k=1,#options do
+ if options[k]==answer then
+ return answer
+ end
+ end
+ local pattern="^"..answer
+ for k=1,#options do
+ local v=options[k]
+ if find(v,pattern) then
+ return v
+ end
+ end
+ end
+ end
+end
+local function readnumber(f,n,m)
+ if m then
+ f:seek("set",n)
+ n=m
+ end
+ if n==1 then
+ return byte(f:read(1))
+ elseif n==2 then
+ local a,b=byte(f:read(2),1,2)
+ return 256*a+b
+ elseif n==3 then
+ local a,b,c=byte(f:read(3),1,3)
+ return 256*256*a+256*b+c
+ elseif n==4 then
+ local a,b,c,d=byte(f:read(4),1,4)
+ local s=type(n)=="string" and n or format("%0.2f",n)
+ elseif n==8 then
+ local a,b=readnumber(f,4),readnumber(f,4)
+ return 256*a+b
+ elseif n==12 then
+ local a,b,c=readnumber(f,4),readnumber(f,4),readnumber(f,4)
+ return 256*256*a+256*b+c
+ elseif n==-2 then
+ local b,a=byte(f:read(2),1,2)
+ return 256*a+b
+ elseif n==-3 then
+ local c,b,a=byte(f:read(3),1,3)
+ return 256*256*a+256*b+c
+ elseif n==-4 then
+ local d,c,b,a=byte(f:read(4),1,4)
+ return 256*256*256*a+256*256*b+256*c+d
+ elseif n==-8 then
+ local h,g,f,e,d,c,b,a=byte(f:read(8),1,8)
+ return 256*256*256*256*256*256*256*a+256*256*256*256*256*256*b+256*256*256*256*256*c+256*256*256*256*d+256*256*256*e+256*256*f+256*g+h
+ else
+ return 0
+ end
+end
+io.readnumber=readnumber
+function io.readstring(f,n,m)
+ if m then
+ f:seek("set",n)
+ n=m
+ end
+ local str=gsub(f:read(n),"\000","")
+ return str
+end
+if not io.i_limiter then function io.i_limiter() end end
+if not io.o_limiter then function io.o_limiter() end end
+
+end -- closure
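+-- Guarded usage sketch for the io helpers defined above; the temporary file
+-- name is made up for illustration.
+if false then
+ local tmp="io-sketch.tmp"
+ io.savedata(tmp,"hello\nworld\n") -- also accepts a table of strings or a writer function
+ print(io.loaddata(tmp))           -- the whole file as one string
+ print(io.size(tmp))               -- 12 (bytes)
+ print(io.loadlines(tmp))          -- "hello": the first line only
+ os.remove(tmp)
+end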
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['l-file']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+file=file or {}
+local file=file
+if not lfs then
+ lfs=optionalrequire("lfs")
+end
+if not lfs then
+ lfs={
+ getcurrentdir=function()
+ return "."
+ end,
+ attributes=function()
+ return nil
+ end,
+ isfile=function(name)
+ local f=io.open(name,'rb')
+ if f then
+ f:close()
+ return true
+ end
+ end,
+ isdir=function(name)
+ print("you need to load lfs")
+ return false
+ end
+ }
+elseif not lfs.isfile then
+ local attributes=lfs.attributes
+ function lfs.isdir(name)
+ return attributes(name,"mode")=="directory"
+ end
+ function lfs.isfile(name)
+ return attributes(name,"mode")=="file"
+ end
+end
+local insert,concat=table.insert,table.concat
+local match,find,gmatch=string.match,string.find,string.gmatch
+local lpegmatch=lpeg.match
+local getcurrentdir,attributes=lfs.currentdir,lfs.attributes
+local checkedsplit=string.checkedsplit
+local P,R,S,C,Cs,Cp,Cc,Ct=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Cs,lpeg.Cp,lpeg.Cc,lpeg.Ct
+local colon=P(":")
+local period=P(".")
+local periods=P("..")
+local fwslash=P("/")
+local bwslash=P("\\")
+local slashes=S("\\/")
+local noperiod=1-period
+local noslashes=1-slashes
+local name=noperiod^1
+local suffix=period/""*(1-period-slashes)^1*-1
+local pattern=C((1-(slashes^1*noslashes^1*-1))^1)*P(1)
+local function pathpart(name,default)
+ return name and lpegmatch(pattern,name) or default or ""
+end
+local pattern=(noslashes^0*slashes)^1*C(noslashes^1)*-1
+local function basename(name)
+ return name and lpegmatch(pattern,name) or name
+end
+local pattern=(noslashes^0*slashes^1)^0*Cs((1-suffix)^1)*suffix^0
+local function nameonly(name)
+ return name and lpegmatch(pattern,name) or name
+end
+local pattern=(noslashes^0*slashes)^0*(noperiod^1*period)^1*C(noperiod^1)*-1
+local function suffixonly(name)
+ return name and lpegmatch(pattern,name) or ""
+end
+local pattern=(noslashes^0*slashes)^0*noperiod^1*((period*C(noperiod^1))^1)*-1+Cc("")
+local function suffixesonly(name)
+ if name then
+ return lpegmatch(pattern,name)
+ else
+ return ""
+ end
+end
+file.pathpart=pathpart
+file.basename=basename
+file.nameonly=nameonly
+file.suffixonly=suffixonly
+file.suffix=suffixonly
+file.suffixesonly=suffixesonly
+file.suffixes=suffixesonly
+file.dirname=pathpart
+file.extname=suffixonly
+local drive=C(R("az","AZ"))*colon
+local path=C((noslashes^0*slashes)^0)
+local suffix=period*C(P(1-period)^0*P(-1))
+local base=C((1-suffix)^0)
+local rest=C(P(1)^0)
+drive=drive+Cc("")
+path=path+Cc("")
+base=base+Cc("")
+suffix=suffix+Cc("")
+local pattern_a=drive*path*base*suffix
+local pattern_b=path*base*suffix
+local pattern_c=C(drive*path)*C(base*suffix)
+local pattern_d=path*rest
+function file.splitname(str,splitdrive)
+ if not str then
+ elseif splitdrive then
+ return lpegmatch(pattern_a,str)
+ else
+ return lpegmatch(pattern_b,str)
+ end
+end
+function file.splitbase(str)
+ if str then
+ return lpegmatch(pattern_d,str)
+ else
+ return "",str
+ end
+end
+function file.nametotable(str,splitdrive)
+ if str then
+ local path,drive,subpath,name,base,suffix=lpegmatch(pattern_c,str)
+ if splitdrive then
+ return {
+ path=path,
+ drive=drive,
+ subpath=subpath,
+ name=name,
+ base=base,
+ suffix=suffix,
+ }
+ else
+ return {
+ path=path,
+ name=name,
+ base=base,
+ suffix=suffix,
+ }
+ end
+ end
+end
+local pattern=Cs(((period*(1-period-slashes)^1*-1)/""+1)^1)
+function file.removesuffix(name)
+ return name and lpegmatch(pattern,name)
+end
+local suffix=period/""*(1-period-slashes)^1*-1
+local pattern=Cs((noslashes^0*slashes^1)^0*((1-suffix)^1))*Cs(suffix)
+function file.addsuffix(filename,suffix,criterium)
+ if not filename or not suffix or suffix=="" then
+ return filename
+ elseif criterium==true then
+ return filename.."."..suffix
+ elseif not criterium then
+ local n,s=lpegmatch(pattern,filename)
+ if not s or s=="" then
+ return filename.."."..suffix
+ else
+ return filename
+ end
+ else
+ local n,s=lpegmatch(pattern,filename)
+ if s and s~="" then
+ local t=type(criterium)
+ if t=="table" then
+ for i=1,#criterium do
+ if s==criterium[i] then
+ return filename
+ end
+ end
+ elseif t=="string" then
+ if s==criterium then
+ return filename
+ end
+ end
+ end
+ return (n or filename).."."..suffix
+ end
+end
+local suffix=period*(1-period-slashes)^1*-1
+local pattern=Cs((1-suffix)^0)
+function file.replacesuffix(name,suffix)
+ if name and suffix and suffix~="" then
+ return lpegmatch(pattern,name).."."..suffix
+ else
+ return name
+ end
+end
+local reslasher=lpeg.replacer(P("\\"),"/")
+function file.reslash(str)
+ return str and lpegmatch(reslasher,str)
+end
+function file.is_writable(name)
+ if not name then
+ elseif lfs.isdir(name) then
+ name=name.."/m_t_x_t_e_s_t.tmp"
+ local f=io.open(name,"wb")
+ if f then
+ f:close()
+ os.remove(name)
+ return true
+ end
+ elseif lfs.isfile(name) then
+ local f=io.open(name,"ab")
+ if f then
+ f:close()
+ return true
+ end
+ else
+ local f=io.open(name,"ab")
+ if f then
+ f:close()
+ os.remove(name)
+ return true
+ end
+ end
+ return false
+end
+local readable=P("r")*Cc(true)
+function file.is_readable(name)
+ if name then
+ local a=attributes(name)
+ return a and lpegmatch(readable,a.permissions) or false
+ else
+ return false
+ end
+end
+file.isreadable=file.is_readable
+file.iswritable=file.is_writable
+function file.size(name)
+ if name then
+ local a=attributes(name)
+ return a and a.size or 0
+ else
+ return 0
+ end
+end
+function file.splitpath(str,separator)
+ return str and checkedsplit(lpegmatch(reslasher,str),separator or io.pathseparator)
+end
+function file.joinpath(tab,separator)
+ return tab and concat(tab,separator or io.pathseparator)
+end
+local someslash=S("\\/")
+local stripper=Cs(P(fwslash)^0/""*reslasher)
+local isnetwork=someslash*someslash*(1-someslash)+(1-fwslash-colon)^1*colon
+local isroot=fwslash^1*-1
+local hasroot=fwslash^1
+local reslasher=lpeg.replacer(S("\\/"),"/")
+local deslasher=lpeg.replacer(S("\\/")^1,"/")
+function file.join(...)
+ local lst={... }
+ local one=lst[1]
+ if lpegmatch(isnetwork,one) then
+ local one=lpegmatch(reslasher,one)
+ local two=lpegmatch(deslasher,concat(lst,"/",2))
+ if lpegmatch(hasroot,two) then
+ return one..two
+ else
+ return one.."/"..two
+ end
+ elseif lpegmatch(isroot,one) then
+ local two=lpegmatch(deslasher,concat(lst,"/",2))
+ if lpegmatch(hasroot,two) then
+ return two
+ else
+ return "/"..two
+ end
+ elseif one=="" then
+ return lpegmatch(stripper,concat(lst,"/",2))
+ else
+ return lpegmatch(deslasher,concat(lst,"/"))
+ end
+end
+local drivespec=R("az","AZ")^1*colon
+local anchors=fwslash+drivespec
+local untouched=periods+(1-period)^1*P(-1)
+local mswindrive=Cs(drivespec*(bwslash/"/"+fwslash)^0)
+local mswinuncpath=(bwslash+fwslash)*(bwslash+fwslash)*Cc("//")
+local splitstarter=(mswindrive+mswinuncpath+Cc(false))*Ct(lpeg.splitat(S("/\\")^1))
+local absolute=fwslash
+function file.collapsepath(str,anchor)
+ if not str then
+ return
+ end
+ if anchor==true and not lpegmatch(anchors,str) then
+ str=getcurrentdir().."/"..str
+ end
+ if str=="" or str=="." then
+ return "."
+ elseif lpegmatch(untouched,str) then
+ return lpegmatch(reslasher,str)
+ end
+ local starter,oldelements=lpegmatch(splitstarter,str)
+ local newelements={}
+ local i=#oldelements
+ while i>0 do
+ local element=oldelements[i]
+ if element=='.' then
+ elseif element=='..' then
+ local n=i-1
+ while n>0 do
+ local element=oldelements[n]
+ if element~='..' and element~='.' then
+ oldelements[n]='.'
+ break
+ else
+ n=n-1
+ end
+ end
+ if n<1 then
+ insert(newelements,1,'..')
+ end
+ elseif element~="" then
+ insert(newelements,1,element)
+ end
+ i=i-1
+ end
+ if #newelements==0 then
+ return starter or "."
+ elseif starter then
+ return starter..concat(newelements,'/')
+ elseif lpegmatch(absolute,str) then
+ return "/"..concat(newelements,'/')
+ else
+ newelements=concat(newelements,'/')
+ if anchor=="." and find(str,"^%./") then
+ return "./"..newelements
+ else
+ return newelements
+ end
+ end
+end
+local validchars=R("az","09","AZ","--","..")
+local pattern_a=lpeg.replacer(1-validchars)
+local pattern_a=Cs((validchars+P(1)/"-")^1)
+local whatever=P("-")^0/""
+local pattern_b=Cs(whatever*(1-whatever*-1)^1)
+function file.robustname(str,strict)
+ if str then
+ str=lpegmatch(pattern_a,str) or str
+ if strict then
+ return lpegmatch(pattern_b,str) or str
+ else
+ return str
+ end
+ end
+end
+file.readdata=io.loaddata
+file.savedata=io.savedata
+function file.copy(oldname,newname)
+ if oldname and newname then
+ local data=io.loaddata(oldname)
+ if data and data~="" then
+ file.savedata(newname,data)
+ end
+ end
+end
+local letter=R("az","AZ")+S("_-+")
+local separator=P("://")
+local qualified=period^0*fwslash+letter*colon+letter^1*separator+letter^1*fwslash
+local rootbased=fwslash+letter*colon
+lpeg.patterns.qualified=qualified
+lpeg.patterns.rootbased=rootbased
+function file.is_qualified_path(filename)
+ return filename and lpegmatch(qualified,filename)~=nil
+end
+function file.is_rootbased_path(filename)
+ return filename and lpegmatch(rootbased,filename)~=nil
+end
+function file.strip(name,dir)
+ if name then
+ local b,a=match(name,"^(.-)"..dir.."(.*)$")
+ return a~="" and a or name
+ end
+end
+function lfs.mkdirs(path)
+ local full=""
+ for sub in gmatch(path,"(/*[^\\/]+)") do
+ full=full..sub
+ lfs.mkdir(full)
+ end
+end
+
+end -- closure
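+-- Guarded usage sketch for the file name helpers defined above; the paths are
+-- illustrative only.
+if false then
+ print(file.join("tex","fonts","foo.otf"))     -- "tex/fonts/foo.otf"
+ print(file.collapsepath("a/b/../c/./d"))      -- "a/c/d"
+ print(file.suffixonly("luaotfload-main.lua")) -- "lua"
+ print(file.replacesuffix("names.lua","luc"))  -- "names.luc"
+end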
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['l-boolean']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type,tonumber=type,tonumber
+boolean=boolean or {}
+local boolean=boolean
+function boolean.tonumber(b)
+ if b then return 1 else return 0 end
+end
+function toboolean(str,tolerant)
+ if str==nil then
+ return false
+ elseif str==false then
+ return false
+ elseif str==true then
+ return true
+ elseif str=="true" then
+ return true
+ elseif str=="false" then
+ return false
+ elseif not tolerant then
+ return false
+ elseif str==0 then
+ return false
+ elseif (tonumber(str) or 0)>0 then
+ return true
+ else
+ return str=="yes" or str=="on" or str=="t"
+ end
+end
+string.toboolean=toboolean
+function string.booleanstring(str)
+ if str=="0" then
+ return false
+ elseif str=="1" then
+ return true
+ elseif str=="" then
+ return false
+ elseif str=="false" then
+ return false
+ elseif str=="true" then
+ return true
+ elseif (tonumber(str) or 0)>0 then
+ return true
+ else
+ return str=="yes" or str=="on" or str=="t"
+ end
+end
+function string.is_boolean(str,default)
+ if type(str)=="string" then
+ if str=="true" or str=="yes" or str=="on" or str=="t" or str=="1" then
+ return true
+ elseif str=="false" or str=="no" or str=="off" or str=="f" or str=="0" then
+ return false
+ end
+ end
+ return default
+end
+
+end -- closure
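+-- Guarded usage sketch for the boolean coercions defined above; note that
+-- toboolean only accepts "yes"/"on"/numbers when the tolerant flag is set.
+if false then
+ print(toboolean("true"))          -- true
+ print(toboolean("yes"))           -- false: tolerant flag not set
+ print(toboolean("yes",true))      -- true
+ print(string.is_boolean("off"))   -- false
+ print(string.is_boolean("maybe")) -- nil: falls back to the default argument
+end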
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['l-math']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local floor,sin,cos,tan=math.floor,math.sin,math.cos,math.tan
+if not math.round then
+ function math.round(x) return floor(x+0.5) end
+end
+if not math.div then
+ function math.div(n,m) return floor(n/m) end
+end
+if not math.mod then
+ function math.mod(n,m) return n%m end
+end
+local pipi=2*math.pi/360
+if not math.sind then
+ function math.sind(d) return sin(d*pipi) end
+ function math.cosd(d) return cos(d*pipi) end
+ function math.tand(d) return tan(d*pipi) end
+end
+if not math.odd then
+ function math.odd (n) return n%2~=0 end
+ function math.even(n) return n%2==0 end
+end
+
+end -- closure
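+-- Guarded usage sketch for the math fallbacks defined above (they are only
+-- installed when the interpreter does not already provide them).
+if false then
+ print(math.round(2.5)) -- 3
+ print(math.div(7,2))   -- 3: floored division
+ print(math.sind(90))   -- 1: sine of an angle given in degrees
+ print(math.odd(7))     -- true
+end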
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['util-str']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+utilities=utilities or {}
+utilities.strings=utilities.strings or {}
+local strings=utilities.strings
+local format,gsub,rep,sub=string.format,string.gsub,string.rep,string.sub
+local load,dump=load,string.dump
+local tonumber,type,tostring=tonumber,type,tostring
+local unpack,concat=table.unpack,table.concat
+local P,V,C,S,R,Ct,Cs,Cp,Carg,Cc=lpeg.P,lpeg.V,lpeg.C,lpeg.S,lpeg.R,lpeg.Ct,lpeg.Cs,lpeg.Cp,lpeg.Carg,lpeg.Cc
+local patterns,lpegmatch=lpeg.patterns,lpeg.match
+local utfchar,utfbyte=utf.char,utf.byte
+local loadstripped=nil
+if _LUAVERSION<5.2 then
+ loadstripped=function(str,shortcuts)
+ return load(str)
+ end
+else
+ loadstripped=function(str,shortcuts)
+ if shortcuts then
+ return load(dump(load(str),true),nil,nil,shortcuts)
+ else
+ return load(dump(load(str),true))
+ end
+ end
+end
+if not number then number={} end
+local stripper=patterns.stripzeros
+local function points(n)
+ n=tonumber(n)
+ return (not n or n==0) and "0pt" or lpegmatch(stripper,format("%.5fpt",n/65536))
+end
+local function basepoints(n)
+ n=tonumber(n)
+ return (not n or n==0) and "0bp" or lpegmatch(stripper,format("%.5fbp",n*(7200/7227)/65536))
+end
+number.points=points
+number.basepoints=basepoints
+local rubish=patterns.spaceortab^0*patterns.newline
+local anyrubish=patterns.spaceortab+patterns.newline
+local anything=patterns.anything
+local stripped=(patterns.spaceortab^1/"")*patterns.newline
+local leading=rubish^0/""
+local trailing=(anyrubish^1*patterns.endofstring)/""
+local redundant=rubish^3/"\n"
+local pattern=Cs(leading*(trailing+redundant+stripped+anything)^0)
+function strings.collapsecrlf(str)
+ return lpegmatch(pattern,str)
+end
+local repeaters={}
+function strings.newrepeater(str,offset)
+ offset=offset or 0
+ local s=repeaters[str]
+ if not s then
+ s={}
+ repeaters[str]=s
+ end
+ local t=s[offset]
+ if t then
+ return t
+ end
+ t={}
+ setmetatable(t,{ __index=function(t,k)
+ if not k then
+ return ""
+ end
+ local n=k+offset
+ local s=n>0 and rep(str,n) or ""
+ t[k]=s
+ return s
+ end })
+ s[offset]=t
+ return t
+end
+local extra,tab,start=0,0,4
+local nspaces=strings.newrepeater(" ")
+string.nspaces=nspaces
+local pattern=Carg(1)/function(t)
+ extra,tab,start=0,t or 7,1
+ end*Cs((
+ Cp()*patterns.tab/function(position)
+ local current=(position-start+1)+extra
+ local spaces=tab-(current-1)%tab
+ if spaces>0 then
+ extra=extra+spaces-1
+ return nspaces[spaces]
+ else
+ return ""
+ end
+ end+patterns.newline*Cp()/function(position)
+ extra,start=0,position
+ end+patterns.anything
+ )^1)
+function strings.tabtospace(str,tab)
+ return lpegmatch(pattern,str,1,tab or 7)
+end
+local newline=patterns.newline
+local endofstring=patterns.endofstring
+local whitespace=patterns.whitespace
+local spacer=patterns.spacer
+local space=spacer^0
+local nospace=space/""
+local endofline=nospace*newline
+local stripend=(whitespace^1*endofstring)/""
+local normalline=(nospace*((1-space*(newline+endofstring))^1)*nospace)
+local stripempty=endofline^1/""
+local normalempty=endofline^1
+local singleempty=endofline*(endofline^0/"")
+local doubleempty=endofline*endofline^-1*(endofline^0/"")
+local stripstart=stripempty^0
+local p_prune_normal=Cs (stripstart*(stripend+normalline+normalempty )^0 )
+local p_prune_collapse=Cs (stripstart*(stripend+normalline+doubleempty )^0 )
+local p_prune_noempty=Cs (stripstart*(stripend+normalline+singleempty )^0 )
+local p_retain_normal=Cs ((normalline+normalempty )^0 )
+local p_retain_collapse=Cs ((normalline+doubleempty )^0 )
+local p_retain_noempty=Cs ((normalline+singleempty )^0 )
+local striplinepatterns={
+ ["prune"]=p_prune_normal,
+ ["prune and collapse"]=p_prune_collapse,
+ ["prune and no empty"]=p_prune_noempty,
+ ["retain"]=p_retain_normal,
+ ["retain and collapse"]=p_retain_collapse,
+ ["retain and no empty"]=p_retain_noempty,
+ ["collapse"]=patterns.collapser,
+}
+strings.striplinepatterns=striplinepatterns
+function strings.striplines(str,how)
+ return str and lpegmatch(how and striplinepatterns[how] or p_prune_collapse,str) or str
+end
+strings.striplong=strings.striplines
+function strings.nice(str)
+ str=gsub(str,"[:%-+_]+"," ")
+ return str
+end
+local n=0
+local sequenced=table.sequenced
+function string.autodouble(s,sep)
+ if s==nil then
+ return '""'
+ end
+ local t=type(s)
+ if t=="number" then
+ return tostring(s)
+ end
+ if t=="table" then
+ return ('"'..sequenced(s,sep or ",")..'"')
+ end
+ return ('"'..tostring(s)..'"')
+end
+function string.autosingle(s,sep)
+ if s==nil then
+ return "''"
+ end
+ local t=type(s)
+ if t=="number" then
+ return tostring(s)
+ end
+ if t=="table" then
+ return ("'"..sequenced(s,sep or ",").."'")
+ end
+ return ("'"..tostring(s).."'")
+end
+local tracedchars={}
+string.tracedchars=tracedchars
+strings.tracers=tracedchars
+function string.tracedchar(b)
+ if type(b)=="number" then
+ return tracedchars[b] or (utfchar(b).." (U+"..format('%05X',b)..")")
+ else
+ local c=utfbyte(b)
+ return tracedchars[c] or (b.." (U+"..format('%05X',c)..")")
+ end
+end
+function number.signed(i)
+ if i>0 then
+ return "+",i
+ else
+ return "-",-i
+ end
+end
+local zero=P("0")^1/""
+local plus=P("+")/""
+local minus=P("-")
+local separator=S(".")
+local digit=R("09")
+local trailing=zero^1*#S("eE")
+local exponent=(S("eE")*(plus+Cs((minus*zero^0*P(-1))/"")+minus)*zero^0*(P(-1)*Cc("0")+P(1)^1))
+local pattern_a=Cs(minus^0*digit^1*(separator/""*trailing+separator*(trailing+digit)^0)*exponent)
+local pattern_b=Cs((exponent+P(1))^0)
+function number.sparseexponent(f,n)
+ if not n then
+ n=f
+ f="%e"
+ end
+ local tn=type(n)
+ if tn=="string" then
+ local m=tonumber(n)
+ if m then
+ return lpegmatch((f=="%e" or f=="%E") and pattern_a or pattern_b,format(f,m))
+ end
+ elseif tn=="number" then
+ return lpegmatch((f=="%e" or f=="%E") and pattern_a or pattern_b,format(f,n))
+ end
+ return tostring(n)
+end
+local template=[[
+%s
+%s
+return function(%s) return %s end
+]]
+local preamble,environment="",{}
+if _LUAVERSION<5.2 then
+ preamble=[[
+local lpeg=lpeg
+local type=type
+local tostring=tostring
+local tonumber=tonumber
+local format=string.format
+local concat=table.concat
+local signed=number.signed
+local points=number.points
+local basepoints= number.basepoints
+local utfchar=utf.char
+local utfbyte=utf.byte
+local lpegmatch=lpeg.match
+local nspaces=string.nspaces
+local tracedchar=string.tracedchar
+local autosingle=string.autosingle
+local autodouble=string.autodouble
+local sequenced=table.sequenced
+local formattednumber=number.formatted
+local sparseexponent=number.sparseexponent
+ ]]
+else
+ environment={
+ global=global or _G,
+ lpeg=lpeg,
+ type=type,
+ tostring=tostring,
+ tonumber=tonumber,
+ format=string.format,
+ concat=table.concat,
+ signed=number.signed,
+ points=number.points,
+ basepoints=number.basepoints,
+ utfchar=utf.char,
+ utfbyte=utf.byte,
+ lpegmatch=lpeg.match,
+ nspaces=string.nspaces,
+ tracedchar=string.tracedchar,
+ autosingle=string.autosingle,
+ autodouble=string.autodouble,
+ sequenced=table.sequenced,
+ formattednumber=number.formatted,
+ sparseexponent=number.sparseexponent,
+ }
+end
+local arguments={ "a1" }
+setmetatable(arguments,{ __index=function(t,k)
+ local v=t[k-1]..",a"..k
+ t[k]=v
+ return v
+ end
+})
+local prefix_any=C((S("+- .")+R("09"))^0)
+local prefix_tab=P("{")*C((1-P("}"))^0)*P("}")+C((1-R("az","AZ","09","%%"))^0)
+local format_s=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("format('%%%ss',a%s)",f,n)
+ else
+ return format("(a%s or '')",n)
+ end
+end
+local format_S=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("format('%%%ss',tostring(a%s))",f,n)
+ else
+ return format("tostring(a%s)",n)
+ end
+end
+local format_q=function()
+ n=n+1
+ return format("(a%s and format('%%q',a%s) or '')",n,n)
+end
+local format_Q=function()
+ n=n+1
+ return format("format('%%q',tostring(a%s))",n)
+end
+local format_i=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("format('%%%si',a%s)",f,n)
+ else
+ return format("format('%%i',a%s)",n)
+ end
+end
+local format_d=format_i
+local format_I=function(f)
+ n=n+1
+ return format("format('%%s%%%si',signed(a%s))",f,n)
+end
+local format_f=function(f)
+ n=n+1
+ return format("format('%%%sf',a%s)",f,n)
+end
+local format_F=function(f)
+ n=n+1
+ if not f or f=="" then
+ return format("(((a%s > -0.0000000005 and a%s < 0.0000000005) and '0') or format((a%s %% 1 == 0) and '%%i' or '%%.9f',a%s))",n,n,n,n)
+ else
+ return format("format((a%s %% 1 == 0) and '%%i' or '%%%sf',a%s)",n,f,n)
+ end
+end
+local format_g=function(f)
+ n=n+1
+ return format("format('%%%sg',a%s)",f,n)
+end
+local format_G=function(f)
+ n=n+1
+ return format("format('%%%sG',a%s)",f,n)
+end
+local format_e=function(f)
+ n=n+1
+ return format("format('%%%se',a%s)",f,n)
+end
+local format_E=function(f)
+ n=n+1
+ return format("format('%%%sE',a%s)",f,n)
+end
+local format_j=function(f)
+ n=n+1
+ return format("sparseexponent('%%%se',a%s)",f,n)
+end
+local format_J=function(f)
+ n=n+1
+ return format("sparseexponent('%%%sE',a%s)",f,n)
+end
+local format_x=function(f)
+ n=n+1
+ return format("format('%%%sx',a%s)",f,n)
+end
+local format_X=function(f)
+ n=n+1
+ return format("format('%%%sX',a%s)",f,n)
+end
+local format_o=function(f)
+ n=n+1
+ return format("format('%%%so',a%s)",f,n)
+end
+local format_c=function()
+ n=n+1
+ return format("utfchar(a%s)",n)
+end
+local format_C=function()
+ n=n+1
+ return format("tracedchar(a%s)",n)
+end
+local format_r=function(f)
+ n=n+1
+ return format("format('%%%s.0f',a%s)",f,n)
+end
+local format_h=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('0x%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_H=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('0x%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_u=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('u+%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_U=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('U+%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_p=function()
+ n=n+1
+ return format("points(a%s)",n)
+end
+local format_b=function()
+ n=n+1
+ return format("basepoints(a%s)",n)
+end
+local format_t=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("concat(a%s,%q)",n,f)
+ else
+ return format("concat(a%s)",n)
+ end
+end
+local format_T=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("sequenced(a%s,%q)",n,f)
+ else
+ return format("sequenced(a%s)",n)
+ end
+end
+local format_l=function()
+ n=n+1
+ return format("(a%s and 'true' or 'false')",n)
+end
+local format_L=function()
+ n=n+1
+ return format("(a%s and 'TRUE' or 'FALSE')",n)
+end
+local format_N=function()
+ n=n+1
+ return format("tostring(tonumber(a%s) or a%s)",n,n)
+end
+local format_a=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("autosingle(a%s,%q)",n,f)
+ else
+ return format("autosingle(a%s)",n)
+ end
+end
+local format_A=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("autodouble(a%s,%q)",n,f)
+ else
+ return format("autodouble(a%s)",n)
+ end
+end
+local format_w=function(f)
+ n=n+1
+ f=tonumber(f)
+ if f then
+ return format("nspaces[%s+a%s]",f,n)
+ else
+ return format("nspaces[a%s]",n)
+ end
+end
+local format_W=function(f)
+ return format("nspaces[%s]",tonumber(f) or 0)
+end
+local digit=patterns.digit
+local period=patterns.period
+local three=digit*digit*digit
+local splitter=Cs (
+ (((1-(three^1*period))^1+C(three))*(Carg(1)*three)^1+C((1-period)^1))*(P(1)/""*Carg(2))*C(2)
+)
+patterns.formattednumber=splitter
+function number.formatted(n,sep1,sep2)
+ local s=type(s)=="string" and n or format("%0.2f",n)
+ if sep1==true then
+ return lpegmatch(splitter,s,1,".",",")
+ elseif sep1=="." then
+ return lpegmatch(splitter,s,1,sep1,sep2 or ",")
+ elseif sep1=="," then
+ return lpegmatch(splitter,s,1,sep1,sep2 or ".")
+ else
+ return lpegmatch(splitter,s,1,sep1 or ",",sep2 or ".")
+ end
+end
+local format_m=function(f)
+ n=n+1
+ if not f or f=="" then
+ f=","
+ end
+ return format([[formattednumber(a%s,%q,".")]],n,f)
+end
+local format_M=function(f)
+ n=n+1
+ if not f or f=="" then
+ f="."
+ end
+ return format([[formattednumber(a%s,%q,",")]],n,f)
+end
+local format_z=function(f)
+ n=n+(tonumber(f) or 1)
+ return "''"
+end
+local format_rest=function(s)
+ return format("%q",s)
+end
+local format_extension=function(extensions,f,name)
+ local extension=extensions[name] or "tostring(%s)"
+ local f=tonumber(f) or 1
+ if f==0 then
+ return extension
+ elseif f==1 then
+ n=n+1
+ local a="a"..n
+ return format(extension,a,a)
+ elseif f<0 then
+ local a="a"..(n+f+1)
+ return format(extension,a,a)
+ else
+ local t={}
+ for i=1,f do
+ n=n+1
+ t[#t+1]="a"..n
+ end
+ return format(extension,unpack(t))
+ end
+end
+local builder=Cs { "start",
+ start=(
+ (
+ P("%")/""*(
+ V("!")
++V("s")+V("q")+V("i")+V("d")+V("f")+V("F")+V("g")+V("G")+V("e")+V("E")+V("x")+V("X")+V("o")
++V("c")+V("C")+V("S")
++V("Q")
++V("N")
++V("r")+V("h")+V("H")+V("u")+V("U")+V("p")+V("b")+V("t")+V("T")+V("l")+V("L")+V("I")+V("w")
++V("W")
++V("a")
++V("A")
++V("j")+V("J")
++V("m")+V("M")
++V("z")
++V("*")
+ )+V("*")
+ )*(P(-1)+Carg(1))
+ )^0,
+ ["s"]=(prefix_any*P("s"))/format_s,
+ ["q"]=(prefix_any*P("q"))/format_q,
+ ["i"]=(prefix_any*P("i"))/format_i,
+ ["d"]=(prefix_any*P("d"))/format_d,
+ ["f"]=(prefix_any*P("f"))/format_f,
+ ["F"]=(prefix_any*P("F"))/format_F,
+ ["g"]=(prefix_any*P("g"))/format_g,
+ ["G"]=(prefix_any*P("G"))/format_G,
+ ["e"]=(prefix_any*P("e"))/format_e,
+ ["E"]=(prefix_any*P("E"))/format_E,
+ ["x"]=(prefix_any*P("x"))/format_x,
+ ["X"]=(prefix_any*P("X"))/format_X,
+ ["o"]=(prefix_any*P("o"))/format_o,
+ ["S"]=(prefix_any*P("S"))/format_S,
+ ["Q"]=(prefix_any*P("Q"))/format_S,
+ ["N"]=(prefix_any*P("N"))/format_N,
+ ["c"]=(prefix_any*P("c"))/format_c,
+ ["C"]=(prefix_any*P("C"))/format_C,
+ ["r"]=(prefix_any*P("r"))/format_r,
+ ["h"]=(prefix_any*P("h"))/format_h,
+ ["H"]=(prefix_any*P("H"))/format_H,
+ ["u"]=(prefix_any*P("u"))/format_u,
+ ["U"]=(prefix_any*P("U"))/format_U,
+ ["p"]=(prefix_any*P("p"))/format_p,
+ ["b"]=(prefix_any*P("b"))/format_b,
+ ["t"]=(prefix_tab*P("t"))/format_t,
+ ["T"]=(prefix_tab*P("T"))/format_T,
+ ["l"]=(prefix_any*P("l"))/format_l,
+ ["L"]=(prefix_any*P("L"))/format_L,
+ ["I"]=(prefix_any*P("I"))/format_I,
+ ["w"]=(prefix_any*P("w"))/format_w,
+ ["W"]=(prefix_any*P("W"))/format_W,
+ ["j"]=(prefix_any*P("j"))/format_j,
+ ["J"]=(prefix_any*P("J"))/format_J,
+ ["m"]=(prefix_tab*P("m"))/format_m,
+ ["M"]=(prefix_tab*P("M"))/format_M,
+ ["z"]=(prefix_any*P("z"))/format_z,
+ ["a"]=(prefix_any*P("a"))/format_a,
+ ["A"]=(prefix_any*P("A"))/format_A,
+ ["*"]=Cs(((1-P("%"))^1+P("%%")/"%%")^1)/format_rest,
+ ["!"]=Carg(2)*prefix_any*P("!")*C((1-P("!"))^1)*P("!")/format_extension,
+}
+local direct=Cs (
+ P("%")*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*P(-1)/[[local format = string.format return function(str) return format("%0",str) end]]
+)
+local function make(t,str)
+ local f
+ local p
+ local p=lpegmatch(direct,str)
+ if p then
+ f=loadstripped(p)()
+ else
+ n=0
+ p=lpegmatch(builder,str,1,t._connector_,t._extensions_)
+ if n>0 then
+ p=format(template,preamble,t._preamble_,arguments[n],p)
+ f=loadstripped(p,t._environment_)()
+ else
+ f=function() return str end
+ end
+ end
+ t[str]=f
+ return f
+end
+local function use(t,fmt,...)
+ return t[fmt](...)
+end
+strings.formatters={}
+if _LUAVERSION<5.2 then
+ function strings.formatters.new(noconcat)
+ local t={ _type_="formatter",_connector_=noconcat and "," or "..",_extensions_={},_preamble_=preamble,_environment_={} }
+ setmetatable(t,{ __index=make,__call=use })
+ return t
+ end
+else
+ function strings.formatters.new(noconcat)
+ local e={}
+ for k,v in next,environment do
+ e[k]=v
+ end
+ local t={ _type_="formatter",_connector_=noconcat and "," or "..",_extensions_={},_preamble_="",_environment_=e }
+ setmetatable(t,{ __index=make,__call=use })
+ return t
+ end
+end
+local formatters=strings.formatters.new()
+string.formatters=formatters
+string.formatter=function(str,...) return formatters[str](...) end
+local function add(t,name,template,preamble)
+ if type(t)=="table" and t._type_=="formatter" then
+ t._extensions_[name]=template or "%s"
+ if type(preamble)=="string" then
+ t._preamble_=preamble.."\n"..t._preamble_
+ elseif type(preamble)=="table" then
+ for k,v in next,preamble do
+ t._environment_[k]=v
+ end
+ end
+ end
+end
+strings.formatters.add=add
+patterns.xmlescape=Cs((P("<")/"&lt;"+P(">")/"&gt;"+P("&")/"&amp;"+P('"')/"&quot;"+P(1))^0)
+patterns.texescape=Cs((C(S("#$%\\{}"))/"\\%1"+P(1))^0)
+patterns.luaescape=Cs(((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0)
+patterns.luaquoted=Cs(Cc('"')*((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0*Cc('"'))
+if _LUAVERSION<5.2 then
+ add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],"local xmlescape = lpeg.patterns.xmlescape")
+ add(formatters,"tex",[[lpegmatch(texescape,%s)]],"local texescape = lpeg.patterns.texescape")
+ add(formatters,"lua",[[lpegmatch(luaescape,%s)]],"local luaescape = lpeg.patterns.luaescape")
+else
+ add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],{ xmlescape=lpeg.patterns.xmlescape })
+ add(formatters,"tex",[[lpegmatch(texescape,%s)]],{ texescape=lpeg.patterns.texescape })
+ add(formatters,"lua",[[lpegmatch(luaescape,%s)]],{ luaescape=lpeg.patterns.luaescape })
+end
+local dquote=patterns.dquote
+local equote=patterns.escaped+dquote/'\\"'+1
+local space=patterns.space
+local cquote=Cc('"')
+local pattern=Cs(dquote*(equote-P(-2))^0*dquote)
++Cs(cquote*(equote-space)^0*space*equote^0*cquote)
+function string.optionalquoted(str)
+ return lpegmatch(pattern,str) or str
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luat-basics-gen']={
+ version=1.100,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local dummyfunction=function()
+end
+local dummyreporter=function(c)
+ return function(...)
+ (texio.reporter or texio.write_nl)(c.." : "..string.formatters(...))
+ end
+end
+statistics={
+ register=dummyfunction,
+ starttiming=dummyfunction,
+ stoptiming=dummyfunction,
+ elapsedtime=nil,
+}
+directives={
+ register=dummyfunction,
+ enable=dummyfunction,
+ disable=dummyfunction,
+}
+trackers={
+ register=dummyfunction,
+ enable=dummyfunction,
+ disable=dummyfunction,
+}
+experiments={
+ register=dummyfunction,
+ enable=dummyfunction,
+ disable=dummyfunction,
+}
+storage={
+ register=dummyfunction,
+ shared={},
+}
+logs={
+ new=dummyreporter,
+ reporter=dummyreporter,
+ messenger=dummyreporter,
+ report=dummyfunction,
+}
+callbacks={
+ register=function(n,f) return callback.register(n,f) end,
+}
+utilities={
+ storage={
+ allocate=function(t) return t or {} end,
+ mark=function(t) return t or {} end,
+ },
+}
+characters=characters or {
+ data={}
+}
+texconfig.kpse_init=true
+resolvers=resolvers or {}
+local remapper={
+ otf="opentype fonts",
+ ttf="truetype fonts",
+ ttc="truetype fonts",
+ dfont="truetype fonts",
+ cid="cid maps",
+ cidmap="cid maps",
+ fea="font feature files",
+ pfa="type1 fonts",
+ pfb="type1 fonts",
+ afm="afm",
+}
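+-- minimal kpse based stand-in for the context resolvers: the remapper above maps
+-- file suffixes to kpathsea format names before calling kpse.find_file.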
+function resolvers.findfile(name,fileformat)
+ name=string.gsub(name,"\\","/")
+ if not fileformat or fileformat=="" then
+ fileformat=file.suffix(name)
+ if fileformat=="" then
+ fileformat="tex"
+ end
+ end
+ fileformat=string.lower(fileformat)
+ fileformat=remapper[fileformat] or fileformat
+ local found=kpse.find_file(name,fileformat)
+ if not found or found=="" then
+ found=kpse.find_file(name,"other text files")
+ end
+ return found
+end
+resolvers.findbinfile=resolvers.findfile
+function resolvers.loadbinfile(filename,filetype)
+ local data=io.loaddata(filename)
+ return true,data,#data
+end
+function resolvers.resolve(s)
+ return s
+end
+function resolvers.unresolve(s)
+ return s
+end
+caches={}
+local writable=nil
+local readables={}
+local usingjit=jit
+if not caches.namespace or caches.namespace=="" or caches.namespace=="context" then
+ caches.namespace='generic'
+end
+do
+ local cachepaths=kpse.expand_var('$TEXMFCACHE') or ""
+ if cachepaths=="" or cachepaths=="$TEXMFCACHE" then
+ cachepaths=kpse.expand_var('$TEXMFVAR') or ""
+ end
+ if cachepaths=="" or cachepaths=="$TEXMFVAR" then
+ cachepaths=kpse.expand_var('$VARTEXMF') or ""
+ end
+ if cachepaths=="" then
+ local fallbacks={ "TMPDIR","TEMPDIR","TMP","TEMP","HOME","HOMEPATH" }
+ for i=1,#fallbacks do
+ cachepaths=os.getenv(fallbacks[i]) or ""
+ if cachepaths~="" and lfs.isdir(cachepaths) then

+ break
+ end
+ end
+ end
+ if cachepaths=="" then
+ cachepaths="."
+ end
+ cachepaths=string.split(cachepaths,os.type=="windows" and ";" or ":")
+ for i=1,#cachepaths do
+ local cachepath=cachepaths[i]
+ if not lfs.isdir(cachepath) then
+ lfs.mkdirs(cachepath)
+ if lfs.isdir(cachepath) then
+ texio.write(string.format("(created cache path: %s)",cachepath))
+ end
+ end
+ if file.is_writable(cachepath) then
+ writable=file.join(cachepath,"luatex-cache")
+ lfs.mkdir(writable)
+ writable=file.join(writable,caches.namespace)
+ lfs.mkdir(writable)
+ break
+ end
+ end
+ for i=1,#cachepaths do
+ if file.is_readable(cachepaths[i]) then
+ readables[#readables+1]=file.join(cachepaths[i],"luatex-cache",caches.namespace)
+ end
+ end
+ if not writable then
+ texio.write_nl("quiting: fix your writable cache path")
+ os.exit()
+ elseif #readables==0 then
+ texio.write_nl("quiting: fix your readable cache path")
+ os.exit()
+ elseif #readables==1 and readables[1]==writable then
+ texio.write(string.format("(using cache: %s)",writable))
+ else
+ texio.write(string.format("(using write cache: %s)",writable))
+ texio.write(string.format("(using read cache: %s)",table.concat(readables," ")))
+ end
+end
+function caches.getwritablepath(category,subcategory)
+ local path=file.join(writable,category)
+ lfs.mkdir(path)
+ path=file.join(path,subcategory)
+ lfs.mkdir(path)
+ return path
+end
+function caches.getreadablepaths(category,subcategory)
+ local t={}
+ for i=1,#readables do
+ t[i]=file.join(readables[i],category,subcategory)
+ end
+ return t
+end
+local function makefullname(path,name)
+ if path and path~="" then
+ return file.addsuffix(file.join(path,name),"lua"),file.addsuffix(file.join(path,name),usingjit and "lub" or "luc")
+ end
+end
+function caches.is_writable(path,name)
+ local fullname=makefullname(path,name)
+ return fullname and file.is_writable(fullname)
+end
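+-- caches.loaddata prefers the compiled file (luc, or lub under luajit) and falls
+-- back to the plain lua file; a missing compiled file is (re)generated from the
+-- lua source via caches.compile when that function is available.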
+function caches.loaddata(paths,name)
+ for i=1,#paths do
+ local data=false
+ local luaname,lucname=makefullname(paths[i],name)
+ if lucname and not lfs.isfile(lucname) and type(caches.compile)=="function" then
+ texio.write(string.format("(compiling luc: %s)",lucname))
+ data=loadfile(luaname)
+ if data then
+ data=data()
+ end
+ if data then
+ caches.compile(data,luaname,lucname)
+ return data
+ end
+ end
+ if lucname and lfs.isfile(lucname) then
+ texio.write(string.format("(load luc: %s)",lucname))
+ data=loadfile(lucname)
+ if data then
+ data=data()
+ end
+ if data then
+ return data
+ else
+ texio.write(string.format("(loading failed: %s)",lucname))
+ end
+ end
+ if luaname and lfs.isfile(luaname) then
+ texio.write(string.format("(load lua: %s)",luaname))
+ data=loadfile(luaname)
+ if data then
+ data=data()
+ end
+ if data then
+ return data
+ end
+ end
+ end
+end
+function caches.savedata(path,name,data)
+ local luaname,lucname=makefullname(path,name)
+ if luaname then
+ texio.write(string.format("(save: %s)",luaname))
+ table.tofile(luaname,data,true)
+ if lucname and type(caches.compile)=="function" then
+ os.remove(lucname)
+ texio.write(string.format("(save: %s)",lucname))
+ caches.compile(data,luaname,lucname)
+ end
+ end
+end
+function caches.compile(data,luaname,lucname)
+ local d=io.loaddata(luaname)
+ if not d or d=="" then
+ d=table.serialize(data,true)
+ end
+ if d and d~="" then
+ local f=io.open(lucname,'wb')
+ if f then
+ local s=loadstring(d)
+ if s then
+ f:write(string.dump(s,true))
+ end
+ f:close()
+ end
+ end
+end
+function table.setmetatableindex(t,f)
+ setmetatable(t,{ __index=f })
+end
+arguments={}
+if arg then
+ for i=1,#arg do
+ local k,v=string.match(arg[i],"^%-%-([^=]+)=?(.-)$")
+ if k and v then
+ arguments[k]=v
+ end
+ end
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['data-con']={
+ version=1.100,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local format,lower,gsub=string.format,string.lower,string.gsub
+local trace_cache=false trackers.register("resolvers.cache",function(v) trace_cache=v end)
+local trace_containers=false trackers.register("resolvers.containers",function(v) trace_containers=v end)
+local trace_storage=false trackers.register("resolvers.storage",function(v) trace_storage=v end)
+containers=containers or {}
+local containers=containers
+containers.usecache=true
+local report_containers=logs.reporter("resolvers","containers")
+local allocated={}
+local mt={
+ __index=function(t,k)
+ if k=="writable" then
+ local writable=caches.getwritablepath(t.category,t.subcategory) or { "." }
+ t.writable=writable
+ return writable
+ elseif k=="readables" then
+ local readables=caches.getreadablepaths(t.category,t.subcategory) or { "." }
+ t.readables=readables
+ return readables
+ end
+ end,
+ __storage__=true
+}
+function containers.define(category,subcategory,version,enabled)
+ if category and subcategory then
+ local c=allocated[category]
+ if not c then
+ c={}
+ allocated[category]=c
+ end
+ local s=c[subcategory]
+ if not s then
+ s={
+ category=category,
+ subcategory=subcategory,
+ storage={},
+ enabled=enabled,
+ version=version or math.pi,
+ trace=false,
+ }
+ setmetatable(s,mt)
+ c[subcategory]=s
+ end
+ return s
+ end
+end
+function containers.is_usable(container,name)
+ return container.enabled and caches and caches.is_writable(container.writable,name)
+end
+function containers.is_valid(container,name)
+ if name and name~="" then
+ local storage=container.storage[name]
+ return storage and storage.cache_version==container.version
+ else
+ return false
+ end
+end
+function containers.read(container,name)
+ local storage=container.storage
+ local stored=storage[name]
+ if not stored and container.enabled and caches and containers.usecache then
+ stored=caches.loaddata(container.readables,name)
+ if stored and stored.cache_version==container.version then
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","load",container.subcategory,name)
+ end
+ else
+ stored=nil
+ end
+ storage[name]=stored
+ elseif stored then
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","reuse",container.subcategory,name)
+ end
+ end
+ return stored
+end
+function containers.write(container,name,data)
+ if data then
+ data.cache_version=container.version
+ if container.enabled and caches then
+ local unique,shared=data.unique,data.shared
+ data.unique,data.shared=nil,nil
+ caches.savedata(container.writable,name,data)
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","save",container.subcategory,name)
+ end
+ data.unique,data.shared=unique,shared
+ end
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","store",container.subcategory,name)
+ end
+ container.storage[name]=data
+ end
+ return data
+end
+function containers.content(container,name)
+ return container.storage[name]
+end
+function containers.cleanname(name)
+ return (gsub(lower(name),"[^%w\128-\255]+","-"))
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-fonts-nod']={
+ version=1.001,
+ comment="companion to luatex-fonts.lua",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+if tex.attribute[0]~=0 then
+ texio.write_nl("log","!")
+ texio.write_nl("log","! Attribute 0 is reserved for ConTeXt's font feature management and has to be")
+ texio.write_nl("log","! set to zero. Also, some attributes in the range 1-255 are used for special")
+ texio.write_nl("log","! purposes so setting them at the TeX end might break the font handler.")
+ texio.write_nl("log","!")
+ tex.attribute[0]=0
+end
+attributes=attributes or {}
+attributes.unsetvalue=-0x7FFFFFFF
+local numbers,last={},127
+attributes.private=attributes.private or function(name)
+ local number=numbers[name]
+ if not number then
+ if last<255 then
+ last=last+1
+ end
+ number=last
+ numbers[name]=number
+ end
+ return number
+end
+nodes={}
+nodes.pool={}
+nodes.handlers={}
+local nodecodes={} for k,v in next,node.types () do nodecodes[string.gsub(v,"_","")]=k end
+local whatcodes={} for k,v in next,node.whatsits() do whatcodes[string.gsub(v,"_","")]=k end
+local glyphcodes={ [0]="character","glyph","ligature","ghost","left","right" }
+local disccodes={ [0]="discretionary","explicit","automatic","regular","first","second" }
+nodes.nodecodes=nodecodes
+nodes.whatcodes=whatcodes
+nodes.whatsitcodes=whatcodes
+nodes.glyphcodes=glyphcodes
+nodes.disccodes=disccodes
+local free_node=node.free
+local remove_node=node.remove
+local new_node=node.new
+local traverse_id=node.traverse_id
+nodes.handlers.protectglyphs=node.protect_glyphs
+nodes.handlers.unprotectglyphs=node.unprotect_glyphs
+local math_code=nodecodes.math
+local end_of_math=node.end_of_math
+function node.end_of_math(n)
+ if n.id==math_code and n.subtype==1 then
+ return n
+ else
+ return end_of_math(n)
+ end
+end
+function nodes.remove(head,current,free_too)
+ local t=current
+ head,current=remove_node(head,current)
+ if t then
+ if free_too then
+ free_node(t)
+ t=nil
+ else
+ t.next,t.prev=nil,nil
+ end
+ end
+ return head,current,t
+end
+function nodes.delete(head,current)
+ return nodes.remove(head,current,true)
+end
+function nodes.pool.kern(k)
+ local n=new_node("kern",1)
+ n.kern=k
+ return n
+end
+local getfield=node.getfield or function(n,tag) return n[tag] end
+local setfield=node.setfield or function(n,tag,value) n[tag]=value end
+nodes.getfield=getfield
+nodes.setfield=setfield
+nodes.getattr=getfield
+nodes.setattr=setfield
+if node.getid then nodes.getid=node.getid else function nodes.getid (n) return getfield(n,"id") end end
+if node.getsubtype then nodes.getsubtype=node.getsubtype else function nodes.getsubtype(n) return getfield(n,"subtype") end end
+if node.getnext then nodes.getnext=node.getnext else function nodes.getnext (n) return getfield(n,"next") end end
+if node.getprev then nodes.getprev=node.getprev else function nodes.getprev (n) return getfield(n,"prev") end end
+if node.getchar then nodes.getchar=node.getchar else function nodes.getchar (n) return getfield(n,"char") end end
+if node.getfont then nodes.getfont=node.getfont else function nodes.getfont (n) return getfield(n,"font") end end
+if node.getlist then nodes.getlist=node.getlist else function nodes.getlist (n) return getfield(n,"list") end end
+function nodes.tonut (n) return n end
+function nodes.tonode(n) return n end
+nodes.tostring=node.tostring or tostring
+nodes.copy=node.copy
+nodes.copy_list=node.copy_list
+nodes.delete=node.delete
+nodes.dimensions=node.dimensions
+nodes.end_of_math=node.end_of_math
+nodes.flush_list=node.flush_list
+nodes.flush_node=node.flush_node
+nodes.free=node.free
+nodes.insert_after=node.insert_after
+nodes.insert_before=node.insert_before
+nodes.hpack=node.hpack
+nodes.new=node.new
+nodes.tail=node.tail
+nodes.traverse=node.traverse
+nodes.traverse_id=node.traverse_id
+nodes.slide=node.slide
+nodes.vpack=node.vpack
+nodes.first_glyph=node.first_glyph
+nodes.first_character=node.first_character
+nodes.has_glyph=node.has_glyph or node.first_glyph
+nodes.current_attr=node.current_attr
+nodes.do_ligature_n=node.do_ligature_n
+nodes.has_field=node.has_field
+nodes.last_node=node.last_node
+nodes.usedlist=node.usedlist
+nodes.protrusion_skippable=node.protrusion_skippable
+nodes.write=node.write
+nodes.has_attribute=node.has_attribute
+nodes.set_attribute=node.set_attribute
+nodes.unset_attribute=node.unset_attribute
+nodes.protect_glyphs=node.protect_glyphs
+nodes.unprotect_glyphs=node.unprotect_glyphs
+nodes.kerning=node.kerning
+nodes.ligaturing=node.ligaturing
+nodes.mlist_to_hlist=node.mlist_to_hlist
+nodes.nuts=nodes
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-ini']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local allocate=utilities.storage.allocate
+local report_defining=logs.reporter("fonts","defining")
+fonts=fonts or {}
+local fonts=fonts
+fonts.hashes={ identifiers=allocate() }
+fonts.tables=fonts.tables or {}
+fonts.helpers=fonts.helpers or {}
+fonts.tracers=fonts.tracers or {}
+fonts.specifiers=fonts.specifiers or {}
+fonts.analyzers={}
+fonts.readers={}
+fonts.definers={ methods={} }
+fonts.loggers={ register=function() end }
+fontloader.totable=fontloader.to_table
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-con']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local next,tostring,rawget=next,tostring,rawget
+local format,match,lower,gsub=string.format,string.match,string.lower,string.gsub
+local utfbyte=utf.byte
+local sort,insert,concat,sortedkeys,serialize,fastcopy=table.sort,table.insert,table.concat,table.sortedkeys,table.serialize,table.fastcopy
+local derivetable=table.derive
+local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end)
+local trace_scaling=false trackers.register("fonts.scaling",function(v) trace_scaling=v end)
+local report_defining=logs.reporter("fonts","defining")
+local fonts=fonts
+local constructors=fonts.constructors or {}
+fonts.constructors=constructors
+local handlers=fonts.handlers or {}
+fonts.handlers=handlers
+local allocate=utilities.storage.allocate
+local setmetatableindex=table.setmetatableindex
+constructors.dontembed=allocate()
+constructors.autocleanup=true
+constructors.namemode="fullpath"
+constructors.version=1.01
+constructors.cache=containers.define("fonts","constructors",constructors.version,false)
+constructors.privateoffset=0xF0000
+constructors.keys={
+ properties={
+ encodingbytes="number",
+ embedding="number",
+ cidinfo={},
+ format="string",
+ fontname="string",
+ fullname="string",
+ filename="filename",
+ psname="string",
+ name="string",
+ virtualized="boolean",
+ hasitalics="boolean",
+ autoitalicamount="basepoints",
+ nostackmath="boolean",
+ noglyphnames="boolean",
+ mode="string",
+ hasmath="boolean",
+ mathitalics="boolean",
+ textitalics="boolean",
+ finalized="boolean",
+ },
+ parameters={
+ mathsize="number",
+ scriptpercentage="float",
+ scriptscriptpercentage="float",
+ units="cardinal",
+ designsize="scaledpoints",
+ expansion={
+ stretch="integerscale",
+ shrink="integerscale",
+ step="integerscale",
+ auto="boolean",
+ },
+ protrusion={
+ auto="boolean",
+ },
+ slantfactor="float",
+ extendfactor="float",
+ factor="float",
+ hfactor="float",
+ vfactor="float",
+ size="scaledpoints",
+ units="scaledpoints",
+ scaledpoints="scaledpoints",
+ slantperpoint="scaledpoints",
+ spacing={
+ width="scaledpoints",
+ stretch="scaledpoints",
+ shrink="scaledpoints",
+ extra="scaledpoints",
+ },
+ xheight="scaledpoints",
+ quad="scaledpoints",
+ ascender="scaledpoints",
+ descender="scaledpoints",
+ synonyms={
+ space="spacing.width",
+ spacestretch="spacing.stretch",
+ spaceshrink="spacing.shrink",
+ extraspace="spacing.extra",
+ x_height="xheight",
+ space_stretch="spacing.stretch",
+ space_shrink="spacing.shrink",
+ extra_space="spacing.extra",
+ em="quad",
+ ex="xheight",
+ slant="slantperpoint",
+ },
+ },
+ description={
+ width="basepoints",
+ height="basepoints",
+ depth="basepoints",
+ boundingbox={},
+ },
+ character={
+ width="scaledpoints",
+ height="scaledpoints",
+ depth="scaledpoints",
+ italic="scaledpoints",
+ },
+}
+local designsizes=allocate()
+constructors.designsizes=designsizes
+local loadedfonts=allocate()
+constructors.loadedfonts=loadedfonts
+local factors={
+ pt=65536.0,
+ bp=65781.8,
+}
+function constructors.setfactor(f)
+ constructors.factor=factors[f or 'pt'] or factors.pt
+end
+constructors.setfactor()
+function constructors.scaled(scaledpoints,designsize)
+ if scaledpoints<0 then
+ if designsize then
+ local factor=constructors.factor
+ if designsize>factor then
+ return (- scaledpoints/1000)*designsize
+ else
+ return (- scaledpoints/1000)*designsize*factor
+ end
+ else
+ return (- scaledpoints/1000)*10*constructors.factor
+ end
+ else
+ return scaledpoints
+ end
+end
+function constructors.cleanuptable(tfmdata)
+ if constructors.autocleanup and tfmdata.properties.virtualized then
+ for k,v in next,tfmdata.characters do
+ if v.commands then v.commands=nil end
+ end
+ end
+end
+function constructors.calculatescale(tfmdata,scaledpoints)
+ local parameters=tfmdata.parameters
+ if scaledpoints<0 then
+ scaledpoints=(- scaledpoints/1000)*(tfmdata.designsize or parameters.designsize)
+ end
+ return scaledpoints,scaledpoints/(parameters.units or 1000)
+end
+local unscaled={
+ ScriptPercentScaleDown=true,
+ ScriptScriptPercentScaleDown=true,
+ RadicalDegreeBottomRaisePercent=true
+}
+function constructors.assignmathparameters(target,original)
+ local mathparameters=original.mathparameters
+ if mathparameters and next(mathparameters) then
+ local targetparameters=target.parameters
+ local targetproperties=target.properties
+ local targetmathparameters={}
+ local factor=targetproperties.math_is_scaled and 1 or targetparameters.factor
+ for name,value in next,mathparameters do
+ if unscaled[name] then
+ targetmathparameters[name]=value
+ else
+ targetmathparameters[name]=value*factor
+ end
+ end
+ if not targetmathparameters.FractionDelimiterSize then
+ targetmathparameters.FractionDelimiterSize=1.01*targetparameters.size
+ end
+ if not mathparameters.FractionDelimiterDisplayStyleSize then
+ targetmathparameters.FractionDelimiterDisplayStyleSize=2.40*targetparameters.size
+ end
+ target.mathparameters=targetmathparameters
+ end
+end
+function constructors.beforecopyingcharacters(target,original)
+end
+function constructors.aftercopyingcharacters(target,original)
+end
+constructors.sharefonts=false
+constructors.nofsharedfonts=0
+local sharednames={}
+function constructors.trytosharefont(target,tfmdata)
+ if constructors.sharefonts then
+ local characters=target.characters
+ local n=1
+ local t={ target.psname }
+ local u=sortedkeys(characters)
+ for i=1,#u do
+ local k=u[i]
+ n=n+1;t[n]=k
+ n=n+1;t[n]=characters[k].index or k
+ end
+ local h=md5.HEX(concat(t," "))
+ local s=sharednames[h]
+ if s then
+ if trace_defining then
+ report_defining("font %a uses backend resources of font %a",target.fullname,s)
+ end
+ target.fullname=s
+ constructors.nofsharedfonts=constructors.nofsharedfonts+1
+ target.properties.sharedwith=s
+ else
+ sharednames[h]=target.fullname
+ end
+ end
+end
+function constructors.enhanceparameters(parameters)
+ local xheight=parameters.x_height
+ local quad=parameters.quad
+ local space=parameters.space
+ local stretch=parameters.space_stretch
+ local shrink=parameters.space_shrink
+ local extra=parameters.extra_space
+ local slant=parameters.slant
+ parameters.xheight=xheight
+ parameters.spacestretch=stretch
+ parameters.spaceshrink=shrink
+ parameters.extraspace=extra
+ parameters.em=quad
+ parameters.ex=xheight
+ parameters.slantperpoint=slant
+ parameters.spacing={
+ width=space,
+ stretch=stretch,
+ shrink=shrink,
+ extra=extra,
+ }
+end
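+-- constructors.scale produces the font table that is actually passed to TeX: it
+-- copies and scales parameters, characters, math data and (for virtual fonts)
+-- commands from the unscaled tfmdata to the requested size.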
+function constructors.scale(tfmdata,specification)
+ local target={}
+ if tonumber(specification) then
+ specification={ size=specification }
+ end
+ target.specification=specification
+ local scaledpoints=specification.size
+ local relativeid=specification.relativeid
+ local properties=tfmdata.properties or {}
+ local goodies=tfmdata.goodies or {}
+ local resources=tfmdata.resources or {}
+ local descriptions=tfmdata.descriptions or {}
+ local characters=tfmdata.characters or {}
+ local changed=tfmdata.changed or {}
+ local shared=tfmdata.shared or {}
+ local parameters=tfmdata.parameters or {}
+ local mathparameters=tfmdata.mathparameters or {}
+ local targetcharacters={}
+ local targetdescriptions=derivetable(descriptions)
+ local targetparameters=derivetable(parameters)
+ local targetproperties=derivetable(properties)
+ local targetgoodies=goodies
+ target.characters=targetcharacters
+ target.descriptions=targetdescriptions
+ target.parameters=targetparameters
+ target.properties=targetproperties
+ target.goodies=targetgoodies
+ target.shared=shared
+ target.resources=resources
+ target.unscaled=tfmdata
+ local mathsize=tonumber(specification.mathsize) or 0
+ local textsize=tonumber(specification.textsize) or scaledpoints
+ local forcedsize=tonumber(parameters.mathsize ) or 0
+ local extrafactor=tonumber(specification.factor ) or 1
+ if (mathsize==2 or forcedsize==2) and parameters.scriptpercentage then
+ scaledpoints=parameters.scriptpercentage*textsize/100
+ elseif (mathsize==3 or forcedsize==3) and parameters.scriptscriptpercentage then
+ scaledpoints=parameters.scriptscriptpercentage*textsize/100
+ elseif forcedsize>1000 then
+ scaledpoints=forcedsize
+ end
+ targetparameters.mathsize=mathsize
+ targetparameters.textsize=textsize
+ targetparameters.forcedsize=forcedsize
+ targetparameters.extrafactor=extrafactor
+ local tounicode=resources.tounicode
+ local defaultwidth=resources.defaultwidth or 0
+ local defaultheight=resources.defaultheight or 0
+ local defaultdepth=resources.defaultdepth or 0
+ local units=parameters.units or 1000
+ if target.fonts then
+ target.fonts=fastcopy(target.fonts)
+ end
+ targetproperties.language=properties.language or "dflt"
+ targetproperties.script=properties.script or "dflt"
+ targetproperties.mode=properties.mode or "base"
+ local askedscaledpoints=scaledpoints
+ local scaledpoints,delta=constructors.calculatescale(tfmdata,scaledpoints,nil,specification)
+ local hdelta=delta
+ local vdelta=delta
+ target.designsize=parameters.designsize
+ target.units_per_em=units
+ local direction=properties.direction or tfmdata.direction or 0
+ target.direction=direction
+ properties.direction=direction
+ target.size=scaledpoints
+ target.encodingbytes=properties.encodingbytes or 1
+ target.embedding=properties.embedding or "subset"
+ target.tounicode=1
+ target.cidinfo=properties.cidinfo
+ target.format=properties.format
+ local fontname=properties.fontname or tfmdata.fontname
+ local fullname=properties.fullname or tfmdata.fullname
+ local filename=properties.filename or tfmdata.filename
+ local psname=properties.psname or tfmdata.psname
+ local name=properties.name or tfmdata.name
+ if not psname or psname=="" then
+ psname=fontname or (fullname and fonts.names.cleanname(fullname))
+ end
+ target.fontname=fontname
+ target.fullname=fullname
+ target.filename=filename
+ target.psname=psname
+ target.name=name
+ properties.fontname=fontname
+ properties.fullname=fullname
+ properties.filename=filename
+ properties.psname=psname
+ properties.name=name
+ local expansion=parameters.expansion
+ if expansion then
+ target.stretch=expansion.stretch
+ target.shrink=expansion.shrink
+ target.step=expansion.step
+ target.auto_expand=expansion.auto
+ end
+ local protrusion=parameters.protrusion
+ if protrusion then
+ target.auto_protrude=protrusion.auto
+ end
+ local extendfactor=parameters.extendfactor or 0
+ if extendfactor~=0 and extendfactor~=1 then
+ hdelta=hdelta*extendfactor
+ target.extend=extendfactor*1000
+ else
+ target.extend=1000
+ end
+ local slantfactor=parameters.slantfactor or 0
+ if slantfactor~=0 then
+ target.slant=slantfactor*1000
+ else
+ target.slant=0
+ end
+ targetparameters.factor=delta
+ targetparameters.hfactor=hdelta
+ targetparameters.vfactor=vdelta
+ targetparameters.size=scaledpoints
+ targetparameters.units=units
+ targetparameters.scaledpoints=askedscaledpoints
+ local isvirtual=properties.virtualized or tfmdata.type=="virtual"
+ local hasquality=target.auto_expand or target.auto_protrude
+ local hasitalics=properties.hasitalics
+ local autoitalicamount=properties.autoitalicamount
+ local stackmath=not properties.nostackmath
+ local nonames=properties.noglyphnames
+ local nodemode=properties.mode=="node"
+ if changed and not next(changed) then
+ changed=false
+ end
+ target.type=isvirtual and "virtual" or "real"
+ target.postprocessors=tfmdata.postprocessors
+ local targetslant=(parameters.slant or parameters[1] or 0)*factors.pt
+ local targetspace=(parameters.space or parameters[2] or 0)*hdelta
+ local targetspace_stretch=(parameters.space_stretch or parameters[3] or 0)*hdelta
+ local targetspace_shrink=(parameters.space_shrink or parameters[4] or 0)*hdelta
+ local targetx_height=(parameters.x_height or parameters[5] or 0)*vdelta
+ local targetquad=(parameters.quad or parameters[6] or 0)*hdelta
+ local targetextra_space=(parameters.extra_space or parameters[7] or 0)*hdelta
+ targetparameters.slant=targetslant
+ targetparameters.space=targetspace
+ targetparameters.space_stretch=targetspace_stretch
+ targetparameters.space_shrink=targetspace_shrink
+ targetparameters.x_height=targetx_height
+ targetparameters.quad=targetquad
+ targetparameters.extra_space=targetextra_space
+ local ascender=parameters.ascender
+ if ascender then
+ targetparameters.ascender=delta*ascender
+ end
+ local descender=parameters.descender
+ if descender then
+ targetparameters.descender=delta*descender
+ end
+ constructors.enhanceparameters(targetparameters)
+ local protrusionfactor=(targetquad~=0 and 1000/targetquad) or 0
+ local scaledwidth=defaultwidth*hdelta
+ local scaledheight=defaultheight*vdelta
+ local scaleddepth=defaultdepth*vdelta
+ local hasmath=(properties.hasmath or next(mathparameters)) and true
+ if hasmath then
+ constructors.assignmathparameters(target,tfmdata)
+ properties.hasmath=true
+ target.nomath=false
+ target.MathConstants=target.mathparameters
+ else
+ properties.hasmath=false
+ target.nomath=true
+ target.mathparameters=nil
+ end
+ local italickey="italic"
+ local useitalics=true
+ if hasmath then
+ autoitalicamount=false
+ elseif properties.textitalics then
+ italickey="italic_correction"
+ useitalics=false
+ if properties.delaytextitalics then
+ autoitalicamount=false
+ end
+ end
+ if trace_defining then
+ report_defining("defining tfm, name %a, fullname %a, filename %a, hscale %a, vscale %a, math %a, italics %a",
+ name,fullname,filename,hdelta,vdelta,
+ hasmath and "enabled" or "disabled",useitalics and "enabled" or "disabled")
+ end
+ constructors.beforecopyingcharacters(target,tfmdata)
+ local sharedkerns={}
+ for unicode,character in next,characters do
+ local chr,description,index,touni
+ if changed then
+ local c=changed[unicode]
+ if c then
+ local ligatures=character.ligatures
+ description=descriptions[c] or descriptions[unicode] or character
+ character=characters[c] or character
+ index=description.index or c
+ if tounicode then
+ touni=tounicode[index]
+ if not touni then
+ local d=descriptions[unicode] or characters[unicode]
+ local i=d.index or unicode
+ touni=tounicode[i]
+ end
+ end
+ if ligatures and not character.ligatures then
+ character.ligatures=ligatures
+ end
+ else
+ description=descriptions[unicode] or character
+ index=description.index or unicode
+ if tounicode then
+ touni=tounicode[index]
+ end
+ end
+ else
+ description=descriptions[unicode] or character
+ index=description.index or unicode
+ if tounicode then
+ touni=tounicode[index]
+ end
+ end
+ local width=description.width
+ local height=description.height
+ local depth=description.depth
+ if width then width=hdelta*width else width=scaledwidth end
+ if height then height=vdelta*height else height=scaledheight end
+ if depth and depth~=0 then
+ depth=delta*depth
+ if nonames then
+ chr={
+ index=index,
+ height=height,
+ depth=depth,
+ width=width,
+ }
+ else
+ chr={
+ name=description.name,
+ index=index,
+ height=height,
+ depth=depth,
+ width=width,
+ }
+ end
+ else
+ if nonames then
+ chr={
+ index=index,
+ height=height,
+ width=width,
+ }
+ else
+ chr={
+ name=description.name,
+ index=index,
+ height=height,
+ width=width,
+ }
+ end
+ end
+ if touni then
+ chr.tounicode=touni
+ end
+ if hasquality then
+ local ve=character.expansion_factor
+ if ve then
+ chr.expansion_factor=ve*1000
+ end
+ local vl=character.left_protruding
+ if vl then
+ chr.left_protruding=protrusionfactor*width*vl
+ end
+ local vr=character.right_protruding
+ if vr then
+ chr.right_protruding=protrusionfactor*width*vr
+ end
+ end
+ if autoitalicamount then
+ local vi=description.italic
+ if not vi then
+ local vi=description.boundingbox[3]-description.width+autoitalicamount
+ if vi>0 then
+ chr[italickey]=vi*hdelta
+ end
+ elseif vi~=0 then
+ chr[italickey]=vi*hdelta
+ end
+ elseif hasitalics then
+ local vi=description.italic
+ if vi and vi~=0 then
+ chr[italickey]=vi*hdelta
+ end
+ end
+ if hasmath then
+ local vn=character.next
+ if vn then
+ chr.next=vn
+ else
+ local vv=character.vert_variants
+ if vv then
+ local t={}
+ for i=1,#vv do
+ local vvi=vv[i]
+ t[i]={
+ ["start"]=(vvi["start"] or 0)*vdelta,
+ ["end"]=(vvi["end"] or 0)*vdelta,
+ ["advance"]=(vvi["advance"] or 0)*vdelta,
+ ["extender"]=vvi["extender"],
+ ["glyph"]=vvi["glyph"],
+ }
+ end
+ chr.vert_variants=t
+ else
+ local hv=character.horiz_variants
+ if hv then
+ local t={}
+ for i=1,#hv do
+ local hvi=hv[i]
+ t[i]={
+ ["start"]=(hvi["start"] or 0)*hdelta,
+ ["end"]=(hvi["end"] or 0)*hdelta,
+ ["advance"]=(hvi["advance"] or 0)*hdelta,
+ ["extender"]=hvi["extender"],
+ ["glyph"]=hvi["glyph"],
+ }
+ end
+ chr.horiz_variants=t
+ end
+ end
+ end
+ local va=character.top_accent
+ if va then
+ chr.top_accent=vdelta*va
+ end
+ if stackmath then
+ local mk=character.mathkerns
+ if mk then
+ local kerns={}
+ local v=mk.top_right if v then local k={} for i=1,#v do local vi=v[i]
+ k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern }
+ end kerns.top_right=k end
+ local v=mk.top_left if v then local k={} for i=1,#v do local vi=v[i]
+ k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern }
+ end kerns.top_left=k end
+ local v=mk.bottom_left if v then local k={} for i=1,#v do local vi=v[i]
+ k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern }
+ end kerns.bottom_left=k end
+ local v=mk.bottom_right if v then local k={} for i=1,#v do local vi=v[i]
+ k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern }
+ end kerns.bottom_right=k end
+ chr.mathkern=kerns
+ end
+ end
+ end
+ if not nodemode then
+ local vk=character.kerns
+ if vk then
+ local s=sharedkerns[vk]
+ if not s then
+ s={}
+ for k,v in next,vk do s[k]=v*hdelta end
+ sharedkerns[vk]=s
+ end
+ chr.kerns=s
+ end
+ local vl=character.ligatures
+ if vl then
+ if true then
+ chr.ligatures=vl
+ else
+ local tt={}
+ for i,l in next,vl do
+ tt[i]=l
+ end
+ chr.ligatures=tt
+ end
+ end
+ end
+ if isvirtual then
+ local vc=character.commands
+ if vc then
+ local ok=false
+ for i=1,#vc do
+ local key=vc[i][1]
+ if key=="right" or key=="down" then
+ ok=true
+ break
+ end
+ end
+ if ok then
+ local tt={}
+ for i=1,#vc do
+ local ivc=vc[i]
+ local key=ivc[1]
+ if key=="right" then
+ tt[i]={ key,ivc[2]*hdelta }
+ elseif key=="down" then
+ tt[i]={ key,ivc[2]*vdelta }
+ elseif key=="rule" then
+ tt[i]={ key,ivc[2]*vdelta,ivc[3]*hdelta }
+ else
+ tt[i]=ivc
+ end
+ end
+ chr.commands=tt
+ else
+ chr.commands=vc
+ end
+ chr.index=nil
+ end
+ end
+ targetcharacters[unicode]=chr
+ end
+ constructors.aftercopyingcharacters(target,tfmdata)
+ constructors.trytosharefont(target,tfmdata)
+ return target
+end
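+-- constructors.finalize normalizes a tfmdata table coming from the cache or from
+-- another loader: it (re)builds the parameters, properties and descriptions
+-- subtables and clears the top level fields that were moved into them.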
+function constructors.finalize(tfmdata)
+ if tfmdata.properties and tfmdata.properties.finalized then
+ return
+ end
+ if not tfmdata.characters then
+ return nil
+ end
+ if not tfmdata.goodies then
+ tfmdata.goodies={}
+ end
+ local parameters=tfmdata.parameters
+ if not parameters then
+ return nil
+ end
+ if not parameters.expansion then
+ parameters.expansion={
+ stretch=tfmdata.stretch or 0,
+ shrink=tfmdata.shrink or 0,
+ step=tfmdata.step or 0,
+ auto=tfmdata.auto_expand or false,
+ }
+ end
+ if not parameters.protrusion then
+ parameters.protrusion={
+ auto=tfmdata.auto_protrude or false
+ }
+ end
+ if not parameters.size then
+ parameters.size=tfmdata.size
+ end
+ if not parameters.extendfactor then
+ parameters.extendfactor=tfmdata.extend or 0
+ end
+ if not parameters.slantfactor then
+ parameters.slantfactor=tfmdata.slant or 0
+ end
+ if not parameters.designsize then
+ parameters.designsize=tfmdata.designsize or (factors.pt*10)
+ end
+ if not parameters.units then
+ parameters.units=tfmdata.units_per_em or 1000
+ end
+ if not tfmdata.descriptions then
+ local descriptions={}
+ setmetatableindex(descriptions,function(t,k) local v={} t[k]=v return v end)
+ tfmdata.descriptions=descriptions
+ end
+ local properties=tfmdata.properties
+ if not properties then
+ properties={}
+ tfmdata.properties=properties
+ end
+ if not properties.virtualized then
+ properties.virtualized=tfmdata.type=="virtual"
+ end
+ if not tfmdata.properties then
+ tfmdata.properties={
+ fontname=tfmdata.fontname,
+ filename=tfmdata.filename,
+ fullname=tfmdata.fullname,
+ name=tfmdata.name,
+ psname=tfmdata.psname,
+ encodingbytes=tfmdata.encodingbytes or 1,
+ embedding=tfmdata.embedding or "subset",
+ tounicode=tfmdata.tounicode or 1,
+ cidinfo=tfmdata.cidinfo or nil,
+ format=tfmdata.format or "type1",
+ direction=tfmdata.direction or 0,
+ }
+ end
+ if not tfmdata.resources then
+ tfmdata.resources={}
+ end
+ if not tfmdata.shared then
+ tfmdata.shared={}
+ end
+ if not properties.hasmath then
+ properties.hasmath=not tfmdata.nomath
+ end
+ tfmdata.MathConstants=nil
+ tfmdata.postprocessors=nil
+ tfmdata.fontname=nil
+ tfmdata.filename=nil
+ tfmdata.fullname=nil
+ tfmdata.name=nil
+ tfmdata.psname=nil
+ tfmdata.encodingbytes=nil
+ tfmdata.embedding=nil
+ tfmdata.tounicode=nil
+ tfmdata.cidinfo=nil
+ tfmdata.format=nil
+ tfmdata.direction=nil
+ tfmdata.type=nil
+ tfmdata.nomath=nil
+ tfmdata.designsize=nil
+ tfmdata.size=nil
+ tfmdata.stretch=nil
+ tfmdata.shrink=nil
+ tfmdata.step=nil
+ tfmdata.auto_expand=nil
+ tfmdata.auto_protrude=nil
+ tfmdata.extend=nil
+ tfmdata.slant=nil
+ tfmdata.units_per_em=nil
+ properties.finalized=true
+ return tfmdata
+end
+local hashmethods={}
+constructors.hashmethods=hashmethods
+function constructors.hashfeatures(specification)
+ local features=specification.features
+ if features then
+ local t,tn={},0
+ for category,list in next,features do
+ if next(list) then
+ local hasher=hashmethods[category]
+ if hasher then
+ local hash=hasher(list)
+ if hash then
+ tn=tn+1
+ t[tn]=category..":"..hash
+ end
+ end
+ end
+ end
+ if tn>0 then
+ return concat(t," & ")
+ end
+ end
+ return "unknown"
+end
+hashmethods.normal=function(list)
+ local s={}
+ local n=0
+ for k,v in next,list do
+ if not k then
+ elseif k=="number" or k=="features" then
+ else
+ n=n+1
+ s[n]=k
+ end
+ end
+ if n>0 then
+ sort(s)
+ for i=1,n do
+ local k=s[i]
+ s[i]=k..'='..tostring(list[k])
+ end
+ return concat(s,"+")
+ end
+end
+function constructors.hashinstance(specification,force)
+ local hash,size,fallbacks=specification.hash,specification.size,specification.fallbacks
+ if force or not hash then
+ hash=constructors.hashfeatures(specification)
+ specification.hash=hash
+ end
+ if size<1000 and designsizes[hash] then
+ size=math.round(constructors.scaled(size,designsizes[hash]))
+ specification.size=size
+ end
+ if fallbacks then
+ return hash..' @ '..tostring(size)..' @ '..fallbacks
+ else
+ return hash..' @ '..tostring(size)
+ end
+end
+function constructors.setname(tfmdata,specification)
+ if constructors.namemode=="specification" then
+ local specname=specification.specification
+ if specname then
+ tfmdata.properties.name=specname
+ if trace_defining then
+ report_otf("overloaded fontname %a",specname)
+ end
+ end
+ end
+end
+function constructors.checkedfilename(data)
+ local foundfilename=data.foundfilename
+ if not foundfilename then
+ local askedfilename=data.filename or ""
+ if askedfilename~="" then
+ askedfilename=resolvers.resolve(askedfilename)
+ foundfilename=resolvers.findbinfile(askedfilename,"") or ""
+ if foundfilename=="" then
+ report_defining("source file %a is not found",askedfilename)
+ foundfilename=resolvers.findbinfile(file.basename(askedfilename),"") or ""
+ if foundfilename~="" then
+ report_defining("using source file %a due to cache mismatch",foundfilename)
+ end
+ end
+ end
+ data.foundfilename=foundfilename
+ end
+ return foundfilename
+end
+local formats=allocate()
+fonts.formats=formats
+setmetatableindex(formats,function(t,k)
+ local l=lower(k)
+ if rawget(t,k) then
+ t[k]=l
+ return l
+ end
+ return rawget(t,file.suffix(l))
+end)
+local locations={}
+local function setindeed(mode,target,group,name,action,position)
+ local t=target[mode]
+ if not t then
+ report_defining("fatal error in setting feature %a, group %a, mode %a",name,group,mode)
+ os.exit()
+ elseif position then
+ insert(t,position,{ name=name,action=action })
+ else
+ for i=1,#t do
+ local ti=t[i]
+ if ti.name==name then
+ ti.action=action
+ return
+ end
+ end
+ insert(t,{ name=name,action=action })
+ end
+end
+local function set(group,name,target,source)
+ target=target[group]
+ if not target then
+ report_defining("fatal target error in setting feature %a, group %a",name,group)
+ os.exit()
+ end
+ local source=source[group]
+ if not source then
+ report_defining("fatal source error in setting feature %a, group %a",name,group)
+ os.exit()
+ end
+ local node=source.node
+ local base=source.base
+ local position=source.position
+ if node then
+ setindeed("node",target,group,name,node,position)
+ end
+ if base then
+ setindeed("base",target,group,name,base,position)
+ end
+end
+local function register(where,specification)
+ local name=specification.name
+ if name and name~="" then
+ local default=specification.default
+ local description=specification.description
+ local initializers=specification.initializers
+ local processors=specification.processors
+ local manipulators=specification.manipulators
+ local modechecker=specification.modechecker
+ if default then
+ where.defaults[name]=default
+ end
+ if description and description~="" then
+ where.descriptions[name]=description
+ end
+ if initializers then
+ set('initializers',name,where,specification)
+ end
+ if processors then
+ set('processors',name,where,specification)
+ end
+ if manipulators then
+ set('manipulators',name,where,specification)
+ end
+ if modechecker then
+ where.modechecker=modechecker
+ end
+ end
+end
+constructors.registerfeature=register
+function constructors.getfeatureaction(what,where,mode,name)
+ what=handlers[what].features
+ if what then
+ where=what[where]
+ if where then
+ mode=where[mode]
+ if mode then
+ for i=1,#mode do
+ local m=mode[i]
+ if m.name==name then
+ return m.action
+ end
+ end
+ end
+ end
+ end
+end
+function constructors.newhandler(what)
+ local handler=handlers[what]
+ if not handler then
+ handler={}
+ handlers[what]=handler
+ end
+ return handler
+end
+function constructors.newfeatures(what)
+ local handler=handlers[what]
+ local features=handler.features
+ if not features then
+ local tables=handler.tables
+ local statistics=handler.statistics
+ features=allocate {
+ defaults={},
+ descriptions=tables and tables.features or {},
+ used=statistics and statistics.usedfeatures or {},
+ initializers={ base={},node={} },
+ processors={ base={},node={} },
+ manipulators={ base={},node={} },
+ }
+ features.register=function(specification) return register(features,specification) end
+ handler.features=features
+ end
+ return features
+end
+function constructors.checkedfeatures(what,features)
+ local defaults=handlers[what].features.defaults
+ if features and next(features) then
+ features=fastcopy(features)
+ for key,value in next,defaults do
+ if features[key]==nil then
+ features[key]=value
+ end
+ end
+ return features
+ else
+ return fastcopy(defaults)
+ end
+end
+function constructors.initializefeatures(what,tfmdata,features,trace,report)
+ if features and next(features) then
+ local properties=tfmdata.properties or {}
+ local whathandler=handlers[what]
+ local whatfeatures=whathandler.features
+ local whatinitializers=whatfeatures.initializers
+ local whatmodechecker=whatfeatures.modechecker
+ local mode=properties.mode or (whatmodechecker and whatmodechecker(tfmdata,features,features.mode)) or features.mode or "base"
+ properties.mode=mode
+ features.mode=mode
+ local done={}
+ while true do
+ local redo=false
+ local initializers=whatfeatures.initializers[mode]
+ if initializers then
+ for i=1,#initializers do
+ local step=initializers[i]
+ local feature=step.name
+ local value=features[feature]
+ if not value then
+ elseif done[feature] then
+ else
+ local action=step.action
+ if trace then
+ report("initializing feature %a to %a for mode %a for font %a",feature,
+ value,mode,tfmdata.properties.fullname)
+ end
+ action(tfmdata,value,features)
+ if mode~=properties.mode or mode~=features.mode then
+ if whatmodechecker then
+ properties.mode=whatmodechecker(tfmdata,features,properties.mode)
+ features.mode=properties.mode
+ end
+ if mode~=properties.mode then
+ mode=properties.mode
+ redo=true
+ end
+ end
+ done[feature]=true
+ end
+ if redo then
+ break
+ end
+ end
+ if not redo then
+ break
+ end
+ else
+ break
+ end
+ end
+ properties.mode=mode
+ return true
+ else
+ return false
+ end
+end
+function constructors.collectprocessors(what,tfmdata,features,trace,report)
+ local processes,nofprocesses={},0
+ if features and next(features) then
+ local properties=tfmdata.properties
+ local whathandler=handlers[what]
+ local whatfeatures=whathandler.features
+ local whatprocessors=whatfeatures.processors
+ local mode=properties.mode
+ local processors=whatprocessors[mode]
+ if processors then
+ for i=1,#processors do
+ local step=processors[i]
+ local feature=step.name
+ if features[feature] then
+ local action=step.action
+ if trace then
+ report("installing feature processor %a for mode %a for font %a",feature,mode,tfmdata.properties.fullname)
+ end
+ if action then
+ nofprocesses=nofprocesses+1
+ processes[nofprocesses]=action
+ end
+ end
+ end
+ elseif trace then
+ report("no feature processors for mode %a for font %a",mode,properties.fullname)
+ end
+ end
+ return processes
+end
+function constructors.applymanipulators(what,tfmdata,features,trace,report)
+ if features and next(features) then
+ local properties=tfmdata.properties
+ local whathandler=handlers[what]
+ local whatfeatures=whathandler.features
+ local whatmanipulators=whatfeatures.manipulators
+ local mode=properties.mode
+ local manipulators=whatmanipulators[mode]
+ if manipulators then
+ for i=1,#manipulators do
+ local step=manipulators[i]
+ local feature=step.name
+ local value=features[feature]
+ if value then
+ local action=step.action
+ if trace then
+ report("applying feature manipulator %a for mode %a for font %a",feature,mode,properties.fullname)
+ end
+ if action then
+ action(tfmdata,feature,value)
+ end
+ end
+ end
+ end
+ end
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-font-enc']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+fonts.encodings={}
+fonts.encodings.agl={}
+fonts.encodings.known={}
+setmetatable(fonts.encodings.agl,{ __index=function(t,k)
+ if k=="unicodes" then
+ texio.write(" <loading (extended) adobe glyph list>")
+ local unicodes=dofile(resolvers.findfile("font-age.lua"))
+ fonts.encodings.agl={ unicodes=unicodes }
+ return unicodes
+ else
+ return nil
+ end
+end })
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-cid']={
+ version=1.001,
+ comment="companion to font-otf.lua (cidmaps)",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local format,match,lower=string.format,string.match,string.lower
+local tonumber=tonumber
+local P,S,R,C,V,lpegmatch=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.V,lpeg.match
+local fonts,logs,trackers=fonts,logs,trackers
+local trace_loading=false trackers.register("otf.loading",function(v) trace_loading=v end)
+local report_otf=logs.reporter("fonts","otf loading")
+local cid={}
+fonts.cid=cid
+local cidmap={}
+local cidmax=10
+local number=C(R("09","af","AF")^1)
+local space=S(" \n\r\t")
+local spaces=space^0
+local period=P(".")
+local periods=period*period
+local name=P("/")*C((1-space)^1)
+local unicodes,names={},{}
+local function do_one(a,b)
+ unicodes[tonumber(a)]=tonumber(b,16)
+end
+local function do_range(a,b,c)
+ c=tonumber(c,16)
+ for i=tonumber(a),tonumber(b) do
+ unicodes[i]=c
+ c=c+1
+ end
+end
+local function do_name(a,b)
+ names[tonumber(a)]=b
+end
+local grammar=P { "start",
+ start=number*spaces*number*V("series"),
+ series=(spaces*(V("one")+V("range")+V("named")))^1,
+ one=(number*spaces*number)/do_one,
+ range=(number*periods*number*spaces*number)/do_range,
+ named=(number*spaces*name)/do_name
+}
+local function loadcidfile(filename)
+ local data=io.loaddata(filename)
+ if data then
+ unicodes,names={},{}
+ lpegmatch(grammar,data)
+ local supplement,registry,ordering=match(filename,"^(.-)%-(.-)%-()%.(.-)$")
+ return {
+ supplement=supplement,
+ registry=registry,
+ ordering=ordering,
+ filename=filename,
+ unicodes=unicodes,
+ names=names
+ }
+ end
+end
+cid.loadfile=loadcidfile
+local template="%s-%s-%s.cidmap"
+local function locate(registry,ordering,supplement)
+ local filename=format(template,registry,ordering,supplement)
+ local hashname=lower(filename)
+ local found=cidmap[hashname]
+ if not found then
+ if trace_loading then
+ report_otf("checking cidmap, registry %a, ordering %a, supplement %a, filename %a",registry,ordering,supplement,filename)
+ end
+ local fullname=resolvers.findfile(filename,'cid') or ""
+ if fullname~="" then
+ found=loadcidfile(fullname)
+ if found then
+ if trace_loading then
+ report_otf("using cidmap file %a",filename)
+ end
+ cidmap[hashname]=found
+ found.usedname=file.basename(filename)
+ end
+ end
+ end
+ return found
+end
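+-- cid.getmap resolves the cidmap for a registry/ordering/supplement triplet; when
+-- the exact supplement is missing it searches higher and then lower supplements
+-- and registers the match for the intermediate supplements as well.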
+function cid.getmap(specification)
+ if not specification then
+ report_otf("invalid cidinfo specification, table expected")
+ return
+ end
+ local registry=specification.registry
+ local ordering=specification.ordering
+ local supplement=specification.supplement
+ local filename=format(template,registry,ordering,supplement)
+ local found=cidmap[lower(filename)]
+ if found then
+ return found
+ end
+ if trace_loading then
+ report_otf("cidmap needed, registry %a, ordering %a, supplement %a",registry,ordering,supplement)
+ end
+ found=locate(registry,ordering,supplement)
+ if not found then
+ local supnum=tonumber(supplement)
+ local cidnum=nil
+ if supnum<cidmax then
+ for s=supnum+1,cidmax do
+ local c=locate(registry,ordering,s)
+ if c then
+ found,cidnum=c,s
+ break
+ end
+ end
+ end
+ if not found and supnum>0 then
+ for s=supnum-1,0,-1 do
+ local c=locate(registry,ordering,s)
+ if c then
+ found,cidnum=c,s
+ break
+ end
+ end
+ end
+ registry=lower(registry)
+ ordering=lower(ordering)
+ if found and cidnum>0 then
+ for s=0,cidnum-1 do
+ local filename=format(template,registry,ordering,s)
+ if not cidmap[filename] then
+ cidmap[filename]=found
+ end
+ end
+ end
+ end
+ return found
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-map']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local tonumber=tonumber
+local match,format,find,concat,gsub,lower=string.match,string.format,string.find,table.concat,string.gsub,string.lower
+local P,R,S,C,Ct,Cc,lpegmatch=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cc,lpeg.match
+local utfbyte=utf.byte
+local floor=math.floor
+local trace_loading=false trackers.register("fonts.loading",function(v) trace_loading=v end)
+local trace_mapping=false trackers.register("fonts.mapping",function(v) trace_mapping=v end)
+local report_fonts=logs.reporter("fonts","loading")
+local fonts=fonts or {}
+local mappings=fonts.mappings or {}
+fonts.mappings=mappings
+local function loadlumtable(filename)
+ local lumname=file.replacesuffix(file.basename(filename),"lum")
+ local lumfile=resolvers.findfile(lumname,"map") or ""
+ if lumfile~="" and lfs.isfile(lumfile) then
+ if trace_loading or trace_mapping then
+ report_fonts("loading map table %a",lumfile)
+ end
+ local lumunic=dofile(lumfile)
+ return lumunic,lumfile
+ end
+end
+local hex=R("AF","09")
+local hexfour=(hex*hex*hex*hex)/function(s) return tonumber(s,16) end
+local hexsix=(hex*hex*hex*hex*hex*hex)/function(s) return tonumber(s,16) end
+local dec=(R("09")^1)/tonumber
+local period=P(".")
+local unicode=P("uni")*(hexfour*(period+P(-1))*Cc(false)+Ct(hexfour^1)*Cc(true))
+local ucode=P("u")*(hexsix*(period+P(-1))*Cc(false)+Ct(hexsix^1)*Cc(true))
+local index=P("index")*dec*Cc(false)
+local parser=unicode+ucode+index
+local parsers={}
+local function makenameparser(str)
+ if not str or str=="" then
+ return parser
+ else
+ local p=parsers[str]
+ if not p then
+ p=P(str)*period*dec*Cc(false)
+ parsers[str]=p
+ end
+ return p
+ end
+end
+local function tounicode16(unicode,name)
+ if unicode<0x10000 then
+ return format("%04X",unicode)
+ elseif unicode<0x1FFFFFFFFF then
+ return format("%04X%04X",floor(unicode/1024),unicode%1024+0xDC00)
+ else
+ report_fonts("can't convert %a in %a into tounicode",unicode,name)
+ end
+end
+local function tounicode16sequence(unicodes,name)
+ local t={}
+ for l=1,#unicodes do
+ local unicode=unicodes[l]
+ if unicode<0x10000 then
+ t[l]=format("%04X",unicode)
+ elseif unicode<0x1FFFFFFFFF then
+ t[l]=format("%04X%04X",floor(unicode/1024),unicode%1024+0xDC00)
+ else
+ report_fonts ("can't convert %a in %a into tounicode",unicode,name)
+ end
+ end
+ return concat(t)
+end
+local function fromunicode16(str)
+ if #str==4 then
+ return tonumber(str,16)
+ else
+ local l,r=match(str,"(....)(....)")
+ return (tonumber(l,16))*0x400+tonumber(r,16)-0xDC00
+ end
+end
+mappings.loadlumtable=loadlumtable
+mappings.makenameparser=makenameparser
+mappings.tounicode16=tounicode16
+mappings.tounicode16sequence=tounicode16sequence
+mappings.fromunicode16=fromunicode16
+local ligseparator=P("_")
+local varseparator=P(".")
+local namesplitter=Ct(C((1-ligseparator-varseparator)^1)*(ligseparator*C((1-ligseparator-varseparator)^1))^0)
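+-- mappings.addtounicode derives tounicode entries for glyphs sitting in private,
+-- PUA or otherwise unassigned slots: it consults the adobe glyph list, the cid
+-- map, glyph name parsing (uniXXXX / uXXXXXX / prefix.index) and, as a last
+-- resort, ligature name splitting on "_".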
+function mappings.addtounicode(data,filename)
+ local resources=data.resources
+ local properties=data.properties
+ local descriptions=data.descriptions
+ local unicodes=resources.unicodes
+ if not unicodes then
+ return
+ end
+ unicodes['space']=unicodes['space'] or 32
+ unicodes['hyphen']=unicodes['hyphen'] or 45
+ unicodes['zwj']=unicodes['zwj'] or 0x200D
+ unicodes['zwnj']=unicodes['zwnj'] or 0x200C
+ local private=fonts.constructors.privateoffset
+ local unknown=format("%04X",utfbyte("?"))
+ local unicodevector=fonts.encodings.agl.unicodes
+ local tounicode={}
+ local originals={}
+ resources.tounicode=tounicode
+ resources.originals=originals
+ local lumunic,uparser,oparser
+ local cidinfo,cidnames,cidcodes,usedmap
+ if false then
+ lumunic=loadlumtable(filename)
+ lumunic=lumunic and lumunic.tounicode
+ end
+ cidinfo=properties.cidinfo
+ usedmap=cidinfo and fonts.cid.getmap(cidinfo)
+ if usedmap then
+ oparser=usedmap and makenameparser(cidinfo.ordering)
+ cidnames=usedmap.names
+ cidcodes=usedmap.unicodes
+ end
+ uparser=makenameparser()
+ local ns,nl=0,0
+ for unic,glyph in next,descriptions do
+ local index=glyph.index
+ local name=glyph.name
+ if unic==-1 or unic>=private or (unic>=0xE000 and unic<=0xF8FF) or unic==0xFFFE or unic==0xFFFF then
+ local unicode=lumunic and lumunic[name] or unicodevector[name]
+ if unicode then
+ originals[index]=unicode
+ tounicode[index]=tounicode16(unicode,name)
+ ns=ns+1
+ end
+ if (not unicode) and usedmap then
+ local foundindex=lpegmatch(oparser,name)
+ if foundindex then
+ unicode=cidcodes[foundindex]
+ if unicode then
+ originals[index]=unicode
+ tounicode[index]=tounicode16(unicode,name)
+ ns=ns+1
+ else
+ local reference=cidnames[foundindex]
+ if reference then
+ local foundindex=lpegmatch(oparser,reference)
+ if foundindex then
+ unicode=cidcodes[foundindex]
+ if unicode then
+ originals[index]=unicode
+ tounicode[index]=tounicode16(unicode,name)
+ ns=ns+1
+ end
+ end
+ if not unicode or unicode=="" then
+ local foundcodes,multiple=lpegmatch(uparser,reference)
+ if foundcodes then
+ originals[index]=foundcodes
+ if multiple then
+ tounicode[index]=tounicode16sequence(foundcodes)
+ nl=nl+1
+ unicode=true
+ else
+ tounicode[index]=tounicode16(foundcodes,name)
+ ns=ns+1
+ unicode=foundcodes
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ if not unicode or unicode=="" then
+ local split=lpegmatch(namesplitter,name)
+ local nsplit=split and #split or 0
+ local t,n={},0
+ unicode=true
+ for l=1,nsplit do
+ local base=split[l]
+ local u=unicodes[base] or unicodevector[base]
+ if not u then
+ break
+ elseif type(u)=="table" then
+ if u[1]>=private then
+ unicode=false
+ break
+ end
+ n=n+1
+ t[n]=u[1]
+ else
+ if u>=private then
+ unicode=false
+ break
+ end
+ n=n+1
+ t[n]=u
+ end
+ end
+ if n==0 then
+ elseif n==1 then
+ originals[index]=t[1]
+ tounicode[index]=tounicode16(t[1],name)
+ else
+ originals[index]=t
+ tounicode[index]=tounicode16sequence(t)
+ end
+ nl=nl+1
+ end
+ if not unicode or unicode=="" then
+ local foundcodes,multiple=lpegmatch(uparser,name)
+ if foundcodes then
+ if multiple then
+ originals[index]=foundcodes
+ tounicode[index]=tounicode16sequence(foundcodes,name)
+ nl=nl+1
+ unicode=true
+ else
+ originals[index]=foundcodes
+ tounicode[index]=tounicode16(foundcodes,name)
+ ns=ns+1
+ unicode=foundcodes
+ end
+ end
+ end
+ end
+ end
+ if trace_mapping then
+ for unic,glyph in table.sortedhash(descriptions) do
+ local name=glyph.name
+ local index=glyph.index
+ local toun=tounicode[index]
+ if toun then
+ report_fonts("internal slot %U, name %a, unicode %U, tounicode %a",index,name,unic,toun)
+ else
+ report_fonts("internal slot %U, name %a, unicode %U",index,name,unic)
+ end
+ end
+ end
+ if trace_loading and (ns>0 or nl>0) then
+ report_fonts("%s tounicode entries added, ligatures %s",nl+ns,ns)
+ end
+end
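+-- addtounicode fills resources.tounicode/originals for glyphs that ended up in
+-- the private area or without a usable codepoint. It tries, in order: the agl
+-- unicode vector (plus an optional lum table, disabled above), the cid map for
+-- cid-keyed fonts, splitting ligature-like names on "_" while dropping a
+-- ".variant" suffix, and finally the uniXXXX/uXXXXXX name parser. Illustrative
+-- sketch of the splitting step only:
+--
+--   -- lpegmatch(namesplitter,"f_f_i.sc") --> { "f", "f", "i" }
+--
+-- each part is then resolved through resources.unicodes or the agl vector and,
+-- if none of them is private, the glyph gets a multi-codepoint tounicode entry.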
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-fonts-syn']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+fonts.names=fonts.names or {}
+fonts.names.version=1.001
+fonts.names.basename="luatex-fonts-names"
+fonts.names.new_to_old={}
+fonts.names.old_to_new={}
+fonts.names.cache=containers.define("fonts","data",fonts.names.version,true)
+local data,loaded=nil,false
+local fileformats={ "lua","tex","other text files" }
+function fonts.names.reportmissingbase()
+ texio.write("<missing font database, run: mtxrun --script fonts --reload --simple>")
+ fonts.names.reportmissingbase=nil
+end
+function fonts.names.reportmissingname()
+ texio.write("<unknown font in database, run: mtxrun --script fonts --reload --simple>")
+ fonts.names.reportmissingname=nil
+end
+function fonts.names.resolve(name,sub)
+ if not loaded then
+ local basename=fonts.names.basename
+ if basename and basename~="" then
+ data=containers.read(fonts.names.cache,basename)
+ if not data then
+ basename=file.addsuffix(basename,"lua")
+ for i=1,#fileformats do
+ local format=fileformats[i]
+ local foundname=resolvers.findfile(basename,format) or ""
+ if foundname~="" then
+ data=dofile(foundname)
+ texio.write("<font database loaded: ",foundname,">")
+ break
+ end
+ end
+ end
+ end
+ loaded=true
+ end
+ if type(data)=="table" and data.version==fonts.names.version then
+ local condensed=string.gsub(string.lower(name),"[^%a%d]","")
+ local found=data.mappings and data.mappings[condensed]
+ if found then
+ local fontname,filename,subfont=found[1],found[2],found[3]
+ if subfont then
+ return filename,fontname
+ else
+ return filename,false
+ end
+ elseif fonts.names.reportmissingname then
+ fonts.names.reportmissingname()
+ return name,false
+ end
+ elseif fonts.names.reportmissingbase then
+ fonts.names.reportmissingbase()
+ end
+end
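+-- Lookup goes through a "condensed" key: the request is lowercased and
+-- stripped of everything that is not a letter or digit before being looked up
+-- in data.mappings, so spelling variants of a family name land on one entry.
+-- Illustrative only:
+--
+--   -- string.gsub(string.lower("Latin Modern Roman"),"[^%a%d]","")
+--   --   --> "latinmodernroman"
+--   -- data.mappings["latinmodernroman"] --> { fontname, filename, subfont }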
+fonts.names.resolvespec=fonts.names.resolve
+function fonts.names.getfilename(askedname,suffix)
+ return ""
+end
+function fonts.names.ignoredfile(filename)
+ return false
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-tfm']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local next=next
+local match=string.match
+local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end)
+local trace_features=false trackers.register("tfm.features",function(v) trace_features=v end)
+local report_defining=logs.reporter("fonts","defining")
+local report_tfm=logs.reporter("fonts","tfm loading")
+local findbinfile=resolvers.findbinfile
+local fonts=fonts
+local handlers=fonts.handlers
+local readers=fonts.readers
+local constructors=fonts.constructors
+local encodings=fonts.encodings
+local tfm=constructors.newhandler("tfm")
+local tfmfeatures=constructors.newfeatures("tfm")
+local registertfmfeature=tfmfeatures.register
+constructors.resolvevirtualtoo=false
+fonts.formats.tfm="type1"
+function tfm.setfeatures(tfmdata,features)
+ local okay=constructors.initializefeatures("tfm",tfmdata,features,trace_features,report_tfm)
+ if okay then
+ return constructors.collectprocessors("tfm",tfmdata,features,trace_features,report_tfm)
+ else
+ return {}
+ end
+end
+local function read_from_tfm(specification)
+ local filename=specification.filename
+ local size=specification.size
+ if trace_defining then
+ report_defining("loading tfm file %a at size %s",filename,size)
+ end
+ local tfmdata=font.read_tfm(filename,size)
+ if tfmdata then
+ local features=specification.features and specification.features.normal or {}
+ local resources=tfmdata.resources or {}
+ local properties=tfmdata.properties or {}
+ local parameters=tfmdata.parameters or {}
+ local shared=tfmdata.shared or {}
+ properties.name=tfmdata.name
+ properties.fontname=tfmdata.fontname
+ properties.psname=tfmdata.psname
+ properties.filename=specification.filename
+ parameters.size=size
+ shared.rawdata={}
+ shared.features=features
+ shared.processes=next(features) and tfm.setfeatures(tfmdata,features) or nil
+ tfmdata.properties=properties
+ tfmdata.resources=resources
+ tfmdata.parameters=parameters
+ tfmdata.shared=shared
+ parameters.slant=parameters.slant or parameters[1] or 0
+ parameters.space=parameters.space or parameters[2] or 0
+ parameters.space_stretch=parameters.space_stretch or parameters[3] or 0
+ parameters.space_shrink=parameters.space_shrink or parameters[4] or 0
+ parameters.x_height=parameters.x_height or parameters[5] or 0
+ parameters.quad=parameters.quad or parameters[6] or 0
+ parameters.extra_space=parameters.extra_space or parameters[7] or 0
+ constructors.enhanceparameters(parameters)
+ if constructors.resolvevirtualtoo then
+ fonts.loggers.register(tfmdata,file.suffix(filename),specification)
+ local vfname=findbinfile(specification.name,'ovf')
+ if vfname and vfname~="" then
+ local vfdata=font.read_vf(vfname,size)
+ if vfdata then
+ local chars=tfmdata.characters
+ for k,v in next,vfdata.characters do
+ chars[k].commands=v.commands
+ end
+ properties.virtualized=true
+ tfmdata.fonts=vfdata.fonts
+ end
+ end
+ end
+ local allfeatures=tfmdata.shared.features or specification.features.normal
+ constructors.applymanipulators("tfm",tfmdata,allfeatures.normal,trace_features,report_tfm)
+ if not features.encoding then
+ local encoding,filename=match(properties.filename,"^(.-)%-(.*)$")
+ if filename and encoding and encodings.known and encodings.known[encoding] then
+ features.encoding=encoding
+ end
+ end
+ return tfmdata
+ end
+end
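+-- read_from_tfm maps the seven classical tfm font dimensions from their
+-- numeric slots onto named parameters, mirroring the assignments above:
+--
+--   -- parameters[1] slant           parameters[5] x_height
+--   -- parameters[2] space           parameters[6] quad
+--   -- parameters[3] space_stretch   parameters[7] extra_space
+--   -- parameters[4] space_shrink
+--
+-- A hypothetical direct call would look like
+--   -- read_from_tfm { filename="cmr10.tfm", size=10*65536, features={ normal={} } }
+-- although in practice the specification is built by the definer and routed
+-- through readers.tfm below.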
+local function check_tfm(specification,fullname)
+ local foundname=findbinfile(fullname,'tfm') or ""
+ if foundname=="" then
+ foundname=findbinfile(fullname,'ofm') or ""
+ end
+ if foundname=="" then
+ foundname=fonts.names.getfilename(fullname,"tfm") or ""
+ end
+ if foundname~="" then
+ specification.filename=foundname
+ specification.format="ofm"
+ return read_from_tfm(specification)
+ elseif trace_defining then
+ report_defining("loading tfm with name %a fails",specification.name)
+ end
+end
+readers.check_tfm=check_tfm
+function readers.tfm(specification)
+ local fullname=specification.filename or ""
+ if fullname=="" then
+ local forced=specification.forced or ""
+ if forced~="" then
+ fullname=specification.name.."."..forced
+ else
+ fullname=specification.name
+ end
+ end
+ return check_tfm(specification,fullname)
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-afm']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local fonts,logs,trackers,containers,resolvers=fonts,logs,trackers,containers,resolvers
+local next,type,tonumber=next,type,tonumber
+local format,match,gmatch,lower,gsub,strip=string.format,string.match,string.gmatch,string.lower,string.gsub,string.strip
+local abs=math.abs
+local P,S,C,R,lpegmatch,patterns=lpeg.P,lpeg.S,lpeg.C,lpeg.R,lpeg.match,lpeg.patterns
+local derivetable=table.derive
+local trace_features=false trackers.register("afm.features",function(v) trace_features=v end)
+local trace_indexing=false trackers.register("afm.indexing",function(v) trace_indexing=v end)
+local trace_loading=false trackers.register("afm.loading",function(v) trace_loading=v end)
+local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end)
+local report_afm=logs.reporter("fonts","afm loading")
+local findbinfile=resolvers.findbinfile
+local definers=fonts.definers
+local readers=fonts.readers
+local constructors=fonts.constructors
+local afm=constructors.newhandler("afm")
+local pfb=constructors.newhandler("pfb")
+local afmfeatures=constructors.newfeatures("afm")
+local registerafmfeature=afmfeatures.register
+afm.version=1.410
+afm.cache=containers.define("fonts","afm",afm.version,true)
+afm.autoprefixed=true
+afm.helpdata={}
+afm.syncspace=true
+afm.addligatures=true
+afm.addtexligatures=true
+afm.addkerns=true
+local applyruntimefixes=fonts.treatments and fonts.treatments.applyfixes
+local function setmode(tfmdata,value)
+ if value then
+ tfmdata.properties.mode=lower(value)
+ end
+end
+registerafmfeature {
+ name="mode",
+ description="mode",
+ initializers={
+ base=setmode,
+ node=setmode,
+ }
+}
+local comment=P("Comment")
+local spacing=patterns.spacer
+local lineend=patterns.newline
+local words=C((1-lineend)^1)
+local number=C((R("09")+S("."))^1)/tonumber*spacing^0
+local data=lpeg.Carg(1)
+local pattern=(
+ comment*spacing*(
+ data*(
+ ("CODINGSCHEME"*spacing*words )/function(fd,a) end+("DESIGNSIZE"*spacing*number*words )/function(fd,a) fd[ 1]=a end+("CHECKSUM"*spacing*number*words )/function(fd,a) fd[ 2]=a end+("SPACE"*spacing*number*"plus"*number*"minus"*number)/function(fd,a,b,c) fd[ 3],fd[ 4],fd[ 5]=a,b,c end+("QUAD"*spacing*number )/function(fd,a) fd[ 6]=a end+("EXTRASPACE"*spacing*number )/function(fd,a) fd[ 7]=a end+("NUM"*spacing*number*number*number )/function(fd,a,b,c) fd[ 8],fd[ 9],fd[10]=a,b,c end+("DENOM"*spacing*number*number )/function(fd,a,b ) fd[11],fd[12]=a,b end+("SUP"*spacing*number*number*number )/function(fd,a,b,c) fd[13],fd[14],fd[15]=a,b,c end+("SUB"*spacing*number*number )/function(fd,a,b) fd[16],fd[17]=a,b end+("SUPDROP"*spacing*number )/function(fd,a) fd[18]=a end+("SUBDROP"*spacing*number )/function(fd,a) fd[19]=a end+("DELIM"*spacing*number*number )/function(fd,a,b) fd[20],fd[21]=a,b end+("AXISHEIGHT"*spacing*number )/function(fd,a) fd[22]=a end
+ )+(1-lineend)^0
+ )+(1-comment)^1
+)^0
+local function scan_comment(str)
+ local fd={}
+ lpegmatch(pattern,str,1,fd)
+ return fd
+end
+local keys={}
+function keys.FontName (data,line) data.metadata.fontname=strip (line)
+ data.metadata.fullname=strip (line) end
+function keys.ItalicAngle (data,line) data.metadata.italicangle=tonumber (line) end
+function keys.IsFixedPitch(data,line) data.metadata.isfixedpitch=toboolean(line,true) end
+function keys.CharWidth (data,line) data.metadata.charwidth=tonumber (line) end
+function keys.XHeight (data,line) data.metadata.xheight=tonumber (line) end
+function keys.Descender (data,line) data.metadata.descender=tonumber (line) end
+function keys.Ascender (data,line) data.metadata.ascender=tonumber (line) end
+function keys.Comment (data,line)
+ line=lower(line)
+ local designsize=match(line,"designsize[^%d]*(%d+)")
+ if designsize then data.metadata.designsize=tonumber(designsize) end
+end
+local function get_charmetrics(data,charmetrics,vector)
+ local characters=data.characters
+ local chr,ind={},0
+ for k,v in gmatch(charmetrics,"([%a]+) +(.-) *;") do
+ if k=='C' then
+ v=tonumber(v)
+ if v<0 then
+ ind=ind+1
+ else
+ ind=v
+ end
+ chr={
+ index=ind
+ }
+ elseif k=='WX' then
+ chr.width=tonumber(v)
+ elseif k=='N' then
+ characters[v]=chr
+ elseif k=='B' then
+ local llx,lly,urx,ury=match(v,"^ *(.-) +(.-) +(.-) +(.-)$")
+ chr.boundingbox={ tonumber(llx),tonumber(lly),tonumber(urx),tonumber(ury) }
+ elseif k=='L' then
+ local plus,becomes=match(v,"^(.-) +(.-)$")
+ local ligatures=chr.ligatures
+ if ligatures then
+ ligatures[plus]=becomes
+ else
+ chr.ligatures={ [plus]=becomes }
+ end
+ end
+ end
+end
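+-- get_charmetrics consumes the key/value pairs of one afm CharMetrics line,
+-- for instance an entry in the standard afm syntax such as
+--
+--   -- C 65 ; WX 722 ; N A ; B 15 0 706 674 ;
+--
+-- which yields characters["A"] = { index=65, width=722,
+-- boundingbox={ 15, 0, 706, 674 } }; an "L plus becomes ;" pair would add an
+-- entry to that character's ligatures table. (The metric values here are made
+-- up for illustration.)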
+local function get_kernpairs(data,kernpairs)
+ local characters=data.characters
+ for one,two,value in gmatch(kernpairs,"KPX +(.-) +(.-) +(.-)\n") do
+ local chr=characters[one]
+ if chr then
+ local kerns=chr.kerns
+ if kerns then
+ kerns[two]=tonumber(value)
+ else
+ chr.kerns={ [two]=tonumber(value) }
+ end
+ end
+ end
+end
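+-- Kern pairs come from lines of the form "KPX A V -80", stored as
+-- characters["A"].kerns["V"] = -80; at this stage they are still keyed by
+-- glyph name, unify() further down rekeys them by codepoint.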
+local function get_variables(data,fontmetrics)
+ for key,rest in gmatch(fontmetrics,"(%a+) *(.-)[\n\r]") do
+ local keyhandler=keys[key]
+ if keyhandler then
+ keyhandler(data,rest)
+ end
+ end
+end
+local function get_indexes(data,pfbname)
+ data.resources.filename=resolvers.unresolve(pfbname)
+ local pfbblob=fontloader.open(pfbname)
+ if pfbblob then
+ local characters=data.characters
+ local pfbdata=fontloader.to_table(pfbblob)
+ if pfbdata then
+ local glyphs=pfbdata.glyphs
+ if glyphs then
+ if trace_loading then
+ report_afm("getting index data from %a",pfbname)
+ end
+ for index,glyph in next,glyphs do
+ local name=glyph.name
+ if name then
+ local char=characters[name]
+ if char then
+ if trace_indexing then
+ report_afm("glyph %a has index %a",name,index)
+ end
+ char.index=index
+ end
+ end
+ end
+ elseif trace_loading then
+ report_afm("no glyph data in pfb file %a",pfbname)
+ end
+ elseif trace_loading then
+ report_afm("no data in pfb file %a",pfbname)
+ end
+ fontloader.close(pfbblob)
+ elseif trace_loading then
+ report_afm("invalid pfb file %a",pfbname)
+ end
+end
+local function readafm(filename)
+ local ok,afmblob,size=resolvers.loadbinfile(filename)
+ if ok and afmblob then
+ local data={
+ resources={
+ filename=resolvers.unresolve(filename),
+ version=afm.version,
+ creator="context mkiv",
+ },
+ properties={
+ hasitalics=false,
+ },
+ goodies={},
+ metadata={
+ filename=file.removesuffix(file.basename(filename))
+ },
+ characters={
+ },
+ descriptions={
+ },
+ }
+ afmblob=gsub(afmblob,"StartCharMetrics(.-)EndCharMetrics",function(charmetrics)
+ if trace_loading then
+ report_afm("loading char metrics")
+ end
+ get_charmetrics(data,charmetrics,vector)
+ return ""
+ end)
+ afmblob=gsub(afmblob,"StartKernPairs(.-)EndKernPairs",function(kernpairs)
+ if trace_loading then
+ report_afm("loading kern pairs")
+ end
+ get_kernpairs(data,kernpairs)
+ return ""
+ end)
+ afmblob=gsub(afmblob,"StartFontMetrics%s+([%d%.]+)(.-)EndFontMetrics",function(version,fontmetrics)
+ if trace_loading then
+ report_afm("loading variables")
+ end
+ data.afmversion=version
+ get_variables(data,fontmetrics)
+ data.fontdimens=scan_comment(fontmetrics)
+ return ""
+ end)
+ return data
+ else
+ if trace_loading then
+ report_afm("no valid afm file %a",filename)
+ end
+ return nil
+ end
+end
+local addkerns,addligatures,addtexligatures,unify,normalize
+function afm.load(filename)
+ filename=resolvers.findfile(filename,'afm') or ""
+ if filename~="" and not fonts.names.ignoredfile(filename) then
+ local name=file.removesuffix(file.basename(filename))
+ local data=containers.read(afm.cache,name)
+ local attr=lfs.attributes(filename)
+ local size,time=attr.size or 0,attr.modification or 0
+ local pfbfile=file.replacesuffix(name,"pfb")
+ local pfbname=resolvers.findfile(pfbfile,"pfb") or ""
+ if pfbname=="" then
+ pfbname=resolvers.findfile(file.basename(pfbfile),"pfb") or ""
+ end
+ local pfbsize,pfbtime=0,0
+ if pfbname~="" then
+ local attr=lfs.attributes(pfbname)
+ pfbsize=attr.size or 0
+ pfbtime=attr.modification or 0
+ end
+ if not data or data.size~=size or data.time~=time or data.pfbsize~=pfbsize or data.pfbtime~=pfbtime then
+ report_afm("reading %a",filename)
+ data=readafm(filename)
+ if data then
+ if pfbname~="" then
+ get_indexes(data,pfbname)
+ elseif trace_loading then
+ report_afm("no pfb file for %a",filename)
+ end
+ report_afm("unifying %a",filename)
+ unify(data,filename)
+ if afm.addligatures then
+ report_afm("add ligatures")
+ addligatures(data)
+ end
+ if afm.addtexligatures then
+ report_afm("add tex ligatures")
+ addtexligatures(data)
+ end
+ if afm.addkerns then
+ report_afm("add extra kerns")
+ addkerns(data)
+ end
+ normalize(data)
+ report_afm("add tounicode data")
+ fonts.mappings.addtounicode(data,filename)
+ data.size=size
+ data.time=time
+ data.pfbsize=pfbsize
+ data.pfbtime=pfbtime
+ report_afm("saving %a in cache",name)
+ data=containers.write(afm.cache,name,data)
+ data=containers.read(afm.cache,name)
+ end
+ if applyruntimefixes and data then
+ applyruntimefixes(filename,data)
+ end
+ end
+ return data
+ else
+ return nil
+ end
+end
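+-- afm.load caches the parsed metrics per font name and only rereads when the
+-- afm or its companion pfb changes size or modification time; the enrichment
+-- steps (indexes from the pfb, unification, ligatures, tex ligatures, extra
+-- kerns, tounicode data) run once, the result is written back to the cache and
+-- then reread, so the cached form is what actually gets used.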
+local uparser=fonts.mappings.makenameparser()
+unify=function(data,filename)
+ local unicodevector=fonts.encodings.agl.unicodes
+ local unicodes,names={},{}
+ local private=constructors.privateoffset
+ local descriptions=data.descriptions
+ for name,blob in next,data.characters do
+ local code=unicodevector[name]
+ if not code then
+ code=lpegmatch(uparser,name)
+ if not code then
+ code=private
+ private=private+1
+ report_afm("assigning private slot %U for unknown glyph name %a",code,name)
+ end
+ end
+ local index=blob.index
+ unicodes[name]=code
+ names[name]=index
+ blob.name=name
+ descriptions[code]={
+ boundingbox=blob.boundingbox,
+ width=blob.width,
+ kerns=blob.kerns,
+ index=index,
+ name=name,
+ }
+ end
+ for unicode,description in next,descriptions do
+ local kerns=description.kerns
+ if kerns then
+ local krn={}
+ for name,kern in next,kerns do
+ local unicode=unicodes[name]
+ if unicode then
+ krn[unicode]=kern
+ else
+ end
+ end
+ description.kerns=krn
+ end
+ end
+ data.characters=nil
+ local resources=data.resources
+ local filename=resources.filename or file.removesuffix(file.basename(filename))
+ resources.filename=resolvers.unresolve(filename)
+ resources.unicodes=unicodes
+ resources.marks={}
+ resources.names=names
+ resources.private=private
+end
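+-- unify() converts the name-keyed afm data into the codepoint-keyed layout the
+-- rest of the loader expects: each glyph name goes through the agl vector,
+-- then the uniXXXX/uXXXXXX name parser, and anything still unresolved gets the
+-- next private slot (reported above). Illustrative resolution order:
+--
+--   -- "Aacute"   --> agl vector  --> 0x00C1
+--   -- "uni20AC"  --> name parser --> 0x20AC
+--   -- "orn013"   --> neither     --> private slot at constructors.privateoffset+n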
+normalize=function(data)
+end
+local addthem=function(rawdata,ligatures)
+ if ligatures then
+ local descriptions=rawdata.descriptions
+ local resources=rawdata.resources
+ local unicodes=resources.unicodes
+ local names=resources.names
+ for ligname,ligdata in next,ligatures do
+ local one=descriptions[unicodes[ligname]]
+ if one then
+ for _,pair in next,ligdata do
+ local two,three=unicodes[pair[1]],unicodes[pair[2]]
+ if two and three then
+ local ol=one.ligatures
+ if ol then
+ if not ol[two] then
+ ol[two]=three
+ end
+ else
+ one.ligatures={ [two]=three }
+ end
+ end
+ end
+ end
+ end
+ end
+end
+addligatures=function(rawdata) addthem(rawdata,afm.helpdata.ligatures ) end
+addtexligatures=function(rawdata) addthem(rawdata,afm.helpdata.texligatures) end
+addkerns=function(rawdata)
+ local descriptions=rawdata.descriptions
+ local resources=rawdata.resources
+ local unicodes=resources.unicodes
+ local function do_it_left(what)
+ if what then
+ for unicode,description in next,descriptions do
+ local kerns=description.kerns
+ if kerns then
+ local extrakerns
+ for complex,simple in next,what do
+ complex=unicodes[complex]
+ simple=unicodes[simple]
+ if complex and simple then
+ local ks=kerns[simple]
+ if ks and not kerns[complex] then
+ if extrakerns then
+ extrakerns[complex]=ks
+ else
+ extrakerns={ [complex]=ks }
+ end
+ end
+ end
+ end
+ if extrakerns then
+ description.extrakerns=extrakerns
+ end
+ end
+ end
+ end
+ end
+ local function do_it_copy(what)
+ if what then
+ for complex,simple in next,what do
+ complex=unicodes[complex]
+ simple=unicodes[simple]
+ if complex and simple then
+ local complexdescription=descriptions[complex]
+ if complexdescription then
+ local simpledescription=descriptions[complex]
+ if simpledescription then
+ local extrakerns
+ local kerns=simpledescription.kerns
+ if kerns then
+ for unicode,kern in next,kerns do
+ if extrakerns then
+ extrakerns[unicode]=kern
+ else
+ extrakerns={ [unicode]=kern }
+ end
+ end
+ end
+ local extrakerns=simpledescription.extrakerns
+ if extrakerns then
+ for unicode,kern in next,extrakerns do
+ if extrakerns then
+ extrakerns[unicode]=kern
+ else
+ extrakerns={ [unicode]=kern }
+ end
+ end
+ end
+ if extrakerns then
+ complexdescription.extrakerns=extrakerns
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ do_it_left(afm.helpdata.leftkerned)
+ do_it_left(afm.helpdata.bothkerned)
+ do_it_copy(afm.helpdata.bothkerned)
+ do_it_copy(afm.helpdata.rightkerned)
+end
+local function adddimensions(data)
+ if data then
+ for unicode,description in next,data.descriptions do
+ local bb=description.boundingbox
+ if bb then
+ local ht,dp=bb[4],-bb[2]
+ if ht==0 or ht<0 then
+ else
+ description.height=ht
+ end
+ if dp==0 or dp<0 then
+ else
+ description.depth=dp
+ end
+ end
+ end
+ end
+end
+local function copytotfm(data)
+ if data and data.descriptions then
+ local metadata=data.metadata
+ local resources=data.resources
+ local properties=derivetable(data.properties)
+ local descriptions=derivetable(data.descriptions)
+ local goodies=derivetable(data.goodies)
+ local characters={}
+ local parameters={}
+ local unicodes=resources.unicodes
+ for unicode,description in next,data.descriptions do
+ characters[unicode]={}
+ end
+ local filename=constructors.checkedfilename(resources)
+ local fontname=metadata.fontname or metadata.fullname
+ local fullname=metadata.fullname or metadata.fontname
+ local endash=unicodes['space']
+ local emdash=unicodes['emdash']
+ local spacer="space"
+ local spaceunits=500
+ local monospaced=metadata.isfixedpitch
+ local charwidth=metadata.charwidth
+ local italicangle=metadata.italicangle
+ local charxheight=metadata.xheight and metadata.xheight>0 and metadata.xheight
+ properties.monospaced=monospaced
+ parameters.italicangle=italicangle
+ parameters.charwidth=charwidth
+ parameters.charxheight=charxheight
+ if properties.monospaced then
+ if descriptions[endash] then
+ spaceunits,spacer=descriptions[endash].width,"space"
+ end
+ if not spaceunits and descriptions[emdash] then
+ spaceunits,spacer=descriptions[emdash].width,"emdash"
+ end
+ if not spaceunits and charwidth then
+ spaceunits,spacer=charwidth,"charwidth"
+ end
+ else
+ if descriptions[endash] then
+ spaceunits,spacer=descriptions[endash].width,"space"
+ end
+ if not spaceunits and charwidth then
+ spaceunits,spacer=charwidth,"charwidth"
+ end
+ end
+ spaceunits=tonumber(spaceunits)
+ if spaceunits<200 then
+ end
+ parameters.slant=0
+ parameters.space=spaceunits
+ parameters.space_stretch=500
+ parameters.space_shrink=333
+ parameters.x_height=400
+ parameters.quad=1000
+ if italicangle and italicangle~=0 then
+ parameters.italicangle=italicangle
+ parameters.italicfactor=math.cos(math.rad(90+italicangle))
+ parameters.slant=- math.tan(italicangle*math.pi/180)
+ end
+ if monospaced then
+ parameters.space_stretch=0
+ parameters.space_shrink=0
+ elseif afm.syncspace then
+ parameters.space_stretch=spaceunits/2
+ parameters.space_shrink=spaceunits/3
+ end
+ parameters.extra_space=parameters.space_shrink
+ if charxheight then
+ parameters.x_height=charxheight
+ else
+ local x=unicodes['x']
+ if x then
+ local x=descriptions[x]
+ if x then
+ parameters.x_height=x.height
+ end
+ end
+ end
+ local fd=data.fontdimens
+ if fd and fd[8] and fd[9] and fd[10] then
+ for k,v in next,fd do
+ parameters[k]=v
+ end
+ end
+ parameters.designsize=(metadata.designsize or 10)*65536
+ parameters.ascender=abs(metadata.ascender or 0)
+ parameters.descender=abs(metadata.descender or 0)
+ parameters.units=1000
+ properties.spacer=spacer
+ properties.encodingbytes=2
+ properties.format=fonts.formats[filename] or "type1"
+ properties.filename=filename
+ properties.fontname=fontname
+ properties.fullname=fullname
+ properties.psname=fullname
+ properties.name=filename or fullname or fontname
+ if next(characters) then
+ return {
+ characters=characters,
+ descriptions=descriptions,
+ parameters=parameters,
+ resources=resources,
+ properties=properties,
+ goodies=goodies,
+ }
+ end
+ end
+ return nil
+end
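+-- copytotfm derives the classical font dimensions from the afm metadata: the
+-- interword space is the width of the "space" glyph when present (otherwise
+-- the 500 unit default), and for proportional fonts with afm.syncspace enabled
+-- stretch and shrink follow the usual space/2 and space/3 convention, with
+-- extra_space equal to the shrink; monospaced fonts get zero stretch and
+-- shrink. For example a 600 unit space gives
+--
+--   -- space=600, space_stretch=300, space_shrink=200, extra_space=200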
+function afm.setfeatures(tfmdata,features)
+ local okay=constructors.initializefeatures("afm",tfmdata,features,trace_features,report_afm)
+ if okay then
+ return constructors.collectprocessors("afm",tfmdata,features,trace_features,report_afm)
+ else
+ return {}
+ end
+end
+local function checkfeatures(specification)
+end
+local function afmtotfm(specification)
+ local afmname=specification.filename or specification.name
+ if specification.forced=="afm" or specification.format=="afm" then
+ if trace_loading then
+ report_afm("forcing afm format for %a",afmname)
+ end
+ else
+ local tfmname=findbinfile(afmname,"ofm") or ""
+ if tfmname~="" then
+ if trace_loading then
+ report_afm("fallback from afm to tfm for %a",afmname)
+ end
+ return
+ end
+ end
+ if afmname~="" then
+ local features=constructors.checkedfeatures("afm",specification.features.normal)
+ specification.features.normal=features
+ constructors.hashinstance(specification,true)
+ specification=definers.resolve(specification)
+ local cache_id=specification.hash
+ local tfmdata=containers.read(constructors.cache,cache_id)
+ if not tfmdata then
+ local rawdata=afm.load(afmname)
+ if rawdata and next(rawdata) then
+ adddimensions(rawdata)
+ tfmdata=copytotfm(rawdata)
+ if tfmdata and next(tfmdata) then
+ local shared=tfmdata.shared
+ if not shared then
+ shared={}
+ tfmdata.shared=shared
+ end
+ shared.rawdata=rawdata
+ shared.features=features
+ shared.processes=afm.setfeatures(tfmdata,features)
+ end
+ elseif trace_loading then
+ report_afm("no (valid) afm file found with name %a",afmname)
+ end
+ tfmdata=containers.write(constructors.cache,cache_id,tfmdata)
+ end
+ return tfmdata
+ end
+end
+local function read_from_afm(specification)
+ local tfmdata=afmtotfm(specification)
+ if tfmdata then
+ tfmdata.properties.name=specification.name
+ tfmdata=constructors.scale(tfmdata,specification)
+ local allfeatures=tfmdata.shared.features or specification.features.normal
+ constructors.applymanipulators("afm",tfmdata,allfeatures,trace_features,report_afm)
+ fonts.loggers.register(tfmdata,'afm',specification)
+ end
+ return tfmdata
+end
+local function prepareligatures(tfmdata,ligatures,value)
+ if value then
+ local descriptions=tfmdata.descriptions
+ for unicode,character in next,tfmdata.characters do
+ local description=descriptions[unicode]
+ local dligatures=description.ligatures
+ if dligatures then
+ local cligatures=character.ligatures
+ if not cligatures then
+ cligatures={}
+ character.ligatures=cligatures
+ end
+ for unicode,ligature in next,dligatures do
+ cligatures[unicode]={
+ char=ligature,
+ type=0
+ }
+ end
+ end
+ end
+ end
+end
+local function preparekerns(tfmdata,kerns,value)
+ if value then
+ local rawdata=tfmdata.shared.rawdata
+ local resources=rawdata.resources
+ local unicodes=resources.unicodes
+ local descriptions=tfmdata.descriptions
+ for u,chr in next,tfmdata.characters do
+ local d=descriptions[u]
+ local newkerns=d[kerns]
+ if newkerns then
+ local kerns=chr.kerns
+ if not kerns then
+ kerns={}
+ chr.kerns=kerns
+ end
+ for k,v in next,newkerns do
+ local uk=unicodes[k]
+ if uk then
+ kerns[uk]=v
+ end
+ end
+ end
+ end
+ end
+end
+local list={
+ [0x0027]=0x2019,
+}
+local function texreplacements(tfmdata,value)
+ local descriptions=tfmdata.descriptions
+ local characters=tfmdata.characters
+ for k,v in next,list do
+ characters [k]=characters [v]
+ descriptions[k]=descriptions[v]
+ end
+end
+local function ligatures (tfmdata,value) prepareligatures(tfmdata,'ligatures',value) end
+local function texligatures(tfmdata,value) prepareligatures(tfmdata,'texligatures',value) end
+local function kerns (tfmdata,value) preparekerns (tfmdata,'kerns',value) end
+local function extrakerns (tfmdata,value) preparekerns (tfmdata,'extrakerns',value) end
+registerafmfeature {
+ name="liga",
+ description="traditional ligatures",
+ initializers={
+ base=ligatures,
+ node=ligatures,
+ }
+}
+registerafmfeature {
+ name="kern",
+ description="intercharacter kerning",
+ initializers={
+ base=kerns,
+ node=kerns,
+ }
+}
+registerafmfeature {
+ name="extrakerns",
+ description="additional intercharacter kerning",
+ initializers={
+ base=extrakerns,
+ node=extrakerns,
+ }
+}
+registerafmfeature {
+ name='tlig',
+ description='tex ligatures',
+ initializers={
+ base=texligatures,
+ node=texligatures,
+ }
+}
+registerafmfeature {
+ name='trep',
+ description='tex replacements',
+ initializers={
+ base=texreplacements,
+ node=texreplacements,
+ }
+}
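+-- The "trep" feature above is a plain slot alias: with the current list it
+-- makes U+0027 (the ascii apostrophe) render as U+2019, i.e.
+--
+--   -- characters  [0x0027]=characters  [0x2019]
+--   -- descriptions[0x0027]=descriptions[0x2019]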
+local check_tfm=readers.check_tfm
+fonts.formats.afm="type1"
+fonts.formats.pfb="type1"
+local function check_afm(specification,fullname)
+ local foundname=findbinfile(fullname,'afm') or ""
+ if foundname=="" then
+ foundname=fonts.names.getfilename(fullname,"afm") or ""
+ end
+ if foundname=="" and afm.autoprefixed then
+ local encoding,shortname=match(fullname,"^(.-)%-(.*)$")
+ if encoding and shortname and fonts.encodings.known[encoding] then
+ shortname=findbinfile(shortname,'afm') or ""
+ if shortname~="" then
+ foundname=shortname
+ if trace_defining then
+     report_afm("stripping encoding prefix from filename %a",fullname)
+ end
+ end
+ end
+ end
+ if foundname~="" then
+ specification.filename=foundname
+ specification.format="afm"
+ return read_from_afm(specification)
+ end
+end
+function readers.afm(specification,method)
+ local fullname,tfmdata=specification.filename or "",nil
+ if fullname=="" then
+ local forced=specification.forced or ""
+ if forced~="" then
+ tfmdata=check_afm(specification,specification.name.."."..forced)
+ end
+ if not tfmdata then
+ method=method or definers.method or "afm or tfm"
+ if method=="tfm" then
+ tfmdata=check_tfm(specification,specification.name)
+ elseif method=="afm" then
+ tfmdata=check_afm(specification,specification.name)
+ elseif method=="tfm or afm" then
+ tfmdata=check_tfm(specification,specification.name) or check_afm(specification,specification.name)
+ else
+ tfmdata=check_afm(specification,specification.name) or check_tfm(specification,specification.name)
+ end
+ end
+ else
+ tfmdata=check_afm(specification,fullname)
+ end
+ return tfmdata
+end
+function readers.pfb(specification,method)
+ local original=specification.specification
+ if trace_defining then
+ report_afm("using afm reader for %a",original)
+ end
+ specification.specification=gsub(original,"%.pfb",".afm")
+ specification.forced="afm"
+ return readers.afm(specification,method)
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-afk']={
+ version=1.001,
+ comment="companion to font-afm.lua",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+ dataonly=true,
+}
+local allocate=utilities.storage.allocate
+fonts.handlers.afm.helpdata={
+ ligatures=allocate {
+ ['f']={
+ { 'f','ff' },
+ { 'i','fi' },
+ { 'l','fl' },
+ },
+ ['ff']={
+ { 'i','ffi' }
+ },
+ ['fi']={
+ { 'i','fii' }
+ },
+ ['fl']={
+ { 'i','fli' }
+ },
+ ['s']={
+ { 't','st' }
+ },
+ ['i']={
+ { 'j','ij' }
+ },
+ },
+ texligatures=allocate {
+ ['quoteleft']={
+ { 'quoteleft','quotedblleft' }
+ },
+ ['quoteright']={
+ { 'quoteright','quotedblright' }
+ },
+ ['hyphen']={
+ { 'hyphen','endash' }
+ },
+ ['endash']={
+ { 'hyphen','emdash' }
+ }
+ },
+ leftkerned=allocate {
+ AEligature="A",aeligature="a",
+ OEligature="O",oeligature="o",
+ IJligature="I",ijligature="i",
+ AE="A",ae="a",
+ OE="O",oe="o",
+ IJ="I",ij="i",
+ Ssharp="S",ssharp="s",
+ },
+ rightkerned=allocate {
+ AEligature="E",aeligature="e",
+ OEligature="E",oeligature="e",
+ IJligature="J",ijligature="j",
+ AE="E",ae="e",
+ OE="E",oe="e",
+ IJ="J",ij="j",
+ Ssharp="S",ssharp="s",
+ },
+ bothkerned=allocate {
+ Acircumflex="A",acircumflex="a",
+ Ccircumflex="C",ccircumflex="c",
+ Ecircumflex="E",ecircumflex="e",
+ Gcircumflex="G",gcircumflex="g",
+ Hcircumflex="H",hcircumflex="h",
+ Icircumflex="I",icircumflex="i",
+ Jcircumflex="J",jcircumflex="j",
+ Ocircumflex="O",ocircumflex="o",
+ Scircumflex="S",scircumflex="s",
+ Ucircumflex="U",ucircumflex="u",
+ Wcircumflex="W",wcircumflex="w",
+ Ycircumflex="Y",ycircumflex="y",
+ Agrave="A",agrave="a",
+ Egrave="E",egrave="e",
+ Igrave="I",igrave="i",
+ Ograve="O",ograve="o",
+ Ugrave="U",ugrave="u",
+ Ygrave="Y",ygrave="y",
+ Atilde="A",atilde="a",
+ Itilde="I",itilde="i",
+ Otilde="O",otilde="o",
+ Utilde="U",utilde="u",
+ Ntilde="N",ntilde="n",
+ Adiaeresis="A",adiaeresis="a",Adieresis="A",adieresis="a",
+ Ediaeresis="E",ediaeresis="e",Edieresis="E",edieresis="e",
+ Idiaeresis="I",idiaeresis="i",Idieresis="I",idieresis="i",
+ Odiaeresis="O",odiaeresis="o",Odieresis="O",odieresis="o",
+ Udiaeresis="U",udiaeresis="u",Udieresis="U",udieresis="u",
+ Ydiaeresis="Y",ydiaeresis="y",Ydieresis="Y",ydieresis="y",
+ Aacute="A",aacute="a",
+ Cacute="C",cacute="c",
+ Eacute="E",eacute="e",
+ Iacute="I",iacute="i",
+ Lacute="L",lacute="l",
+ Nacute="N",nacute="n",
+ Oacute="O",oacute="o",
+ Racute="R",racute="r",
+ Sacute="S",sacute="s",
+ Uacute="U",uacute="u",
+ Yacute="Y",yacute="y",
+ Zacute="Z",zacute="z",
+ Dstroke="D",dstroke="d",
+ Hstroke="H",hstroke="h",
+ Tstroke="T",tstroke="t",
+ Cdotaccent="C",cdotaccent="c",
+ Edotaccent="E",edotaccent="e",
+ Gdotaccent="G",gdotaccent="g",
+ Idotaccent="I",idotaccent="i",
+ Zdotaccent="Z",zdotaccent="z",
+ Amacron="A",amacron="a",
+ Emacron="E",emacron="e",
+ Imacron="I",imacron="i",
+ Omacron="O",omacron="o",
+ Umacron="U",umacron="u",
+ Ccedilla="C",ccedilla="c",
+ Kcedilla="K",kcedilla="k",
+ Lcedilla="L",lcedilla="l",
+ Ncedilla="N",ncedilla="n",
+ Rcedilla="R",rcedilla="r",
+ Scedilla="S",scedilla="s",
+ Tcedilla="T",tcedilla="t",
+ Ohungarumlaut="O",ohungarumlaut="o",
+ Uhungarumlaut="U",uhungarumlaut="u",
+ Aogonek="A",aogonek="a",
+ Eogonek="E",eogonek="e",
+ Iogonek="I",iogonek="i",
+ Uogonek="U",uogonek="u",
+ Aring="A",aring="a",
+ Uring="U",uring="u",
+ Abreve="A",abreve="a",
+ Ebreve="E",ebreve="e",
+ Gbreve="G",gbreve="g",
+ Ibreve="I",ibreve="i",
+ Obreve="O",obreve="o",
+ Ubreve="U",ubreve="u",
+ Ccaron="C",ccaron="c",
+ Dcaron="D",dcaron="d",
+ Ecaron="E",ecaron="e",
+ Lcaron="L",lcaron="l",
+ Ncaron="N",ncaron="n",
+ Rcaron="R",rcaron="r",
+ Scaron="S",scaron="s",
+ Tcaron="T",tcaron="t",
+ Zcaron="Z",zcaron="z",
+ dotlessI="I",dotlessi="i",
+ dotlessJ="J",dotlessj="j",
+ AEligature="AE",aeligature="ae",AE="AE",ae="ae",
+ OEligature="OE",oeligature="oe",OE="OE",oe="oe",
+ IJligature="IJ",ijligature="ij",IJ="IJ",ij="ij",
+ Lstroke="L",lstroke="l",Lslash="L",lslash="l",
+ Ostroke="O",ostroke="o",Oslash="O",oslash="o",
+ Ssharp="SS",ssharp="ss",
+ Aumlaut="A",aumlaut="a",
+ Eumlaut="E",eumlaut="e",
+ Iumlaut="I",iumlaut="i",
+ Oumlaut="O",oumlaut="o",
+ Uumlaut="U",uumlaut="u",
+ }
+}
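+-- This help data feeds addligatures/addtexligatures in font-afm above: an
+-- entry like ['f']={ { 'i','fi' }, ... } means "f followed by i becomes fi",
+-- and the texligatures chains reproduce the traditional tex input conventions,
+-- e.g. quoteleft+quoteleft -> quotedblleft (``), hyphen+hyphen -> endash (--)
+-- and endash+hyphen -> emdash (---). The leftkerned/rightkerned/bothkerned
+-- lists tell addkerns which accented or composite glyphs may borrow kerns from
+-- their base letters.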
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-fonts-tfm']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+local tfm={}
+fonts.handlers.tfm=tfm
+fonts.formats.tfm="type1"
+function fonts.readers.tfm(specification)
+ local fullname=specification.filename or ""
+ if fullname=="" then
+ local forced=specification.forced or ""
+ if forced~="" then
+ fullname=specification.name.."."..forced
+ else
+ fullname=specification.name
+ end
+ end
+ local foundname=resolvers.findbinfile(fullname,'tfm') or ""
+ if foundname=="" then
+ foundname=resolvers.findbinfile(fullname,'ofm') or ""
+ end
+ if foundname~="" then
+ specification.filename=foundname
+ specification.format="ofm"
+ return font.read_tfm(specification.filename,specification.size)
+ end
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-oti']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local lower=string.lower
+local fonts=fonts
+local constructors=fonts.constructors
+local otf=constructors.newhandler("otf")
+local otffeatures=constructors.newfeatures("otf")
+local otftables=otf.tables
+local registerotffeature=otffeatures.register
+local allocate=utilities.storage.allocate
+registerotffeature {
+ name="features",
+ description="initialization of feature handler",
+ default=true,
+}
+local function setmode(tfmdata,value)
+ if value then
+ tfmdata.properties.mode=lower(value)
+ end
+end
+local function setlanguage(tfmdata,value)
+ if value then
+ local cleanvalue=lower(value)
+ local languages=otftables and otftables.languages
+ local properties=tfmdata.properties
+ if not languages then
+ properties.language=cleanvalue
+ elseif languages[value] then
+ properties.language=cleanvalue
+ else
+ properties.language="dflt"
+ end
+ end
+end
+local function setscript(tfmdata,value)
+ if value then
+ local cleanvalue=lower(value)
+ local scripts=otftables and otftables.scripts
+ local properties=tfmdata.properties
+ if not scripts then
+ properties.script=cleanvalue
+ elseif scripts[value] then
+ properties.script=cleanvalue
+ else
+ properties.script="dflt"
+ end
+ end
+end
+registerotffeature {
+ name="mode",
+ description="mode",
+ initializers={
+ base=setmode,
+ node=setmode,
+ }
+}
+registerotffeature {
+ name="language",
+ description="language",
+ initializers={
+ base=setlanguage,
+ node=setlanguage,
+ }
+}
+registerotffeature {
+ name="script",
+ description="script",
+ initializers={
+ base=setscript,
+ node=setscript,
+ }
+}
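+-- font-oti only registers the bookkeeping features for the otf handler: mode,
+-- language and script initializers that stash their (lowercased) value in
+-- tfmdata.properties, falling back to "dflt" when the requested script or
+-- language is not known to otf.tables (when those tables are present).
+-- Illustrative effect of a luaotfload-style request carrying script=latn:
+--
+--   -- "file:lmroman10-regular:script=latn;language=dflt"
+--   --   --> setscript(tfmdata,"latn") --> tfmdata.properties.script="latn"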
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-otf']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local utfbyte=utf.byte
+local format,gmatch,gsub,find,match,lower,strip=string.format,string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip
+local type,next,tonumber,tostring=type,next,tonumber,tostring
+local abs=math.abs
+local insert=table.insert
+local lpegmatch=lpeg.match
+local reversed,concat,remove,sortedkeys=table.reversed,table.concat,table.remove,table.sortedkeys
+local ioflush=io.flush
+local fastcopy,tohash,derivetable=table.fastcopy,table.tohash,table.derive
+local formatters=string.formatters
+local allocate=utilities.storage.allocate
+local registertracker=trackers.register
+local registerdirective=directives.register
+local starttiming=statistics.starttiming
+local stoptiming=statistics.stoptiming
+local elapsedtime=statistics.elapsedtime
+local findbinfile=resolvers.findbinfile
+local trace_private=false registertracker("otf.private",function(v) trace_private=v end)
+local trace_loading=false registertracker("otf.loading",function(v) trace_loading=v end)
+local trace_features=false registertracker("otf.features",function(v) trace_features=v end)
+local trace_dynamics=false registertracker("otf.dynamics",function(v) trace_dynamics=v end)
+local trace_sequences=false registertracker("otf.sequences",function(v) trace_sequences=v end)
+local trace_markwidth=false registertracker("otf.markwidth",function(v) trace_markwidth=v end)
+local trace_defining=false registertracker("fonts.defining",function(v) trace_defining=v end)
+local report_otf=logs.reporter("fonts","otf loading")
+local fonts=fonts
+local otf=fonts.handlers.otf
+otf.glists={ "gsub","gpos" }
+otf.version=2.756
+otf.cache=containers.define("fonts","otf",otf.version,true)
+local fontdata=fonts.hashes.identifiers
+local chardata=characters and characters.data
+local otffeatures=fonts.constructors.newfeatures("otf")
+local registerotffeature=otffeatures.register
+local enhancers=allocate()
+otf.enhancers=enhancers
+local patches={}
+enhancers.patches=patches
+local definers=fonts.definers
+local readers=fonts.readers
+local constructors=fonts.constructors
+local forceload=false
+local cleanup=0
+local usemetatables=false
+local packdata=true
+local syncspace=true
+local forcenotdef=false
+local includesubfonts=false
+local overloadkerns=false
+local applyruntimefixes=fonts.treatments and fonts.treatments.applyfixes
+local wildcard="*"
+local default="dflt"
+local fontloaderfields=fontloader.fields
+local mainfields=nil
+local glyphfields=nil
+local formats=fonts.formats
+formats.otf="opentype"
+formats.ttf="truetype"
+formats.ttc="truetype"
+formats.dfont="truetype"
+registerdirective("fonts.otf.loader.cleanup",function(v) cleanup=tonumber(v) or (v and 1) or 0 end)
+registerdirective("fonts.otf.loader.force",function(v) forceload=v end)
+registerdirective("fonts.otf.loader.usemetatables",function(v) usemetatables=v end)
+registerdirective("fonts.otf.loader.pack",function(v) packdata=v end)
+registerdirective("fonts.otf.loader.syncspace",function(v) syncspace=v end)
+registerdirective("fonts.otf.loader.forcenotdef",function(v) forcenotdef=v end)
+registerdirective("fonts.otf.loader.overloadkerns",function(v) overloadkerns=v end)
+function otf.fileformat(filename)
+ local leader=lower(io.loadchunk(filename,4))
+ local suffix=lower(file.suffix(filename))
+ if leader=="otto" then
+ return formats.otf,suffix=="otf"
+ elseif leader=="ttcf" then
+ return formats.ttc,suffix=="ttc"
+ elseif suffix=="ttc" then
+ return formats.ttc,true
+ elseif suffix=="dfont" then
+ return formats.dfont,true
+ else
+ return formats.ttf,suffix=="ttf"
+ end
+end
+local function otf_format(filename)
+ local format,okay=otf.fileformat(filename)
+ if not okay then
+ report_otf("font %a is actually an %a file",filename,format)
+ end
+ return format
+end
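+-- Formats are sniffed from the first four bytes rather than trusted from the
+-- suffix: "OTTO" marks a cff flavoured opentype font, "ttcf" a truetype
+-- collection, everything else is treated as truetype; the second return value
+-- says whether the suffix agrees, so otf_format can warn about mislabelled
+-- files. Illustrative:
+--
+--   -- otf.fileformat("foo.ttf") on an "OTTO" file --> "opentype", false
+--   --   (otf_format then reports that foo.ttf is actually an opentype file)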
+local function load_featurefile(raw,featurefile)
+ if featurefile and featurefile~="" then
+ if trace_loading then
+ report_otf("using featurefile %a",featurefile)
+ end
+ fontloader.apply_featurefile(raw,featurefile)
+ end
+end
+local function showfeatureorder(rawdata,filename)
+ local sequences=rawdata.resources.sequences
+ if sequences and #sequences>0 then
+ if trace_loading then
+ report_otf("font %a has %s sequences",filename,#sequences)
+ report_otf(" ")
+ end
+ for nos=1,#sequences do
+ local sequence=sequences[nos]
+ local typ=sequence.type or "no-type"
+ local name=sequence.name or "no-name"
+ local subtables=sequence.subtables or { "no-subtables" }
+ local features=sequence.features
+ if trace_loading then
+ report_otf("%3i %-15s %-20s [% t]",nos,name,typ,subtables)
+ end
+ if features then
+ for feature,scripts in next,features do
+ local tt={}
+ if type(scripts)=="table" then
+ for script,languages in next,scripts do
+ local ttt={}
+ for language,_ in next,languages do
+ ttt[#ttt+1]=language
+ end
+ tt[#tt+1]=formatters["[%s: % t]"](script,ttt)
+ end
+ if trace_loading then
+ report_otf(" %s: % t",feature,tt)
+ end
+ else
+ if trace_loading then
+ report_otf(" %s: %S",feature,scripts)
+ end
+ end
+ end
+ end
+ end
+ if trace_loading then
+ report_otf("\n")
+ end
+ elseif trace_loading then
+ report_otf("font %a has no sequences",filename)
+ end
+end
+local valid_fields=table.tohash {
+ "ascent",
+ "cidinfo",
+ "copyright",
+ "descent",
+ "design_range_bottom",
+ "design_range_top",
+ "design_size",
+ "encodingchanged",
+ "extrema_bound",
+ "familyname",
+ "fontname",
+ "fontname",
+ "fontstyle_id",
+ "fontstyle_name",
+ "fullname",
+ "hasvmetrics",
+ "horiz_base",
+ "issans",
+ "isserif",
+ "italicangle",
+ "macstyle",
+ "onlybitmaps",
+ "origname",
+ "os2_version",
+ "pfminfo",
+ "serifcheck",
+ "sfd_version",
+ "strokedfont",
+ "strokewidth",
+ "table_version",
+ "ttf_tables",
+ "uni_interp",
+ "uniqueid",
+ "units_per_em",
+ "upos",
+ "use_typo_metrics",
+ "uwidth",
+ "validation_state",
+ "version",
+ "vert_base",
+ "weight",
+ "weight_width_slope_only",
+}
+local ordered_enhancers={
+ "prepare tables",
+ "prepare glyphs",
+ "prepare lookups",
+ "analyze glyphs",
+ "analyze math",
+ "prepare tounicode",
+ "reorganize lookups",
+ "reorganize mark classes",
+ "reorganize anchor classes",
+ "reorganize glyph kerns",
+ "reorganize glyph lookups",
+ "reorganize glyph anchors",
+ "merge kern classes",
+ "reorganize features",
+ "reorganize subtables",
+ "check glyphs",
+ "check metadata",
+ "check extra features",
+ "check encoding",
+ "add duplicates",
+ "cleanup tables",
+}
+local actions=allocate()
+local before=allocate()
+local after=allocate()
+patches.before=before
+patches.after=after
+local function enhance(name,data,filename,raw)
+ local enhancer=actions[name]
+ if enhancer then
+ if trace_loading then
+ report_otf("apply enhancement %a to file %a",name,filename)
+ ioflush()
+ end
+ enhancer(data,filename,raw)
+ else
+ end
+end
+function enhancers.apply(data,filename,raw)
+ local basename=file.basename(lower(filename))
+ if trace_loading then
+ report_otf("%s enhancing file %a","start",filename)
+ end
+ ioflush()
+ for e=1,#ordered_enhancers do
+ local enhancer=ordered_enhancers[e]
+ local b=before[enhancer]
+ if b then
+ for pattern,action in next,b do
+ if find(basename,pattern) then
+ action(data,filename,raw)
+ end
+ end
+ end
+ enhance(enhancer,data,filename,raw)
+ local a=after[enhancer]
+ if a then
+ for pattern,action in next,a do
+ if find(basename,pattern) then
+ action(data,filename,raw)
+ end
+ end
+ end
+ ioflush()
+ end
+ if trace_loading then
+ report_otf("%s enhancing file %a","stop",filename)
+ end
+ ioflush()
+end
+function patches.register(what,where,pattern,action)
+ local pw=patches[what]
+ if pw then
+ local ww=pw[where]
+ if ww then
+ ww[pattern]=action
+ else
+ pw[where]={ [pattern]=action}
+ end
+ end
+end
+function patches.report(fmt,...)
+ if trace_loading then
+ report_otf("patching: %s",formatters[fmt](...))
+ end
+end
+function enhancers.register(what,action)
+ actions[what]=action
+end
+function otf.load(filename,sub,featurefile)
+ local base=file.basename(file.removesuffix(filename))
+ local name=file.removesuffix(base)
+ local attr=lfs.attributes(filename)
+ local size=attr and attr.size or 0
+ local time=attr and attr.modification or 0
+ if featurefile then
+ name=name.."@"..file.removesuffix(file.basename(featurefile))
+ end
+ if sub=="" then
+ sub=false
+ end
+ local hash=name
+ if sub then
+ hash=hash.."-"..sub
+ end
+ hash=containers.cleanname(hash)
+ local featurefiles
+ if featurefile then
+ featurefiles={}
+ for s in gmatch(featurefile,"[^,]+") do
+ local name=resolvers.findfile(file.addsuffix(s,'fea'),'fea') or ""
+ if name=="" then
+ report_otf("loading error, no featurefile %a",s)
+ else
+ local attr=lfs.attributes(name)
+ featurefiles[#featurefiles+1]={
+ name=name,
+ size=attr and attr.size or 0,
+ time=attr and attr.modification or 0,
+ }
+ end
+ end
+ if #featurefiles==0 then
+ featurefiles=nil
+ end
+ end
+ local data=containers.read(otf.cache,hash)
+ local reload=not data or data.size~=size or data.time~=time
+ if forceload then
+ report_otf("forced reload of %a due to hard coded flag",filename)
+ reload=true
+ end
+ if not reload then
+ local featuredata=data.featuredata
+ if featurefiles then
+ if not featuredata or #featuredata~=#featurefiles then
+ reload=true
+ else
+ for i=1,#featurefiles do
+ local fi,fd=featurefiles[i],featuredata[i]
+ if fi.name~=fd.name or fi.size~=fd.size or fi.time~=fd.time then
+ reload=true
+ break
+ end
+ end
+ end
+ elseif featuredata then
+ reload=true
+ end
+ if reload then
+ report_otf("loading: forced reload due to changed featurefile specification %a",featurefile)
+ end
+ end
+ if reload then
+ report_otf("loading %a, hash %a",filename,hash)
+ local fontdata,messages
+ if sub then
+ fontdata,messages=fontloader.open(filename,sub)
+ else
+ fontdata,messages=fontloader.open(filename)
+ end
+ if fontdata then
+ mainfields=mainfields or (fontloaderfields and fontloaderfields(fontdata))
+ end
+ if trace_loading and messages and #messages>0 then
+ if type(messages)=="string" then
+ report_otf("warning: %s",messages)
+ else
+ for m=1,#messages do
+ report_otf("warning: %S",messages[m])
+ end
+ end
+ else
+ report_otf("loading done")
+ end
+ if fontdata then
+ if featurefiles then
+ for i=1,#featurefiles do
+ load_featurefile(fontdata,featurefiles[i].name)
+ end
+ end
+ local unicodes={
+ }
+ local splitter=lpeg.splitter(" ",unicodes)
+ data={
+ size=size,
+ time=time,
+ format=otf_format(filename),
+ featuredata=featurefiles,
+ resources={
+ filename=resolvers.unresolve(filename),
+ version=otf.version,
+ creator="context mkiv",
+ unicodes=unicodes,
+ indices={
+ },
+ duplicates={
+ },
+ variants={
+ },
+ lookuptypes={},
+ },
+ metadata={
+ },
+ properties={
+ },
+ descriptions={},
+ goodies={},
+ helpers={
+ tounicodelist=splitter,
+ tounicodetable=lpeg.Ct(splitter),
+ },
+ }
+ starttiming(data)
+ report_otf("file size: %s",size)
+ enhancers.apply(data,filename,fontdata)
+ local packtime={}
+ if packdata then
+ if cleanup>0 then
+ collectgarbage("collect")
+ end
+ starttiming(packtime)
+ enhance("pack",data,filename,nil)
+ stoptiming(packtime)
+ end
+ report_otf("saving %a in cache",filename)
+ data=containers.write(otf.cache,hash,data)
+ if cleanup>1 then
+ collectgarbage("collect")
+ end
+ stoptiming(data)
+ if elapsedtime then
+ report_otf("preprocessing and caching time %s, packtime %s",
+ elapsedtime(data),packdata and elapsedtime(packtime) or 0)
+ end
+ fontloader.close(fontdata)
+ if cleanup>3 then
+ collectgarbage("collect")
+ end
+ data=containers.read(otf.cache,hash)
+ if cleanup>2 then
+ collectgarbage("collect")
+ end
+ else
+ data=nil
+ report_otf("loading failed due to read error")
+ end
+ end
+ if data then
+ if trace_defining then
+ report_otf("loading from cache using hash %a",hash)
+ end
+ enhance("unpack",data,filename,nil,false)
+ if applyruntimefixes then
+ applyruntimefixes(filename,data)
+ end
+ enhance("add dimensions",data,filename,nil,false)
+ if trace_sequences then
+ showfeatureorder(data,filename)
+ end
+ end
+ return data
+end
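+-- The cache key is built from the font name plus an optional "@featurefile"
+-- part and an optional "-subfont" part, passed through containers.cleanname;
+-- the cached table is thrown away whenever the size or modification time of
+-- the font (or of any listed feature file) changes, or when the directive
+-- fonts.otf.loader.force is set. For example otf.load("foo.ttc","2","extra.fea")
+-- builds its key from "foo@extra-2" before cleaning.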
+local mt={
+ __index=function(t,k)
+ if k=="height" then
+ local ht=t.boundingbox[4]
+ return ht<0 and 0 or ht
+ elseif k=="depth" then
+ local dp=-t.boundingbox[2]
+ return dp<0 and 0 or dp
+ elseif k=="width" then
+ return 0
+ elseif k=="name" then
+ return forcenotdef and ".notdef"
+ end
+ end
+}
+actions["prepare tables"]=function(data,filename,raw)
+ data.properties.hasitalics=false
+end
+actions["add dimensions"]=function(data,filename)
+ if data then
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local defaultwidth=resources.defaultwidth or 0
+ local defaultheight=resources.defaultheight or 0
+ local defaultdepth=resources.defaultdepth or 0
+ local basename=trace_markwidth and file.basename(filename)
+ if usemetatables then
+ for _,d in next,descriptions do
+ local wd=d.width
+ if not wd then
+ d.width=defaultwidth
+ elseif trace_markwidth and wd~=0 and d.class=="mark" then
+ report_otf("mark %a with width %b found in %a",d.name or "<noname>",wd,basename)
+ end
+ setmetatable(d,mt)
+ end
+ else
+ for _,d in next,descriptions do
+ local bb,wd=d.boundingbox,d.width
+ if not wd then
+ d.width=defaultwidth
+ elseif trace_markwidth and wd~=0 and d.class=="mark" then
+ report_otf("mark %a with width %b found in %a",d.name or "<noname>",wd,basename)
+ end
+ if bb then
+ local ht,dp=bb[4],-bb[2]
+ if ht==0 or ht<0 then
+ else
+ d.height=ht
+ end
+ if dp==0 or dp<0 then
+ else
+ d.depth=dp
+ end
+ end
+ end
+ end
+ end
+end
+local function somecopy(old)
+ if old then
+ local new={}
+ if type(old)=="table" then
+ for k,v in next,old do
+ if k=="glyphs" then
+ elseif type(v)=="table" then
+ new[k]=somecopy(v)
+ else
+ new[k]=v
+ end
+ end
+ else
+ for i=1,#mainfields do
+ local k=mainfields[i]
+ local v=old[k]
+ if k=="glyphs" then
+ elseif type(v)=="table" then
+ new[k]=somecopy(v)
+ else
+ new[k]=v
+ end
+ end
+ end
+ return new
+ else
+ return {}
+ end
+end
+actions["prepare glyphs"]=function(data,filename,raw)
+ local rawglyphs=raw.glyphs
+ local rawsubfonts=raw.subfonts
+ local rawcidinfo=raw.cidinfo
+ local criterium=constructors.privateoffset
+ local private=criterium
+ local resources=data.resources
+ local metadata=data.metadata
+ local properties=data.properties
+ local descriptions=data.descriptions
+ local unicodes=resources.unicodes
+ local indices=resources.indices
+ local duplicates=resources.duplicates
+ local variants=resources.variants
+ if rawsubfonts then
+ metadata.subfonts=includesubfonts and {}
+ properties.cidinfo=rawcidinfo
+ if rawcidinfo.registry then
+ local cidmap=fonts.cid.getmap(rawcidinfo)
+ if cidmap then
+ rawcidinfo.usedname=cidmap.usedname
+ local nofnames,nofunicodes=0,0
+ local cidunicodes,cidnames=cidmap.unicodes,cidmap.names
+ for cidindex=1,#rawsubfonts do
+ local subfont=rawsubfonts[cidindex]
+ local cidglyphs=subfont.glyphs
+ if includesubfonts then
+ metadata.subfonts[cidindex]=somecopy(subfont)
+ end
+ for index=0,subfont.glyphcnt-1 do
+ local glyph=cidglyphs[index]
+ if glyph then
+ local unicode=glyph.unicode
+       if unicode>=0x00E000 and unicode<=0x00F8FF then
+        unicode=-1
+       elseif unicode>=0x0F0000 and unicode<=0x0FFFFD then
+        unicode=-1
+       elseif unicode>=0x100000 and unicode<=0x10FFFD then
+        unicode=-1
+       end
+ local name=glyph.name or cidnames[index]
+ if not unicode or unicode==-1 then
+ unicode=cidunicodes[index]
+ end
+ if unicode and descriptions[unicode] then
+ report_otf("preventing glyph %a at index %H to overload unicode %U",name or "noname",index,unicode)
+ unicode=-1
+ end
+ if not unicode or unicode==-1 then
+ if not name then
+ name=format("u%06X",private)
+ end
+ unicode=private
+ unicodes[name]=private
+ if trace_private then
+ report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private)
+ end
+ private=private+1
+ nofnames=nofnames+1
+ else
+ if not name then
+ name=format("u%06X",unicode)
+ end
+ unicodes[name]=unicode
+ nofunicodes=nofunicodes+1
+ end
+ indices[index]=unicode
+ local description={
+ boundingbox=glyph.boundingbox,
+ name=glyph.name or name or "unknown",
+ cidindex=cidindex,
+ index=index,
+ glyph=glyph,
+ }
+ descriptions[unicode]=description
+ else
+ end
+ end
+ end
+ if trace_loading then
+ report_otf("cid font remapped, %s unicode points, %s symbolic names, %s glyphs",nofunicodes,nofnames,nofunicodes+nofnames)
+ end
+ elseif trace_loading then
+ report_otf("unable to remap cid font, missing cid file for %a",filename)
+ end
+ elseif trace_loading then
+ report_otf("font %a has no glyphs",filename)
+ end
+ else
+ for index=0,raw.glyphcnt-1 do
+ local glyph=rawglyphs[index]
+ if glyph then
+ local unicode=glyph.unicode
+ local name=glyph.name
+ if not unicode or unicode==-1 then
+ unicode=private
+ unicodes[name]=private
+ if trace_private then
+ report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private)
+ end
+ private=private+1
+ else
+ unicodes[name]=unicode
+ end
+ indices[index]=unicode
+ if not name then
+ name=format("u%06X",unicode)
+ end
+ descriptions[unicode]={
+ boundingbox=glyph.boundingbox,
+ name=name,
+ index=index,
+ glyph=glyph,
+ }
+ local altuni=glyph.altuni
+ if altuni then
+ for i=1,#altuni do
+ local a=altuni[i]
+ local u=a.unicode
+ local v=a.variant
+ if v then
+ local vv=variants[v]
+ if vv then
+ vv[u]=unicode
+ else
+ vv={ [u]=unicode }
+ variants[v]=vv
+ end
+ end
+ end
+ end
+ else
+ report_otf("potential problem: glyph %U is used but empty",index)
+ end
+ end
+ end
+ resources.private=private
+end
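+-- "prepare glyphs" gives every glyph a codepoint key: glyphs whose unicode is
+-- missing, -1, already taken, or sitting in one of the private use areas (the
+-- explicit range checks above) are moved to successive private slots starting
+-- at constructors.privateoffset, resources.indices maps the original glyph
+-- index back to the chosen codepoint, and variation selectors from glyph.altuni
+-- are collected as resources.variants[selector][basecharacter] = replacement.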
+actions["check encoding"]=function(data,filename,raw)
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local properties=data.properties
+ local unicodes=resources.unicodes
+ local indices=resources.indices
+ local duplicates=resources.duplicates
+ local mapdata=raw.map or {}
+ local unicodetoindex=mapdata and mapdata.map or {}
+ local indextounicode=mapdata and mapdata.backmap or {}
+ local encname=lower(data.enc_name or mapdata.enc_name or "")
+ local criterium=0xFFFF
+ local privateoffset=constructors.privateoffset
+ if find(encname,"unicode") then
+ if trace_loading then
+ report_otf("checking embedded unicode map %a",encname)
+ end
+ local reported={}
+ for maybeunicode,index in next,unicodetoindex do
+ if descriptions[maybeunicode] then
+ else
+ local unicode=indices[index]
+ if not unicode then
+ elseif maybeunicode==unicode then
+ elseif unicode>privateoffset then
+ else
+ local d=descriptions[unicode]
+ if d then
+ local c=d.copies
+ if c then
+ c[maybeunicode]=true
+ else
+ d.copies={ [maybeunicode]=true }
+ end
+ elseif index and not reported[index] then
+ report_otf("missing index %i",index)
+ reported[index]=true
+ end
+ end
+ end
+ end
+ for unicode,data in next,descriptions do
+ local d=data.copies
+ if d then
+ duplicates[unicode]=sortedkeys(d)
+ data.copies=nil
+ end
+ end
+ elseif properties.cidinfo then
+ report_otf("warning: no unicode map, used cidmap %a",properties.cidinfo.usedname)
+ else
+ report_otf("warning: non unicode map %a, only using glyph unicode data",encname or "whatever")
+ end
+ if mapdata then
+ mapdata.map={}
+ mapdata.backmap={}
+ end
+end
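+-- "add duplicates" materializes the duplicate code points collected above:
+-- the parent description is copied to each duplicate slot and existing kern
+-- pairs are extended to cover it; lists of more than 4 entries are skipped.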
+actions["add duplicates"]=function(data,filename,raw)
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local properties=data.properties
+ local unicodes=resources.unicodes
+ local indices=resources.indices
+ local duplicates=resources.duplicates
+ for unicode,d in next,duplicates do
+ local nofduplicates=#d
+ if nofduplicates>4 then
+ if trace_loading then
+ report_otf("ignoring excessive duplicates of %U (n=%s)",unicode,nofduplicates)
+ end
+ else
+ for i=1,nofduplicates do
+ local u=d[i]
+ if not descriptions[u] then
+ local description=descriptions[unicode]
+ local n=0
+ for _,description in next,descriptions do
+      local kerns=description.kerns
+      if kerns then
+ for _,k in next,kerns do
+ local ku=k[unicode]
+ if ku then
+ k[u]=ku
+ n=n+1
+ end
+ end
+ end
+ end
+ if u>0 then
+ local duplicate=table.copy(description)
+ duplicate.comment=format("copy of U+%05X",unicode)
+ descriptions[u]=duplicate
+ if trace_loading then
+ report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n)
+ end
+ end
+ end
+ end
+ end
+ end
+end
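+-- "analyze glyphs" records italic corrections and mark classes per glyph and
+-- determines the most common advance width; when one width clearly dominates
+-- (typical for cjk fonts) it is stored as resources.defaultwidth instead of
+-- per glyph.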
+actions["analyze glyphs"]=function(data,filename,raw)
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local metadata=data.metadata
+ local properties=data.properties
+ local hasitalics=false
+ local widths={}
+ local marks={}
+ for unicode,description in next,descriptions do
+ local glyph=description.glyph
+ local italic=glyph.italic_correction
+ if not italic then
+ elseif italic==0 then
+ else
+ description.italic=italic
+ hasitalics=true
+ end
+ local width=glyph.width
+ widths[width]=(widths[width] or 0)+1
+ local class=glyph.class
+ if class then
+ if class=="mark" then
+ marks[unicode]=true
+ end
+ description.class=class
+ end
+ end
+ properties.hasitalics=hasitalics
+ resources.marks=marks
+ local wd,most=0,1
+ for k,v in next,widths do
+ if v>most then
+ wd,most=k,v
+ end
+ end
+ if most>1000 then
+ if trace_loading then
+ report_otf("most common width: %s (%s times), sharing (cjk font)",wd,most)
+ end
+ for unicode,description in next,descriptions do
+ if description.width==wd then
+ else
+ description.width=description.glyph.width
+ end
+ end
+ resources.defaultwidth=wd
+ else
+ for unicode,description in next,descriptions do
+ description.width=description.glyph.width
+ end
+ end
+end
+actions["reorganize mark classes"]=function(data,filename,raw)
+ local mark_classes=raw.mark_classes
+ if mark_classes then
+ local resources=data.resources
+ local unicodes=resources.unicodes
+ local markclasses={}
+ resources.markclasses=markclasses
+ for name,class in next,mark_classes do
+ local t={}
+ for s in gmatch(class,"[^ ]+") do
+ t[unicodes[s]]=true
+ end
+ markclasses[name]=t
+ end
+ end
+end
+actions["reorganize features"]=function(data,filename,raw)
+ local features={}
+ data.resources.features=features
+ for k,what in next,otf.glists do
+ local dw=raw[what]
+ if dw then
+ local f={}
+ features[what]=f
+ for i=1,#dw do
+ local d=dw[i]
+ local dfeatures=d.features
+ if dfeatures then
+ for i=1,#dfeatures do
+ local df=dfeatures[i]
+ local tag=strip(lower(df.tag))
+ local ft=f[tag]
+ if not ft then
+ ft={}
+ f[tag]=ft
+ end
+ local dscripts=df.scripts
+ for i=1,#dscripts do
+ local d=dscripts[i]
+ local languages=d.langs
+ local script=strip(lower(d.script))
+ local fts=ft[script] if not fts then fts={} ft[script]=fts end
+ for i=1,#languages do
+ fts[strip(lower(languages[i]))]=true
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
+actions["reorganize anchor classes"]=function(data,filename,raw)
+ local resources=data.resources
+ local anchor_to_lookup={}
+ local lookup_to_anchor={}
+ resources.anchor_to_lookup=anchor_to_lookup
+ resources.lookup_to_anchor=lookup_to_anchor
+ local classes=raw.anchor_classes
+ if classes then
+ for c=1,#classes do
+ local class=classes[c]
+ local anchor=class.name
+ local lookups=class.lookup
+ if type(lookups)~="table" then
+ lookups={ lookups }
+ end
+ local a=anchor_to_lookup[anchor]
+ if not a then
+ a={}
+ anchor_to_lookup[anchor]=a
+ end
+ for l=1,#lookups do
+ local lookup=lookups[l]
+ local l=lookup_to_anchor[lookup]
+ if l then
+ l[anchor]=true
+ else
+ l={ [anchor]=true }
+ lookup_to_anchor[lookup]=l
+ end
+ a[lookup]=true
+ end
+ end
+ end
+end
+actions["prepare tounicode"]=function(data,filename,raw)
+ fonts.mappings.addtounicode(data,filename)
+end
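+-- contextual chain lookups carry a direction flag; "reorganize subtables"
+-- below flattens the raw gsub/gpos lists into the sequences (feature driven)
+-- and lookups (named) tables used by the node processor.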
+local g_directions={
+ gsub_contextchain=1,
+ gpos_contextchain=1,
+ gsub_reversecontextchain=-1,
+ gpos_reversecontextchain=-1,
+}
+actions["reorganize subtables"]=function(data,filename,raw)
+ local resources=data.resources
+ local sequences={}
+ local lookups={}
+ local chainedfeatures={}
+ resources.sequences=sequences
+ resources.lookups=lookups
+ for _,what in next,otf.glists do
+ local dw=raw[what]
+ if dw then
+ for k=1,#dw do
+ local gk=dw[k]
+ local features=gk.features
+ local typ=gk.type
+ local chain=g_directions[typ] or 0
+ local subtables=gk.subtables
+ if subtables then
+ local t={}
+ for s=1,#subtables do
+ t[s]=subtables[s].name
+ end
+ subtables=t
+ end
+ local flags,markclass=gk.flags,nil
+ if flags then
+ local t={
+ (flags.ignorecombiningmarks and "mark") or false,
+ (flags.ignoreligatures and "ligature") or false,
+ (flags.ignorebaseglyphs and "base") or false,
+ flags.r2l or false,
+ }
+ markclass=flags.mark_class
+ if markclass then
+ markclass=resources.markclasses[markclass]
+ end
+ flags=t
+ end
+ local name=gk.name
+ if not name then
+ report_otf("skipping weird lookup number %s",k)
+ elseif features then
+ local f={}
+ local o={}
+ for i=1,#features do
+ local df=features[i]
+ local tag=strip(lower(df.tag))
+ local ft=f[tag]
+ if not ft then
+ ft={}
+ f[tag]=ft
+ o[#o+1]=tag
+ end
+ local dscripts=df.scripts
+ for i=1,#dscripts do
+ local d=dscripts[i]
+ local languages=d.langs
+ local script=strip(lower(d.script))
+ local fts=ft[script] if not fts then fts={} ft[script]=fts end
+ for i=1,#languages do
+ fts[strip(lower(languages[i]))]=true
+ end
+ end
+ end
+ sequences[#sequences+1]={
+ type=typ,
+ chain=chain,
+ flags=flags,
+ name=name,
+ subtables=subtables,
+ markclass=markclass,
+ features=f,
+ order=o,
+ }
+ else
+ lookups[name]={
+ type=typ,
+ chain=chain,
+ flags=flags,
+ subtables=subtables,
+ markclass=markclass,
+ }
+ end
+ end
+ end
+ end
+end
+actions["prepare lookups"]=function(data,filename,raw)
+ local lookups=raw.lookups
+ if lookups then
+ data.lookups=lookups
+ end
+end
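+-- helpers for "reorganize lookups": the *_uncover functions split coverage
+-- strings into glyph lists (cached per string), the *_hashed functions turn
+-- those lists into hash sets for fast runtime lookup.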
+local function t_uncover(splitter,cache,covers)
+ local result={}
+ for n=1,#covers do
+ local cover=covers[n]
+ local uncovered=cache[cover]
+ if not uncovered then
+ uncovered=lpegmatch(splitter,cover)
+ cache[cover]=uncovered
+ end
+ result[n]=uncovered
+ end
+ return result
+end
+local function s_uncover(splitter,cache,cover)
+ if cover=="" then
+ return nil
+ else
+ local uncovered=cache[cover]
+ if not uncovered then
+ uncovered=lpegmatch(splitter,cover)
+ cache[cover]=uncovered
+ end
+ return { uncovered }
+ end
+end
+local function t_hashed(t,cache)
+ if t then
+ local ht={}
+ for i=1,#t do
+ local ti=t[i]
+ local tih=cache[ti]
+ if not tih then
+ tih={}
+ for i=1,#ti do
+ tih[ti[i]]=true
+ end
+ cache[ti]=tih
+ end
+ ht[i]=tih
+ end
+ return ht
+ else
+ return nil
+ end
+end
+local function s_hashed(t,cache)
+ if t then
+ local ht={}
+ local tf=t[1]
+ for i=1,#tf do
+ ht[i]={ [tf[i]]=true }
+ end
+ return ht
+ else
+ return nil
+ end
+end
+local function r_uncover(splitter,cache,cover,replacements)
+ if cover=="" then
+ return nil
+ else
+ local uncovered=cover[1]
+ local replaced=cache[replacements]
+ if not replaced then
+ replaced=lpegmatch(splitter,replacements)
+ cache[replacements]=replaced
+ end
+ local nu,nr=#uncovered,#replaced
+ local r={}
+ if nu==nr then
+ for i=1,nu do
+ r[uncovered[i]]=replaced[i]
+ end
+ end
+ return r
+ end
+end
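+-- "reorganize lookups" normalizes class, coverage, reversecoverage and glyphs
+-- rules into a common before/current/after (plus replacements) shape; class
+-- based rules are rewritten as coverage rules.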
+actions["reorganize lookups"]=function(data,filename,raw)
+ if data.lookups then
+ local splitter=data.helpers.tounicodetable
+ local t_u_cache={}
+ local s_u_cache=t_u_cache
+ local t_h_cache={}
+ local s_h_cache=t_h_cache
+ local r_u_cache={}
+ for _,lookup in next,data.lookups do
+ local rules=lookup.rules
+ if rules then
+ local format=lookup.format
+ if format=="class" then
+ local before_class=lookup.before_class
+ if before_class then
+ before_class=t_uncover(splitter,t_u_cache,reversed(before_class))
+ end
+ local current_class=lookup.current_class
+ if current_class then
+ current_class=t_uncover(splitter,t_u_cache,current_class)
+ end
+ local after_class=lookup.after_class
+ if after_class then
+ after_class=t_uncover(splitter,t_u_cache,after_class)
+ end
+ for i=1,#rules do
+ local rule=rules[i]
+ local class=rule.class
+ local before=class.before
+ if before then
+ for i=1,#before do
+ before[i]=before_class[before[i]] or {}
+ end
+ rule.before=t_hashed(before,t_h_cache)
+ end
+ local current=class.current
+ local lookups=rule.lookups
+ if current then
+ for i=1,#current do
+ current[i]=current_class[current[i]] or {}
+ if lookups and not lookups[i] then
+ lookups[i]=""
+ end
+ end
+ rule.current=t_hashed(current,t_h_cache)
+ end
+ local after=class.after
+ if after then
+ for i=1,#after do
+ after[i]=after_class[after[i]] or {}
+ end
+ rule.after=t_hashed(after,t_h_cache)
+ end
+ rule.class=nil
+ end
+ lookup.before_class=nil
+ lookup.current_class=nil
+ lookup.after_class=nil
+ lookup.format="coverage"
+ elseif format=="coverage" then
+ for i=1,#rules do
+ local rule=rules[i]
+ local coverage=rule.coverage
+ if coverage then
+ local before=coverage.before
+ if before then
+ before=t_uncover(splitter,t_u_cache,reversed(before))
+ rule.before=t_hashed(before,t_h_cache)
+ end
+ local current=coverage.current
+ if current then
+ current=t_uncover(splitter,t_u_cache,current)
+ local lookups=rule.lookups
+ if lookups then
+ for i=1,#current do
+ if not lookups[i] then
+ lookups[i]=""
+ end
+ end
+ end
+ rule.current=t_hashed(current,t_h_cache)
+ end
+ local after=coverage.after
+ if after then
+ after=t_uncover(splitter,t_u_cache,after)
+ rule.after=t_hashed(after,t_h_cache)
+ end
+ rule.coverage=nil
+ end
+ end
+ elseif format=="reversecoverage" then
+ for i=1,#rules do
+ local rule=rules[i]
+ local reversecoverage=rule.reversecoverage
+ if reversecoverage then
+ local before=reversecoverage.before
+ if before then
+ before=t_uncover(splitter,t_u_cache,reversed(before))
+ rule.before=t_hashed(before,t_h_cache)
+ end
+ local current=reversecoverage.current
+ if current then
+ current=t_uncover(splitter,t_u_cache,current)
+ rule.current=t_hashed(current,t_h_cache)
+ end
+ local after=reversecoverage.after
+ if after then
+ after=t_uncover(splitter,t_u_cache,after)
+ rule.after=t_hashed(after,t_h_cache)
+ end
+ local replacements=reversecoverage.replacements
+ if replacements then
+ rule.replacements=r_uncover(splitter,r_u_cache,current,replacements)
+ end
+ rule.reversecoverage=nil
+ end
+ end
+ elseif format=="glyphs" then
+ for i=1,#rules do
+ local rule=rules[i]
+ local glyphs=rule.glyphs
+ if glyphs then
+ local fore=glyphs.fore
+ if fore and fore~="" then
+ fore=s_uncover(splitter,s_u_cache,fore)
+ rule.before=s_hashed(fore,s_h_cache)
+ end
+ local back=glyphs.back
+ if back then
+ back=s_uncover(splitter,s_u_cache,back)
+ rule.after=s_hashed(back,s_h_cache)
+ end
+ local names=glyphs.names
+ if names then
+ names=s_uncover(splitter,s_u_cache,names)
+ rule.current=s_hashed(names,s_h_cache)
+ end
+ rule.glyphs=nil
+ local lookups=rule.lookups
+ if lookups then
+ for i=1,#names do
+ if not lookups[i] then
+ lookups[i]=""
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
+local function check_variants(unicode,the_variants,splitter,unicodes)
+ local variants=the_variants.variants
+ if variants then
+ local glyphs=lpegmatch(splitter,variants)
+ local done={ [unicode]=true }
+ local n=0
+ for i=1,#glyphs do
+ local g=glyphs[i]
+ if done[g] then
+ report_otf("skipping cyclic reference %U in math variant %U",g,unicode)
+ else
+ if n==0 then
+ n=1
+ variants={ g }
+ else
+ n=n+1
+ variants[n]=g
+ end
+ done[g]=true
+ end
+ end
+ if n==0 then
+ variants=nil
+ end
+ end
+ local parts=the_variants.parts
+ if parts then
+ local p=#parts
+ if p>0 then
+ for i=1,p do
+ local pi=parts[i]
+ pi.glyph=unicodes[pi.component] or 0
+ pi.component=nil
+ end
+ else
+ parts=nil
+ end
+ end
+ local italic_correction=the_variants.italic_correction
+ if italic_correction and italic_correction==0 then
+ italic_correction=nil
+ end
+ return variants,parts,italic_correction
+end
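+-- "analyze math" copies the MATH table into the metadata and attaches per
+-- glyph math data: kerns, top accents, italic corrections and the variants
+-- and parts resolved by check_variants above.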
+actions["analyze math"]=function(data,filename,raw)
+ if raw.math then
+ data.metadata.math=raw.math
+ local unicodes=data.resources.unicodes
+ local splitter=data.helpers.tounicodetable
+ for unicode,description in next,data.descriptions do
+ local glyph=description.glyph
+ local mathkerns=glyph.mathkern
+ local horiz_variants=glyph.horiz_variants
+ local vert_variants=glyph.vert_variants
+ local top_accent=glyph.top_accent
+ if mathkerns or horiz_variants or vert_variants or top_accent then
+ local math={}
+ if top_accent then
+ math.top_accent=top_accent
+ end
+ if mathkerns then
+ for k,v in next,mathkerns do
+ if not next(v) then
+ mathkerns[k]=nil
+ else
+ for k,v in next,v do
+ if v==0 then
+ k[v]=nil
+ end
+ end
+ end
+ end
+ math.kerns=mathkerns
+ end
+ if horiz_variants then
+ math.horiz_variants,math.horiz_parts,math.horiz_italic_correction=check_variants(unicode,horiz_variants,splitter,unicodes)
+ end
+ if vert_variants then
+ math.vert_variants,math.vert_parts,math.vert_italic_correction=check_variants(unicode,vert_variants,splitter,unicodes)
+ end
+ local italic_correction=description.italic
+ if italic_correction and italic_correction~=0 then
+ math.italic_correction=italic_correction
+ end
+ description.math=math
+ end
+ end
+ end
+end
+actions["reorganize glyph kerns"]=function(data,filename,raw)
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local unicodes=resources.unicodes
+ for unicode,description in next,descriptions do
+ local kerns=description.glyph.kerns
+ if kerns then
+ local newkerns={}
+ for k,kern in next,kerns do
+ local name=kern.char
+ local offset=kern.off
+ local lookup=kern.lookup
+ if name and offset and lookup then
+ local unicode=unicodes[name]
+ if unicode then
+ if type(lookup)=="table" then
+ for l=1,#lookup do
+ local lookup=lookup[l]
+ local lookupkerns=newkerns[lookup]
+ if lookupkerns then
+ lookupkerns[unicode]=offset
+ else
+ newkerns[lookup]={ [unicode]=offset }
+ end
+ end
+ else
+ local lookupkerns=newkerns[lookup]
+ if lookupkerns then
+ lookupkerns[unicode]=offset
+ else
+ newkerns[lookup]={ [unicode]=offset }
+ end
+ end
+ elseif trace_loading then
+ report_otf("problems with unicode %a of kern %a of glyph %U",name,k,unicode)
+ end
+ end
+ end
+ description.kerns=newkerns
+ end
+ end
+end
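+-- "merge kern classes" expands gpos kern classes into per glyph kern pairs;
+-- once a first glyph has been handled, kerns from later subtables are
+-- blocked, and conflicting overloads are counted and reported.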
+actions["merge kern classes"]=function(data,filename,raw)
+ local gposlist=raw.gpos
+ if gposlist then
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local unicodes=resources.unicodes
+ local splitter=data.helpers.tounicodetable
+ local ignored=0
+ local blocked=0
+ for gp=1,#gposlist do
+ local gpos=gposlist[gp]
+ local subtables=gpos.subtables
+ if subtables then
+ local first_done={}
+ local split={}
+ for s=1,#subtables do
+ local subtable=subtables[s]
+ local kernclass=subtable.kernclass
+ local lookup=subtable.lookup or subtable.name
+ if kernclass then
+ if #kernclass>0 then
+ kernclass=kernclass[1]
+ lookup=type(kernclass.lookup)=="string" and kernclass.lookup or lookup
+ report_otf("fixing kernclass table of lookup %a",lookup)
+ end
+ local firsts=kernclass.firsts
+ local seconds=kernclass.seconds
+ local offsets=kernclass.offsets
+ for n,s in next,firsts do
+ split[s]=split[s] or lpegmatch(splitter,s)
+ end
+ local maxseconds=0
+ for n,s in next,seconds do
+ if n>maxseconds then
+ maxseconds=n
+ end
+ split[s]=split[s] or lpegmatch(splitter,s)
+ end
+ for fk=1,#firsts do
+ local fv=firsts[fk]
+ local splt=split[fv]
+ if splt then
+ local extrakerns={}
+ local baseoffset=(fk-1)*maxseconds
+ for sk=2,maxseconds do
+ local sv=seconds[sk]
+ local splt=split[sv]
+ if splt then
+ local offset=offsets[baseoffset+sk]
+ if offset then
+ for i=1,#splt do
+ extrakerns[splt[i]]=offset
+ end
+ end
+ end
+ end
+ for i=1,#splt do
+ local first_unicode=splt[i]
+ if first_done[first_unicode] then
+ report_otf("lookup %a: ignoring further kerns of %C",lookup,first_unicode)
+ blocked=blocked+1
+ else
+ first_done[first_unicode]=true
+ local description=descriptions[first_unicode]
+ if description then
+ local kerns=description.kerns
+ if not kerns then
+ kerns={}
+ description.kerns=kerns
+ end
+ local lookupkerns=kerns[lookup]
+ if not lookupkerns then
+ lookupkerns={}
+ kerns[lookup]=lookupkerns
+ end
+ if overloadkerns then
+ for second_unicode,kern in next,extrakerns do
+ lookupkerns[second_unicode]=kern
+ end
+ else
+ for second_unicode,kern in next,extrakerns do
+ local k=lookupkerns[second_unicode]
+ if not k then
+ lookupkerns[second_unicode]=kern
+ elseif k~=kern then
+ if trace_loading then
+ report_otf("lookup %a: ignoring overload of kern between %C and %C, rejecting %a, keeping %a",lookup,first_unicode,second_unicode,k,kern)
+ end
+ ignored=ignored+1
+ end
+ end
+ end
+ elseif trace_loading then
+ report_otf("no glyph data for %U",first_unicode)
+ end
+ end
+ end
+ end
+ end
+ subtable.kernclass={}
+ end
+ end
+ end
+ end
+ if ignored>0 then
+ report_otf("%s kern overloads ignored",ignored)
+ end
+ if blocked>0 then
+   report_otf("%s successive kerns blocked",blocked)
+ end
+ end
+end
+actions["check glyphs"]=function(data,filename,raw)
+ for unicode,description in next,data.descriptions do
+ description.glyph=nil
+ end
+end
+actions["check metadata"]=function(data,filename,raw)
+ local metadata=data.metadata
+ for _,k in next,mainfields do
+ if valid_fields[k] then
+ local v=raw[k]
+ if not metadata[k] then
+ metadata[k]=v
+ end
+ end
+ end
+ local ttftables=metadata.ttf_tables
+ if ttftables then
+ for i=1,#ttftables do
+ ttftables[i].data="deleted"
+ end
+ end
+ if metadata.validation_state and table.contains(metadata.validation_state,"bad_ps_fontname") then
+ local name=file.nameonly(filename)
+ metadata.fontname="bad-fontname-"..name
+ metadata.fullname="bad-fullname-"..name
+ end
+end
+actions["cleanup tables"]=function(data,filename,raw)
+ data.resources.indices=nil
+ data.helpers=nil
+end
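+-- "reorganize glyph lookups" converts the per glyph gsub/gpos records into
+-- compact slookups (single) and mlookups (multiple) tables keyed by lookup
+-- name, with ligature and substitution targets resolved to unicodes.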
+actions["reorganize glyph lookups"]=function(data,filename,raw)
+ local resources=data.resources
+ local unicodes=resources.unicodes
+ local descriptions=data.descriptions
+ local splitter=data.helpers.tounicodelist
+ local lookuptypes=resources.lookuptypes
+ for unicode,description in next,descriptions do
+ local lookups=description.glyph.lookups
+ if lookups then
+ for tag,lookuplist in next,lookups do
+ for l=1,#lookuplist do
+ local lookup=lookuplist[l]
+ local specification=lookup.specification
+ local lookuptype=lookup.type
+ local lt=lookuptypes[tag]
+ if not lt then
+ lookuptypes[tag]=lookuptype
+ elseif lt~=lookuptype then
+ report_otf("conflicting lookuptypes, %a points to %a and %a",tag,lt,lookuptype)
+ end
+ if lookuptype=="ligature" then
+ lookuplist[l]={ lpegmatch(splitter,specification.components) }
+ elseif lookuptype=="alternate" then
+ lookuplist[l]={ lpegmatch(splitter,specification.components) }
+ elseif lookuptype=="substitution" then
+ lookuplist[l]=unicodes[specification.variant]
+ elseif lookuptype=="multiple" then
+ lookuplist[l]={ lpegmatch(splitter,specification.components) }
+ elseif lookuptype=="position" then
+ lookuplist[l]={
+ specification.x or 0,
+ specification.y or 0,
+ specification.h or 0,
+ specification.v or 0
+ }
+ elseif lookuptype=="pair" then
+ local one=specification.offsets[1]
+ local two=specification.offsets[2]
+ local paired=unicodes[specification.paired]
+ if one then
+ if two then
+ lookuplist[l]={ paired,{ one.x or 0,one.y or 0,one.h or 0,one.v or 0 },{ two.x or 0,two.y or 0,two.h or 0,two.v or 0 } }
+ else
+ lookuplist[l]={ paired,{ one.x or 0,one.y or 0,one.h or 0,one.v or 0 } }
+ end
+ else
+ if two then
+ lookuplist[l]={ paired,{},{ two.x or 0,two.y or 0,two.h or 0,two.v or 0} }
+ else
+ lookuplist[l]={ paired }
+ end
+ end
+ end
+ end
+ end
+ local slookups,mlookups
+ for tag,lookuplist in next,lookups do
+ if #lookuplist==1 then
+ if slookups then
+ slookups[tag]=lookuplist[1]
+ else
+ slookups={ [tag]=lookuplist[1] }
+ end
+ else
+ if mlookups then
+ mlookups[tag]=lookuplist
+ else
+ mlookups={ [tag]=lookuplist }
+ end
+ end
+ end
+ if slookups then
+ description.slookups=slookups
+ end
+ if mlookups then
+ description.mlookups=mlookups
+ end
+ end
+ end
+end
+actions["reorganize glyph anchors"]=function(data,filename,raw)
+ local descriptions=data.descriptions
+ for unicode,description in next,descriptions do
+ local anchors=description.glyph.anchors
+ if anchors then
+ for class,data in next,anchors do
+ if class=="baselig" then
+ for tag,specification in next,data do
+ for i=1,#specification do
+ local si=specification[i]
+ specification[i]={ si.x or 0,si.y or 0 }
+ end
+ end
+ else
+ for tag,specification in next,data do
+ data[tag]={ specification.x or 0,specification.y or 0 }
+ end
+ end
+ end
+ description.anchors=anchors
+ end
+ end
+end
+function otf.setfeatures(tfmdata,features)
+ local okay=constructors.initializefeatures("otf",tfmdata,features,trace_features,report_otf)
+ if okay then
+ return constructors.collectprocessors("otf",tfmdata,features,trace_features,report_otf)
+ else
+ return {}
+ end
+end
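+-- copytotfm turns the cached otf data into a tfm-like table: characters,
+-- font parameters (space, x-height, design size, slant), math parameters
+-- and the properties that the constructors later scale and pass on.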
+local function copytotfm(data,cache_id)
+ if data then
+ local metadata=data.metadata
+ local resources=data.resources
+ local properties=derivetable(data.properties)
+ local descriptions=derivetable(data.descriptions)
+ local goodies=derivetable(data.goodies)
+ local characters={}
+ local parameters={}
+ local mathparameters={}
+ local pfminfo=metadata.pfminfo or {}
+ local resources=data.resources
+ local unicodes=resources.unicodes
+ local spaceunits=500
+ local spacer="space"
+ local designsize=metadata.designsize or metadata.design_size or 100
+ local mathspecs=metadata.math
+ if designsize==0 then
+ designsize=100
+ end
+ if mathspecs then
+ for name,value in next,mathspecs do
+ mathparameters[name]=value
+ end
+ end
+ for unicode,_ in next,data.descriptions do
+ characters[unicode]={}
+ end
+ if mathspecs then
+ for unicode,character in next,characters do
+ local d=descriptions[unicode]
+ local m=d.math
+ if m then
+ local variants=m.horiz_variants
+ local parts=m.horiz_parts
+ if variants then
+ local c=character
+ for i=1,#variants do
+ local un=variants[i]
+ c.next=un
+ c=characters[un]
+ end
+ c.horiz_variants=parts
+ elseif parts then
+ character.horiz_variants=parts
+ end
+ local variants=m.vert_variants
+ local parts=m.vert_parts
+ if variants then
+ local c=character
+ for i=1,#variants do
+ local un=variants[i]
+ c.next=un
+ c=characters[un]
+ end
+ c.vert_variants=parts
+ elseif parts then
+ character.vert_variants=parts
+ end
+ local italic_correction=m.vert_italic_correction
+ if italic_correction then
+ character.vert_italic_correction=italic_correction
+ end
+ local top_accent=m.top_accent
+ if top_accent then
+ character.top_accent=top_accent
+ end
+ local kerns=m.kerns
+ if kerns then
+ character.mathkerns=kerns
+ end
+ end
+ end
+ end
+ local filename=constructors.checkedfilename(resources)
+ local fontname=metadata.fontname
+ local fullname=metadata.fullname or fontname
+ local units=metadata.units_per_em or 1000
+ if units==0 then
+ units=1000
+ metadata.units_per_em=1000
+ report_otf("changing %a units to %a",0,units)
+ end
+ local monospaced=metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose.proportion=="Monospaced")
+ local charwidth=pfminfo.avgwidth
+ local charxheight=pfminfo.os2_xheight and pfminfo.os2_xheight>0 and pfminfo.os2_xheight
+ local italicangle=metadata.italicangle
+ properties.monospaced=monospaced
+ parameters.italicangle=italicangle
+ parameters.charwidth=charwidth
+ parameters.charxheight=charxheight
+ local space=0x0020
+ local emdash=0x2014
+ if monospaced then
+ if descriptions[space] then
+ spaceunits,spacer=descriptions[space].width,"space"
+ end
+ if not spaceunits and descriptions[emdash] then
+ spaceunits,spacer=descriptions[emdash].width,"emdash"
+ end
+ if not spaceunits and charwidth then
+ spaceunits,spacer=charwidth,"charwidth"
+ end
+ else
+ if descriptions[space] then
+ spaceunits,spacer=descriptions[space].width,"space"
+ end
+ if not spaceunits and descriptions[emdash] then
+ spaceunits,spacer=descriptions[emdash].width/2,"emdash/2"
+ end
+ if not spaceunits and charwidth then
+ spaceunits,spacer=charwidth,"charwidth"
+ end
+ end
+ spaceunits=tonumber(spaceunits) or 500
+ parameters.slant=0
+ parameters.space=spaceunits
+ parameters.space_stretch=units/2
+ parameters.space_shrink=1*units/3
+ parameters.x_height=2*units/5
+ parameters.quad=units
+ if spaceunits<2*units/5 then
+ end
+ if italicangle and italicangle~=0 then
+ parameters.italicangle=italicangle
+ parameters.italicfactor=math.cos(math.rad(90+italicangle))
+ parameters.slant=- math.tan(italicangle*math.pi/180)
+ end
+ if monospaced then
+ parameters.space_stretch=0
+ parameters.space_shrink=0
+ elseif syncspace then
+ parameters.space_stretch=spaceunits/2
+ parameters.space_shrink=spaceunits/3
+ end
+ parameters.extra_space=parameters.space_shrink
+ if charxheight then
+ parameters.x_height=charxheight
+ else
+ local x=0x78
+ if x then
+ local x=descriptions[x]
+ if x then
+ parameters.x_height=x.height
+ end
+ end
+ end
+ parameters.designsize=(designsize/10)*65536
+ parameters.ascender=abs(metadata.ascent or 0)
+ parameters.descender=abs(metadata.descent or 0)
+ parameters.units=units
+ properties.space=spacer
+ properties.encodingbytes=2
+ properties.format=data.format or otf_format(filename) or formats.otf
+ properties.noglyphnames=true
+ properties.filename=filename
+ properties.fontname=fontname
+ properties.fullname=fullname
+ properties.psname=fontname or fullname
+ properties.name=filename or fullname
+ return {
+ characters=characters,
+ descriptions=descriptions,
+ parameters=parameters,
+ mathparameters=mathparameters,
+ resources=resources,
+ properties=properties,
+ goodies=goodies,
+ }
+ end
+end
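+-- otftotfm loads the raw otf data (or fetches it from the container cache),
+-- resolves duplicates, runs copytotfm and sets up the shared feature
+-- processors for this font instance.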
+local function otftotfm(specification)
+ local cache_id=specification.hash
+ local tfmdata=containers.read(constructors.cache,cache_id)
+ if not tfmdata then
+ local name=specification.name
+ local sub=specification.sub
+ local filename=specification.filename
+ local features=specification.features.normal
+ local rawdata=otf.load(filename,sub,features and features.featurefile)
+ if rawdata and next(rawdata) then
+ local descriptions=rawdata.descriptions
+ local duplicates=rawdata.resources.duplicates
+ if duplicates then
+ local nofduplicates,nofduplicated=0,0
+ for parent,list in next,duplicates do
+ for i=1,#list do
+ local unicode=list[i]
+ if not descriptions[unicode] then
+ descriptions[unicode]=descriptions[parent]
+ nofduplicated=nofduplicated+1
+ end
+ end
+ nofduplicates=nofduplicates+#list
+ end
+ if trace_otf and nofduplicated~=nofduplicates then
+ report_otf("%i extra duplicates copied out of %i",nofduplicated,nofduplicates)
+ end
+ end
+ rawdata.lookuphash={}
+ tfmdata=copytotfm(rawdata,cache_id)
+ if tfmdata and next(tfmdata) then
+ local features=constructors.checkedfeatures("otf",features)
+ local shared=tfmdata.shared
+ if not shared then
+ shared={}
+ tfmdata.shared=shared
+ end
+ shared.rawdata=rawdata
+ shared.dynamics={}
+ tfmdata.changed={}
+ shared.features=features
+ shared.processes=otf.setfeatures(tfmdata,features)
+ end
+ end
+ containers.write(constructors.cache,cache_id,tfmdata)
+ end
+ return tfmdata
+end
+local function read_from_otf(specification)
+ local tfmdata=otftotfm(specification)
+ if tfmdata then
+ tfmdata.properties.name=specification.name
+ tfmdata.properties.sub=specification.sub
+ tfmdata=constructors.scale(tfmdata,specification)
+ local allfeatures=tfmdata.shared.features or specification.features.normal
+ constructors.applymanipulators("otf",tfmdata,allfeatures,trace_features,report_otf)
+ constructors.setname(tfmdata,specification)
+ fonts.loggers.register(tfmdata,file.suffix(specification.filename),specification)
+ end
+ return tfmdata
+end
+local function checkmathsize(tfmdata,mathsize)
+ local mathdata=tfmdata.shared.rawdata.metadata.math
+ local mathsize=tonumber(mathsize)
+ if mathdata then
+ local parameters=tfmdata.parameters
+ parameters.scriptpercentage=mathdata.ScriptPercentScaleDown
+ parameters.scriptscriptpercentage=mathdata.ScriptScriptPercentScaleDown
+ parameters.mathsize=mathsize
+ end
+end
+registerotffeature {
+ name="mathsize",
+ description="apply mathsize specified in the font",
+ initializers={
+ base=checkmathsize,
+ node=checkmathsize,
+ }
+}
+function otf.collectlookups(rawdata,kind,script,language)
+ local sequences=rawdata.resources.sequences
+ if sequences then
+ local featuremap,featurelist={},{}
+ for s=1,#sequences do
+ local sequence=sequences[s]
+ local features=sequence.features
+ features=features and features[kind]
+ features=features and (features[script] or features[default] or features[wildcard])
+ features=features and (features[language] or features[default] or features[wildcard])
+ if features then
+ local subtables=sequence.subtables
+ if subtables then
+ for s=1,#subtables do
+ local ss=subtables[s]
+      if not featuremap[ss] then
+ featuremap[ss]=true
+ featurelist[#featurelist+1]=ss
+ end
+ end
+ end
+ end
+ end
+ if #featurelist>0 then
+ return featuremap,featurelist
+ end
+ end
+ return nil,nil
+end
+local function check_otf(forced,specification,suffix)
+ local name=specification.name
+ if forced then
+ name=specification.forcedname
+ end
+ local fullname=findbinfile(name,suffix) or ""
+ if fullname=="" then
+ fullname=fonts.names.getfilename(name,suffix) or ""
+ end
+ if fullname~="" and not fonts.names.ignoredfile(fullname) then
+ specification.filename=fullname
+ return read_from_otf(specification)
+ end
+end
+local function opentypereader(specification,suffix)
+ local forced=specification.forced or ""
+ if formats[forced] then
+ return check_otf(true,specification,forced)
+ else
+ return check_otf(false,specification,suffix)
+ end
+end
+readers.opentype=opentypereader
+function readers.otf (specification) return opentypereader(specification,"otf") end
+function readers.ttf (specification) return opentypereader(specification,"ttf") end
+function readers.ttc (specification) return opentypereader(specification,"ttf") end
+function readers.dfont(specification) return opentypereader(specification,"ttf") end
+function otf.scriptandlanguage(tfmdata,attr)
+ local properties=tfmdata.properties
+ return properties.script or "dflt",properties.language or "dflt"
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-otb']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local concat=table.concat
+local format,gmatch,gsub,find,match,lower,strip=string.format,string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip
+local type,next,tonumber,tostring=type,next,tonumber,tostring
+local lpegmatch=lpeg.match
+local utfchar=utf.char
+local trace_baseinit=false trackers.register("otf.baseinit",function(v) trace_baseinit=v end)
+local trace_singles=false trackers.register("otf.singles",function(v) trace_singles=v end)
+local trace_multiples=false trackers.register("otf.multiples",function(v) trace_multiples=v end)
+local trace_alternatives=false trackers.register("otf.alternatives",function(v) trace_alternatives=v end)
+local trace_ligatures=false trackers.register("otf.ligatures",function(v) trace_ligatures=v end)
+local trace_ligatures_detail=false trackers.register("otf.ligatures.detail",function(v) trace_ligatures_detail=v end)
+local trace_kerns=false trackers.register("otf.kerns",function(v) trace_kerns=v end)
+local trace_preparing=false trackers.register("otf.preparing",function(v) trace_preparing=v end)
+local report_prepare=logs.reporter("fonts","otf prepare")
+local fonts=fonts
+local otf=fonts.handlers.otf
+local otffeatures=otf.features
+local registerotffeature=otffeatures.register
+otf.defaultbasealternate="none"
+local wildcard="*"
+local default="dflt"
+local formatters=string.formatters
+local f_unicode=formatters["%U"]
+local f_uniname=formatters["%U (%s)"]
+local f_unilist=formatters["% t (% t)"]
+local function gref(descriptions,n)
+ if type(n)=="number" then
+ local name=descriptions[n].name
+ if name then
+ return f_uniname(n,name)
+ else
+ return f_unicode(n)
+ end
+ elseif n then
+ local num,nam,j={},{},0
+ for i=1,#n do
+ local ni=n[i]
+ if tonumber(ni) then
+ j=j+1
+ local di=descriptions[ni]
+ num[j]=f_unicode(ni)
+ nam[j]=di and di.name or "-"
+ end
+ end
+ return f_unilist(num,nam)
+ else
+ return "<error in base mode tracing>"
+ end
+end
+local function cref(feature,lookupname)
+ if lookupname then
+ return formatters["feature %a, lookup %a"](feature,lookupname)
+ else
+ return formatters["feature %a"](feature)
+ end
+end
+local function report_alternate(feature,lookupname,descriptions,unicode,replacement,value,comment)
+ report_prepare("%s: base alternate %s => %s (%S => %S)",
+ cref(feature,lookupname),
+ gref(descriptions,unicode),
+ replacement and gref(descriptions,replacement),
+ value,
+ comment)
+end
+local function report_substitution(feature,lookupname,descriptions,unicode,substitution)
+ report_prepare("%s: base substitution %s => %S",
+ cref(feature,lookupname),
+ gref(descriptions,unicode),
+ gref(descriptions,substitution))
+end
+local function report_ligature(feature,lookupname,descriptions,unicode,ligature)
+ report_prepare("%s: base ligature %s => %S",
+ cref(feature,lookupname),
+ gref(descriptions,ligature),
+ gref(descriptions,unicode))
+end
+local function report_kern(feature,lookupname,descriptions,unicode,otherunicode,value)
+ report_prepare("%s: base kern %s + %s => %S",
+ cref(feature,lookupname),
+ gref(descriptions,unicode),
+ gref(descriptions,otherunicode),
+ value)
+end
+local basemethods={}
+local basemethod="<unset>"
+local function applybasemethod(what,...)
+ local m=basemethods[basemethod][what]
+ if m then
+ return m(...)
+ end
+end
+local basehash,basehashes,applied={},1,{}
+local function registerbasehash(tfmdata)
+ local properties=tfmdata.properties
+ local hash=concat(applied," ")
+ local base=basehash[hash]
+ if not base then
+ basehashes=basehashes+1
+ base=basehashes
+ basehash[hash]=base
+ end
+ properties.basehash=base
+ properties.fullname=properties.fullname.."-"..base
+ applied={}
+end
+local function registerbasefeature(feature,value)
+ applied[#applied+1]=feature.."="..tostring(value)
+end
+local trace=false
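+-- finalize_ligatures builds the base mode ligature chains, allocating
+-- intermediate characters in the private area until every collected
+-- ligature has been resolved.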
+local function finalize_ligatures(tfmdata,ligatures)
+ local nofligatures=#ligatures
+ if nofligatures>0 then
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ local resources=tfmdata.resources
+ local unicodes=resources.unicodes
+ local private=resources.private
+ local alldone=false
+ while not alldone do
+ local done=0
+ for i=1,nofligatures do
+ local ligature=ligatures[i]
+ if ligature then
+ local unicode,lookupdata=ligature[1],ligature[2]
+ if trace_ligatures_detail then
+ report_prepare("building % a into %a",lookupdata,unicode)
+ end
+ local size=#lookupdata
+ local firstcode=lookupdata[1]
+ local firstdata=characters[firstcode]
+ local okay=false
+ if firstdata then
+ local firstname="ctx_"..firstcode
+ for i=1,size-1 do
+ local firstdata=characters[firstcode]
+ if not firstdata then
+ firstcode=private
+ if trace_ligatures_detail then
+ report_prepare("defining %a as %a",firstname,firstcode)
+ end
+ unicodes[firstname]=firstcode
+ firstdata={ intermediate=true,ligatures={} }
+ characters[firstcode]=firstdata
+ descriptions[firstcode]={ name=firstname }
+ private=private+1
+ end
+ local target
+ local secondcode=lookupdata[i+1]
+ local secondname=firstname.."_"..secondcode
+ if i==size-1 then
+ target=unicode
+ if not unicodes[secondname] then
+ unicodes[secondname]=unicode
+ end
+ okay=true
+ else
+ target=unicodes[secondname]
+ if not target then
+ break
+ end
+ end
+ if trace_ligatures_detail then
+ report_prepare("codes (%a,%a) + (%a,%a) -> %a",firstname,firstcode,secondname,secondcode,target)
+ end
+ local firstligs=firstdata.ligatures
+ if firstligs then
+ firstligs[secondcode]={ char=target }
+ else
+ firstdata.ligatures={ [secondcode]={ char=target } }
+ end
+ firstcode=target
+ firstname=secondname
+ end
+ elseif trace_ligatures_detail then
+ report_prepare("no glyph (%a,%a) for building %a",firstname,firstcode,target)
+ end
+ if okay then
+ ligatures[i]=false
+ done=done+1
+ end
+ end
+ end
+ alldone=done==0
+ end
+ if trace_ligatures_detail then
+ for k,v in table.sortedhash(characters) do
+ if v.ligatures then
+ table.print(v,k)
+ end
+ end
+ end
+ resources.private=private
+ end
+end
+local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist)
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ local resources=tfmdata.resources
+ local changed=tfmdata.changed
+ local unicodes=resources.unicodes
+ local lookuphash=resources.lookuphash
+ local lookuptypes=resources.lookuptypes
+ local ligatures={}
+ local alternate=tonumber(value)
+ local defaultalt=otf.defaultbasealternate
+ local trace_singles=trace_baseinit and trace_singles
+ local trace_alternatives=trace_baseinit and trace_alternatives
+ local trace_ligatures=trace_baseinit and trace_ligatures
+ local actions={
+ substitution=function(lookupdata,lookupname,description,unicode)
+ if trace_singles then
+ report_substitution(feature,lookupname,descriptions,unicode,lookupdata)
+ end
+ changed[unicode]=lookupdata
+ end,
+ alternate=function(lookupdata,lookupname,description,unicode)
+ local replacement=lookupdata[alternate]
+ if replacement then
+ changed[unicode]=replacement
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal")
+ end
+ elseif defaultalt=="first" then
+ replacement=lookupdata[1]
+ changed[unicode]=replacement
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ end
+ elseif defaultalt=="last" then
+    replacement=lookupdata[#lookupdata]
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ end
+ else
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown")
+ end
+ end
+ end,
+ ligature=function(lookupdata,lookupname,description,unicode)
+ if trace_ligatures then
+ report_ligature(feature,lookupname,descriptions,unicode,lookupdata)
+ end
+ ligatures[#ligatures+1]={ unicode,lookupdata }
+ end,
+ }
+ for unicode,character in next,characters do
+ local description=descriptions[unicode]
+ local lookups=description.slookups
+ if lookups then
+ for l=1,#lookuplist do
+ local lookupname=lookuplist[l]
+ local lookupdata=lookups[lookupname]
+ if lookupdata then
+ local lookuptype=lookuptypes[lookupname]
+ local action=actions[lookuptype]
+ if action then
+ action(lookupdata,lookupname,description,unicode)
+ end
+ end
+ end
+ end
+ local lookups=description.mlookups
+ if lookups then
+ for l=1,#lookuplist do
+ local lookupname=lookuplist[l]
+ local lookuplist=lookups[lookupname]
+ if lookuplist then
+ local lookuptype=lookuptypes[lookupname]
+ local action=actions[lookuptype]
+ if action then
+ for i=1,#lookuplist do
+ action(lookuplist[i],lookupname,description,unicode)
+ end
+ end
+ end
+ end
+ end
+ end
+ finalize_ligatures(tfmdata,ligatures)
+end
+local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist)
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ local resources=tfmdata.resources
+ local unicodes=resources.unicodes
+ local sharedkerns={}
+ local traceindeed=trace_baseinit and trace_kerns
+ for unicode,character in next,characters do
+ local description=descriptions[unicode]
+ local rawkerns=description.kerns
+ if rawkerns then
+ local s=sharedkerns[rawkerns]
+ if s==false then
+ elseif s then
+ character.kerns=s
+ else
+ local newkerns=character.kerns
+ local done=false
+ for l=1,#lookuplist do
+ local lookup=lookuplist[l]
+ local kerns=rawkerns[lookup]
+ if kerns then
+ for otherunicode,value in next,kerns do
+ if value==0 then
+ elseif not newkerns then
+ newkerns={ [otherunicode]=value }
+ done=true
+ if traceindeed then
+ report_kern(feature,lookup,descriptions,unicode,otherunicode,value)
+ end
+ elseif not newkerns[otherunicode] then
+ newkerns[otherunicode]=value
+ done=true
+ if traceindeed then
+ report_kern(feature,lookup,descriptions,unicode,otherunicode,value)
+ end
+ end
+ end
+ end
+ end
+ if done then
+ sharedkerns[rawkerns]=newkerns
+ character.kerns=newkerns
+ else
+ sharedkerns[rawkerns]=false
+ end
+ end
+ end
+ end
+end
+basemethods.independent={
+ preparesubstitutions=preparesubstitutions,
+ preparepositionings=preparepositionings,
+}
+local function makefake(tfmdata,name,present)
+ local resources=tfmdata.resources
+ local private=resources.private
+ local character={ intermediate=true,ligatures={} }
+ resources.unicodes[name]=private
+ tfmdata.characters[private]=character
+ tfmdata.descriptions[private]={ name=name }
+ resources.private=private+1
+ present[name]=private
+ return character
+end
+local function make_1(present,tree,name)
+ for k,v in next,tree do
+ if k=="ligature" then
+ present[name]=v
+ else
+ make_1(present,v,name.."_"..k)
+ end
+ end
+end
+local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,done,lookupname)
+ for k,v in next,tree do
+ if k=="ligature" then
+ local character=characters[preceding]
+ if not character then
+ if trace_baseinit then
+ report_prepare("weird ligature in lookup %a, current %C, preceding %C",lookupname,v,preceding)
+ end
+ character=makefake(tfmdata,name,present)
+ end
+ local ligatures=character.ligatures
+ if ligatures then
+ ligatures[unicode]={ char=v }
+ else
+ character.ligatures={ [unicode]={ char=v } }
+ end
+ if done then
+ local d=done[lookupname]
+ if not d then
+ done[lookupname]={ "dummy",v }
+ else
+ d[#d+1]=v
+ end
+ end
+ else
+ local code=present[name] or unicode
+ local name=name.."_"..k
+ make_2(present,tfmdata,characters,v,name,code,k,done,lookupname)
+ end
+ end
+end
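+-- shared variant of preparesubstitutions: reads substitutions, alternates
+-- and ligature trees from the node mode lookuphash instead of the per glyph
+-- slookups/mlookups tables.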
+local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist)
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ local resources=tfmdata.resources
+ local changed=tfmdata.changed
+ local lookuphash=resources.lookuphash
+ local lookuptypes=resources.lookuptypes
+ local ligatures={}
+ local alternate=tonumber(value)
+ local defaultalt=otf.defaultbasealternate
+ local trace_singles=trace_baseinit and trace_singles
+ local trace_alternatives=trace_baseinit and trace_alternatives
+ local trace_ligatures=trace_baseinit and trace_ligatures
+ for l=1,#lookuplist do
+ local lookupname=lookuplist[l]
+ local lookupdata=lookuphash[lookupname]
+ local lookuptype=lookuptypes[lookupname]
+ for unicode,data in next,lookupdata do
+ if lookuptype=="substitution" then
+ if trace_singles then
+ report_substitution(feature,lookupname,descriptions,unicode,data)
+ end
+ changed[unicode]=data
+ elseif lookuptype=="alternate" then
+ local replacement=data[alternate]
+ if replacement then
+ changed[unicode]=replacement
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal")
+ end
+ elseif defaultalt=="first" then
+ replacement=data[1]
+ changed[unicode]=replacement
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ end
+ elseif defaultalt=="last" then
+ replacement=data[#data]
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ end
+ else
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown")
+ end
+ end
+ elseif lookuptype=="ligature" then
+ ligatures[#ligatures+1]={ unicode,data,lookupname }
+ if trace_ligatures then
+ report_ligature(feature,lookupname,descriptions,unicode,data)
+ end
+ end
+ end
+ end
+ local nofligatures=#ligatures
+ if nofligatures>0 then
+ local characters=tfmdata.characters
+ local present={}
+ local done=trace_baseinit and trace_ligatures and {}
+ for i=1,nofligatures do
+ local ligature=ligatures[i]
+ local unicode,tree=ligature[1],ligature[2]
+ make_1(present,tree,"ctx_"..unicode)
+ end
+ for i=1,nofligatures do
+ local ligature=ligatures[i]
+ local unicode,tree,lookupname=ligature[1],ligature[2],ligature[3]
+ make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,lookupname)
+ end
+ end
+end
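+-- shared variant of preparepositionings: merges the kern pairs prepared in
+-- the node mode lookuphash directly into character.kerns.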
+local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist)
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ local resources=tfmdata.resources
+ local lookuphash=resources.lookuphash
+ local traceindeed=trace_baseinit and trace_kerns
+ for l=1,#lookuplist do
+ local lookupname=lookuplist[l]
+ local lookupdata=lookuphash[lookupname]
+ for unicode,data in next,lookupdata do
+ local character=characters[unicode]
+ local kerns=character.kerns
+ if not kerns then
+ kerns={}
+ character.kerns=kerns
+ end
+ if traceindeed then
+ for otherunicode,kern in next,data do
+ if not kerns[otherunicode] and kern~=0 then
+ kerns[otherunicode]=kern
+      report_kern(feature,lookupname,descriptions,unicode,otherunicode,kern)
+ end
+ end
+ else
+ for otherunicode,kern in next,data do
+ if not kerns[otherunicode] and kern~=0 then
+ kerns[otherunicode]=kern
+ end
+ end
+ end
+ end
+ end
+end
+local function initializehashes(tfmdata)
+ nodeinitializers.features(tfmdata)
+end
+basemethods.shared={
+ initializehashes=initializehashes,
+ preparesubstitutions=preparesubstitutions,
+ preparepositionings=preparepositionings,
+}
+basemethod="independent"
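+-- featuresinitializer drives base mode: for each enabled feature it collects
+-- the relevant lookups and dispatches to the active basemethod, then
+-- registers a base hash so identical feature sets share a fullname suffix.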
+local function featuresinitializer(tfmdata,value)
+ if true then
+ local starttime=trace_preparing and os.clock()
+ local features=tfmdata.shared.features
+ local fullname=trace_preparing and tfmdata.properties.fullname
+ if features then
+ applybasemethod("initializehashes",tfmdata)
+ local collectlookups=otf.collectlookups
+ local rawdata=tfmdata.shared.rawdata
+ local properties=tfmdata.properties
+ local script=properties.script
+ local language=properties.language
+ local basesubstitutions=rawdata.resources.features.gsub
+ local basepositionings=rawdata.resources.features.gpos
+ if basesubstitutions or basepositionings then
+ local sequences=tfmdata.resources.sequences
+ for s=1,#sequences do
+ local sequence=sequences[s]
+ local sfeatures=sequence.features
+ if sfeatures then
+ local order=sequence.order
+ if order then
+ for i=1,#order do
+ local feature=order[i]
+ if features[feature] then
+ local validlookups,lookuplist=collectlookups(rawdata,feature,script,language)
+ if not validlookups then
+ elseif basesubstitutions and basesubstitutions[feature] then
+ if trace_preparing then
+ report_prepare("filtering base feature %a for %a",feature,fullname)
+ end
+ applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist)
+ registerbasefeature(feature,value)
+ elseif basepositionings and basepositionings[feature] then
+ if trace_preparing then
+ report_prepare("filtering base feature %a for %a",feature,fullname)
+ end
+ applybasemethod("preparepositionings",tfmdata,feature,features[feature],validlookups,lookuplist)
+ registerbasefeature(feature,value)
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ registerbasehash(tfmdata)
+ end
+ if trace_preparing then
+ report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,fullname)
+ end
+ end
+end
+registerotffeature {
+ name="features",
+ description="features",
+ default=true,
+ initializers={
+ base=featuresinitializer,
+ }
+}
+directives.register("fonts.otf.loader.basemethod",function(v)
+ if basemethods[v] then
+ basemethod=v
+ end
+end)
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['node-inj']={
+ version=1.001,
+ comment="companion to node-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+}
+local next=next
+local utfchar=utf.char
+local trace_injections=false trackers.register("nodes.injections",function(v) trace_injections=v end)
+local report_injections=logs.reporter("nodes","injections")
+local attributes,nodes,node=attributes,nodes,node
+fonts=fonts
+local fontdata=fonts.hashes.identifiers
+nodes.injections=nodes.injections or {}
+local injections=nodes.injections
+local nodecodes=nodes.nodecodes
+local glyph_code=nodecodes.glyph
+local kern_code=nodecodes.kern
+local nodepool=nodes.pool
+local newkern=nodepool.kern
+local traverse_id=node.traverse_id
+local insert_node_before=node.insert_before
+local insert_node_after=node.insert_after
+local a_kernpair=attributes.private('kernpair')
+local a_ligacomp=attributes.private('ligacomp')
+local a_markbase=attributes.private('markbase')
+local a_markmark=attributes.private('markmark')
+local a_markdone=attributes.private('markdone')
+local a_cursbase=attributes.private('cursbase')
+local a_curscurs=attributes.private('curscurs')
+local a_cursdone=attributes.private('cursdone')
+function injections.installnewkern(nk)
+ newkern=nk or newkern
+end
+local cursives={}
+local marks={}
+local kerns={}
+function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext)
+ local dx,dy=factor*(exit[1]-entry[1]),factor*(exit[2]-entry[2])
+ local ws,wn=tfmstart.width,tfmnext.width
+ local bound=#cursives+1
+ start[a_cursbase]=bound
+ nxt[a_curscurs]=bound
+ cursives[bound]={ rlmode,dx,dy,ws,wn }
+ return dx,dy,bound
+end
+function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr)
+ local x,y,w,h=factor*spec[1],factor*spec[2],factor*spec[3],factor*spec[4]
+ if x~=0 or w~=0 or y~=0 or h~=0 then
+ local bound=current[a_kernpair]
+ if bound then
+ local kb=kerns[bound]
+ kb[2],kb[3],kb[4],kb[5]=(kb[2] or 0)+x,(kb[3] or 0)+y,(kb[4] or 0)+w,(kb[5] or 0)+h
+ else
+ bound=#kerns+1
+ current[a_kernpair]=bound
+ kerns[bound]={ rlmode,x,y,w,h,r2lflag,tfmchr.width }
+ end
+ return x,y,w,h,bound
+ end
+ return x,y,w,h
+end
+function injections.setkern(current,factor,rlmode,x,tfmchr)
+ local dx=factor*x
+ if dx~=0 then
+ local bound=#kerns+1
+ current[a_kernpair]=bound
+ kerns[bound]={ rlmode,dx }
+ return dx,bound
+ else
+ return 0,0
+ end
+end
+function injections.setmark(start,base,factor,rlmode,ba,ma)
+ local dx,dy=factor*(ba[1]-ma[1]),factor*(ba[2]-ma[2])
+ local bound=base[a_markbase]
+ local index=1
+ if bound then
+ local mb=marks[bound]
+ if mb then
+ index=#mb+1
+ mb[index]={ dx,dy,rlmode }
+ start[a_markmark]=bound
+ start[a_markdone]=index
+ return dx,dy,bound
+ else
+ report_injections("possible problem, %U is base mark without data (id %a)",base.char,bound)
+ end
+ end
+ index=index or 1
+ bound=#marks+1
+ base[a_markbase]=bound
+ start[a_markmark]=bound
+ start[a_markdone]=index
+ marks[bound]={ [index]={ dx,dy,rlmode } }
+ return dx,dy,bound
+end
+local function dir(n)
+ return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset"
+end
+local function trace(head)
+ report_injections("begin run")
+ for n in traverse_id(glyph_code,head) do
+ if n.subtype<256 then
+ local kp=n[a_kernpair]
+ local mb=n[a_markbase]
+ local mm=n[a_markmark]
+ local md=n[a_markdone]
+ local cb=n[a_cursbase]
+ local cc=n[a_curscurs]
+ local char=n.char
+ report_injections("font %s, char %U, glyph %c",n.font,char,char)
+ if kp then
+ local k=kerns[kp]
+ if k[3] then
+ report_injections(" pairkern: dir %a, x %p, y %p, w %p, h %p",dir(k[1]),k[2],k[3],k[4],k[5])
+ else
+ report_injections(" kern: dir %a, dx %p",dir(k[1]),k[2])
+ end
+ end
+ if mb then
+ report_injections(" markbase: bound %a",mb)
+ end
+ if mm then
+ local m=marks[mm]
+ if mb then
+ local m=m[mb]
+ if m then
+ report_injections(" markmark: bound %a, index %a, dx %p, dy %p",mm,md,m[1],m[2])
+ else
+ report_injections(" markmark: bound %a, missing index",mm)
+ end
+ else
+ m=m[1]
+ report_injections(" markmark: bound %a, dx %p, dy %p",mm,m and m[1],m and m[2])
+ end
+ end
+ if cb then
+ report_injections(" cursbase: bound %a",cb)
+ end
+ if cc then
+ local c=cursives[cc]
+ report_injections(" curscurs: bound %a, dir %a, dx %p, dy %p",cc,dir(c[1]),c[2],c[3])
+ end
+ end
+ end
+ report_injections("end run")
+end
+local function show_result(head)
+ local current=head
+ local skipping=false
+ while current do
+ local id=current.id
+ if id==glyph_code then
+ report_injections("char: %C, width %p, xoffset %p, yoffset %p",current.char,current.width,current.xoffset,current.yoffset)
+ skipping=false
+ elseif id==kern_code then
+ report_injections("kern: %p",current.kern)
+ skipping=false
+ elseif not skipping then
+ report_injections()
+ skipping=true
+ end
+ current=current.next
+ end
+end
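+-- injections.handler applies the collected kern, cursive and mark data to
+-- the node list: yoffsets for cursive chains and marks, kern nodes for pair
+-- adjustments; the per run tables are cleared unless keep is set.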
+function injections.handler(head,where,keep)
+ local has_marks,has_cursives,has_kerns=next(marks),next(cursives),next(kerns)
+ if has_marks or has_cursives then
+ if trace_injections then
+ trace(head)
+ end
+ local done,ky,rl,valid,cx,wx,mk,nofvalid=false,{},{},{},{},{},{},0
+ if has_kerns then
+ local nf,tm=nil,nil
+ for n in traverse_id(glyph_code,head) do
+ if n.subtype<256 then
+ nofvalid=nofvalid+1
+ valid[nofvalid]=n
+ if n.font~=nf then
+ nf=n.font
+ tm=fontdata[nf].resources.marks
+ end
+ if tm then
+ mk[n]=tm[n.char]
+ end
+ local k=n[a_kernpair]
+ if k then
+ local kk=kerns[k]
+ if kk then
+ local x,y,w,h=kk[2] or 0,kk[3] or 0,kk[4] or 0,kk[5] or 0
+ local dy=y-h
+ if dy~=0 then
+ ky[n]=dy
+ end
+ if w~=0 or x~=0 then
+ wx[n]=kk
+ end
+ rl[n]=kk[1]
+ end
+ end
+ end
+ end
+ else
+ local nf,tm=nil,nil
+ for n in traverse_id(glyph_code,head) do
+ if n.subtype<256 then
+ nofvalid=nofvalid+1
+ valid[nofvalid]=n
+ if n.font~=nf then
+ nf=n.font
+ tm=fontdata[nf].resources.marks
+ end
+ if tm then
+ mk[n]=tm[n.char]
+ end
+ end
+ end
+ end
+ if nofvalid>0 then
+ local cx={}
+ if has_kerns and next(ky) then
+ for n,k in next,ky do
+ n.yoffset=k
+ end
+ end
+ if has_cursives then
+ local p_cursbase,p=nil,nil
+ local t,d,maxt={},{},0
+ for i=1,nofvalid do
+ local n=valid[i]
+ if not mk[n] then
+ local n_cursbase=n[a_cursbase]
+ if p_cursbase then
+ local n_curscurs=n[a_curscurs]
+ if p_cursbase==n_curscurs then
+ local c=cursives[n_curscurs]
+ if c then
+ local rlmode,dx,dy,ws,wn=c[1],c[2],c[3],c[4],c[5]
+ if rlmode>=0 then
+ dx=dx-ws
+ else
+ dx=dx+wn
+ end
+ if dx~=0 then
+ cx[n]=dx
+ rl[n]=rlmode
+ end
+ dy=-dy
+ maxt=maxt+1
+ t[maxt]=p
+ d[maxt]=dy
+ else
+ maxt=0
+ end
+ end
+ elseif maxt>0 then
+ local ny=n.yoffset
+ for i=maxt,1,-1 do
+ ny=ny+d[i]
+ local ti=t[i]
+ ti.yoffset=ti.yoffset+ny
+ end
+ maxt=0
+ end
+ if not n_cursbase and maxt>0 then
+ local ny=n.yoffset
+ for i=maxt,1,-1 do
+ ny=ny+d[i]
+ local ti=t[i]
+ ti.yoffset=ny
+ end
+ maxt=0
+ end
+ p_cursbase,p=n_cursbase,n
+ end
+ end
+ if maxt>0 then
+ local ny=n.yoffset
+ for i=maxt,1,-1 do
+ ny=ny+d[i]
+ local ti=t[i]
+ ti.yoffset=ny
+ end
+ maxt=0
+ end
+ if not keep then
+ cursives={}
+ end
+ end
+ if has_marks then
+ for i=1,nofvalid do
+ local p=valid[i]
+ local p_markbase=p[a_markbase]
+ if p_markbase then
+ local mrks=marks[p_markbase]
+ local nofmarks=#mrks
+ for n in traverse_id(glyph_code,p.next) do
+ local n_markmark=n[a_markmark]
+ if p_markbase==n_markmark then
+ local index=n[a_markdone] or 1
+ local d=mrks[index]
+ if d then
+ local rlmode=d[3]
+ local k=wx[p]
+ if k then
+ local x=k[2]
+ local w=k[4]
+ if w then
+ if rlmode and rlmode>=0 then
+ n.xoffset=p.xoffset-p.width+d[1]-(w-x)
+ else
+ n.xoffset=p.xoffset-d[1]-x
+ end
+ else
+ if rlmode and rlmode>=0 then
+ n.xoffset=p.xoffset-p.width+d[1]
+ else
+ n.xoffset=p.xoffset-d[1]-x
+ end
+ end
+ else
+ if rlmode and rlmode>=0 then
+ n.xoffset=p.xoffset-p.width+d[1]
+ else
+ n.xoffset=p.xoffset-d[1]
+ end
+ local w=n.width
+ if w~=0 then
+ insert_node_before(head,n,newkern(-w/2))
+ insert_node_after(head,n,newkern(-w/2))
+ end
+ end
+ if mk[p] then
+ n.yoffset=p.yoffset+d[2]
+ else
+ n.yoffset=n.yoffset+p.yoffset+d[2]
+ end
+ if nofmarks==1 then
+ break
+ else
+ nofmarks=nofmarks-1
+ end
+ end
+ else
+ end
+ end
+ end
+ end
+ if not keep then
+ marks={}
+ end
+ end
+ if next(wx) then
+ for n,k in next,wx do
+ local x=k[2]
+ local w=k[4]
+ if w then
+ local rl=k[1]
+ local wx=w-x
+ if rl<0 then
+ if wx~=0 then
+ insert_node_before(head,n,newkern(wx))
+ end
+ if x~=0 then
+ insert_node_after (head,n,newkern(x))
+ end
+ else
+ if x~=0 then
+ insert_node_before(head,n,newkern(x))
+ end
+ if wx~=0 then
+ insert_node_after (head,n,newkern(wx))
+ end
+ end
+ elseif x~=0 then
+ insert_node_before(head,n,newkern(x))
+ end
+ end
+ end
+ if next(cx) then
+ for n,k in next,cx do
+ if k~=0 then
+ local rln=rl[n]
+ if rln and rln<0 then
+ insert_node_before(head,n,newkern(-k))
+ else
+ insert_node_before(head,n,newkern(k))
+ end
+ end
+ end
+ end
+ if not keep then
+ kerns={}
+ end
+ return head,true
+ elseif not keep then
+ kerns,cursives,marks={},{},{}
+ end
+ elseif has_kerns then
+ if trace_injections then
+ trace(head)
+ end
+ for n in traverse_id(glyph_code,head) do
+ if n.subtype<256 then
+ local k=n[a_kernpair]
+ if k then
+ local kk=kerns[k]
+ if kk then
+ local rl,x,y,w=kk[1],kk[2] or 0,kk[3],kk[4]
+ if y and y~=0 then
+ n.yoffset=y
+ end
+ if w then
+ local wx=w-x
+ if rl<0 then
+ if wx~=0 then
+ insert_node_before(head,n,newkern(wx))
+ end
+ if x~=0 then
+ insert_node_after (head,n,newkern(x))
+ end
+ else
+ if x~=0 then
+ insert_node_before(head,n,newkern(x))
+ end
+ if wx~=0 then
+ insert_node_after(head,n,newkern(wx))
+ end
+ end
+ else
+ if x~=0 then
+ insert_node_before(head,n,newkern(x))
+ end
+ end
+ end
+ end
+ end
+ end
+ if not keep then
+ kerns={}
+ end
+ return head,true
+ else
+ end
+ return head,false
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-ota']={
+ version=1.001,
+ comment="companion to font-otf.lua (analysing)",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
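+-- font-ota is the analyser layer: it tags glyphs with a private state
+-- attribute (init/medi/fina/isol/mark/...) that the feature processor in
+-- font-otn later uses to select positional forms, e.g. for Arabic scripts.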
+local type=type
+if not trackers then trackers={ register=function() end } end
+local fonts,nodes,node=fonts,nodes,node
+local allocate=utilities.storage.allocate
+local otf=fonts.handlers.otf
+local analyzers=fonts.analyzers
+local initializers=allocate()
+local methods=allocate()
+analyzers.initializers=initializers
+analyzers.methods=methods
+analyzers.useunicodemarks=false
+local a_state=attributes.private('state')
+local nodecodes=nodes.nodecodes
+local glyph_code=nodecodes.glyph
+local disc_code=nodecodes.disc
+local math_code=nodecodes.math
+local traverse_id=node.traverse_id
+local traverse_node_list=node.traverse
+local end_of_math=node.end_of_math
+local fontdata=fonts.hashes.identifiers
+local categories=characters and characters.categories or {}
+local otffeatures=fonts.constructors.newfeatures("otf")
+local registerotffeature=otffeatures.register
+local s_init=1 local s_rphf=7
+local s_medi=2 local s_half=8
+local s_fina=3 local s_pref=9
+local s_isol=4 local s_blwf=10
+local s_mark=5 local s_pstf=11
+local s_rest=6
+local states={
+ init=s_init,
+ medi=s_medi,
+ fina=s_fina,
+ isol=s_isol,
+ mark=s_mark,
+ rest=s_rest,
+ rphf=s_rphf,
+ half=s_half,
+ pref=s_pref,
+ blwf=s_blwf,
+ pstf=s_pstf,
+}
+local features={
+ init=s_init,
+ medi=s_medi,
+ fina=s_fina,
+ isol=s_isol,
+ rphf=s_rphf,
+ half=s_half,
+ pref=s_pref,
+ blwf=s_blwf,
+ pstf=s_pstf,
+}
+analyzers.states=states
+analyzers.features=features
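+-- setstate: generic analyzer (registered below as methods.latn); the first
+-- glyph of a run is tagged init, subsequent ones medi, and a finished run
+-- is tagged isol (single glyph) or fina (last glyph).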
+function analyzers.setstate(head,font)
+ local useunicodemarks=analyzers.useunicodemarks
+ local tfmdata=fontdata[font]
+ local descriptions=tfmdata.descriptions
+ local first,last,current,n,done=nil,nil,head,0,false
+ while current do
+ local id=current.id
+ if id==glyph_code and current.font==font then
+ done=true
+ local char=current.char
+ local d=descriptions[char]
+ if d then
+ if d.class=="mark" or (useunicodemarks and categories[char]=="mn") then
+ done=true
+ current[a_state]=s_mark
+ elseif n==0 then
+ first,last,n=current,current,1
+ current[a_state]=s_init
+ else
+ last,n=current,n+1
+ current[a_state]=s_medi
+ end
+ else
+ if first and first==last then
+ last[a_state]=s_isol
+ elseif last then
+ last[a_state]=s_fina
+ end
+ first,last,n=nil,nil,0
+ end
+ elseif id==disc_code then
+ current[a_state]=s_medi
+ last=current
+ else
+ if first and first==last then
+ last[a_state]=s_isol
+ elseif last then
+ last[a_state]=s_fina
+ end
+ first,last,n=nil,nil,0
+ if id==math_code then
+ current=end_of_math(current)
+ end
+ end
+ current=current.next
+ end
+ if first and first==last then
+ last[a_state]=s_isol
+ elseif last then
+ last[a_state]=s_fina
+ end
+ return head,done
+end
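+-- analyzeinitializer/analyzeprocessor: dispatch on the font's script and
+-- language to the registered initializers/methods tables; they implement
+-- the "analyze" feature registered just below.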
+local function analyzeinitializer(tfmdata,value)
+ local script,language=otf.scriptandlanguage(tfmdata)
+ local action=initializers[script]
+ if not action then
+ elseif type(action)=="function" then
+ return action(tfmdata,value)
+ else
+ local action=action[language]
+ if action then
+ return action(tfmdata,value)
+ end
+ end
+end
+local function analyzeprocessor(head,font,attr)
+ local tfmdata=fontdata[font]
+ local script,language=otf.scriptandlanguage(tfmdata,attr)
+ local action=methods[script]
+ if not action then
+ elseif type(action)=="function" then
+ return action(head,font,attr)
+ else
+ action=action[language]
+ if action then
+ return action(head,font,attr)
+ end
+ end
+ return head,false
+end
+registerotffeature {
+ name="analyze",
+ description="analysis of character classes",
+ default=true,
+ initializers={
+ node=analyzeinitializer,
+ },
+ processors={
+ position=1,
+ node=analyzeprocessor,
+ }
+}
+methods.latn=analyzers.setstate
+local tatweel=0x0640
+local zwnj=0x200C
+local zwj=0x200D
+local isolated={
+ [0x0600]=true,[0x0601]=true,[0x0602]=true,[0x0603]=true,
+ [0x0604]=true,
+ [0x0608]=true,[0x060B]=true,[0x0621]=true,[0x0674]=true,
+ [0x06DD]=true,
+ [0x0856]=true,[0x0858]=true,[0x0857]=true,
+ [0x07FA]=true,
+ [zwnj]=true,
+ [0x08AD]=true,
+}
+local final={
+ [0x0622]=true,[0x0623]=true,[0x0624]=true,[0x0625]=true,
+ [0x0627]=true,[0x0629]=true,[0x062F]=true,[0x0630]=true,
+ [0x0631]=true,[0x0632]=true,[0x0648]=true,[0x0671]=true,
+ [0x0672]=true,[0x0673]=true,[0x0675]=true,[0x0676]=true,
+ [0x0677]=true,[0x0688]=true,[0x0689]=true,[0x068A]=true,
+ [0x068B]=true,[0x068C]=true,[0x068D]=true,[0x068E]=true,
+ [0x068F]=true,[0x0690]=true,[0x0691]=true,[0x0692]=true,
+ [0x0693]=true,[0x0694]=true,[0x0695]=true,[0x0696]=true,
+ [0x0697]=true,[0x0698]=true,[0x0699]=true,[0x06C0]=true,
+ [0x06C3]=true,[0x06C4]=true,[0x06C5]=true,[0x06C6]=true,
+ [0x06C7]=true,[0x06C8]=true,[0x06C9]=true,[0x06CA]=true,
+ [0x06CB]=true,[0x06CD]=true,[0x06CF]=true,[0x06D2]=true,
+ [0x06D3]=true,[0x06D5]=true,[0x06EE]=true,[0x06EF]=true,
+ [0x0759]=true,[0x075A]=true,[0x075B]=true,[0x076B]=true,
+ [0x076C]=true,[0x0771]=true,[0x0773]=true,[0x0774]=true,
+ [0x0778]=true,[0x0779]=true,
+ [0x08AA]=true,[0x08AB]=true,[0x08AC]=true,
+ [0xFEF5]=true,[0xFEF7]=true,[0xFEF9]=true,[0xFEFB]=true,
+ [0x0710]=true,[0x0715]=true,[0x0716]=true,[0x0717]=true,
+ [0x0718]=true,[0x0719]=true,[0x0728]=true,[0x072A]=true,
+ [0x072C]=true,[0x071E]=true,
+ [0x072F]=true,[0x074D]=true,
+ [0x0840]=true,[0x0849]=true,[0x0854]=true,[0x0846]=true,
+ [0x084F]=true,
+ [0x08AE]=true,[0x08B1]=true,[0x08B2]=true,
+}
+local medial={
+ [0x0626]=true,[0x0628]=true,[0x062A]=true,[0x062B]=true,
+ [0x062C]=true,[0x062D]=true,[0x062E]=true,[0x0633]=true,
+ [0x0634]=true,[0x0635]=true,[0x0636]=true,[0x0637]=true,
+ [0x0638]=true,[0x0639]=true,[0x063A]=true,[0x063B]=true,
+ [0x063C]=true,[0x063D]=true,[0x063E]=true,[0x063F]=true,
+ [0x0641]=true,[0x0642]=true,[0x0643]=true,
+ [0x0644]=true,[0x0645]=true,[0x0646]=true,[0x0647]=true,
+ [0x0649]=true,[0x064A]=true,[0x066E]=true,[0x066F]=true,
+ [0x0678]=true,[0x0679]=true,[0x067A]=true,[0x067B]=true,
+ [0x067C]=true,[0x067D]=true,[0x067E]=true,[0x067F]=true,
+ [0x0680]=true,[0x0681]=true,[0x0682]=true,[0x0683]=true,
+ [0x0684]=true,[0x0685]=true,[0x0686]=true,[0x0687]=true,
+ [0x069A]=true,[0x069B]=true,[0x069C]=true,[0x069D]=true,
+ [0x069E]=true,[0x069F]=true,[0x06A0]=true,[0x06A1]=true,
+ [0x06A2]=true,[0x06A3]=true,[0x06A4]=true,[0x06A5]=true,
+ [0x06A6]=true,[0x06A7]=true,[0x06A8]=true,[0x06A9]=true,
+ [0x06AA]=true,[0x06AB]=true,[0x06AC]=true,[0x06AD]=true,
+ [0x06AE]=true,[0x06AF]=true,[0x06B0]=true,[0x06B1]=true,
+ [0x06B2]=true,[0x06B3]=true,[0x06B4]=true,[0x06B5]=true,
+ [0x06B6]=true,[0x06B7]=true,[0x06B8]=true,[0x06B9]=true,
+ [0x06BA]=true,[0x06BB]=true,[0x06BC]=true,[0x06BD]=true,
+ [0x06BE]=true,[0x06BF]=true,[0x06C1]=true,[0x06C2]=true,
+ [0x06CC]=true,[0x06CE]=true,[0x06D0]=true,[0x06D1]=true,
+ [0x06FA]=true,[0x06FB]=true,[0x06FC]=true,[0x06FF]=true,
+ [0x0750]=true,[0x0751]=true,[0x0752]=true,[0x0753]=true,
+ [0x0754]=true,[0x0755]=true,[0x0756]=true,[0x0757]=true,
+ [0x0758]=true,[0x075C]=true,[0x075D]=true,[0x075E]=true,
+ [0x075F]=true,[0x0760]=true,[0x0761]=true,[0x0762]=true,
+ [0x0763]=true,[0x0764]=true,[0x0765]=true,[0x0766]=true,
+ [0x0767]=true,[0x0768]=true,[0x0769]=true,[0x076A]=true,
+ [0x076D]=true,[0x076E]=true,[0x076F]=true,[0x0770]=true,
+ [0x0772]=true,[0x0775]=true,[0x0776]=true,[0x0777]=true,
+ [0x077A]=true,[0x077B]=true,[0x077C]=true,[0x077D]=true,
+ [0x077E]=true,[0x077F]=true,
+ [0x08A0]=true,[0x08A2]=true,[0x08A4]=true,[0x08A5]=true,
+ [0x08A6]=true,[0x0620]=true,[0x08A8]=true,[0x08A9]=true,
+ [0x08A7]=true,[0x08A3]=true,
+ [0x0712]=true,[0x0713]=true,[0x0714]=true,[0x071A]=true,
+ [0x071B]=true,[0x071C]=true,[0x071D]=true,[0x071F]=true,
+ [0x0720]=true,[0x0721]=true,[0x0722]=true,[0x0723]=true,
+ [0x0724]=true,[0x0725]=true,[0x0726]=true,[0x0727]=true,
+ [0x0729]=true,[0x072B]=true,[0x072D]=true,[0x072E]=true,
+ [0x074E]=true,[0x074F]=true,
+ [0x0841]=true,[0x0842]=true,[0x0843]=true,[0x0844]=true,
+ [0x0845]=true,[0x0847]=true,[0x0848]=true,[0x0855]=true,
+ [0x0851]=true,[0x084E]=true,[0x084D]=true,[0x084A]=true,
+ [0x084B]=true,[0x084C]=true,[0x0850]=true,[0x0852]=true,
+ [0x0853]=true,
+ [0x07D7]=true,[0x07E8]=true,[0x07D9]=true,[0x07EA]=true,
+ [0x07CA]=true,[0x07DB]=true,[0x07CC]=true,[0x07DD]=true,
+ [0x07CE]=true,[0x07DF]=true,[0x07D4]=true,[0x07E5]=true,
+ [0x07E9]=true,[0x07E7]=true,[0x07E3]=true,[0x07E2]=true,
+ [0x07E0]=true,[0x07E1]=true,[0x07DE]=true,[0x07DC]=true,
+ [0x07D1]=true,[0x07DA]=true,[0x07D8]=true,[0x07D6]=true,
+ [0x07D2]=true,[0x07D0]=true,[0x07CF]=true,[0x07CD]=true,
+ [0x07CB]=true,[0x07D3]=true,[0x07E4]=true,[0x07D5]=true,
+ [0x07E6]=true,
+ [tatweel]=true,[zwj]=true,
+ [0x08A1]=true,[0x08AF]=true,[0x08B0]=true,
+}
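+-- The isolated/final/medial tables above classify codepoints of Arabic and
+-- related scripts (Syriac, N'Ko, Mandaic) by joining behaviour; finish()
+-- closes a pending run by tagging its first/last glyph as isol or fina and
+-- warns once per character that has no known class.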
+local arab_warned={}
+local function warning(current,what)
+ local char=current.char
+ if not arab_warned[char] then
+ log.report("analyze","arab: character %C has no %a class",char,what)
+ arab_warned[char]=true
+ end
+end
+local function finish(first,last)
+ if last then
+ if first==last then
+ local fc=first.char
+ if medial[fc] or final[fc] then
+ first[a_state]=s_isol
+ else
+ warning(first,"isol")
+ first[a_state]=s_error
+ end
+ else
+ local lc=last.char
+ if medial[lc] or final[lc] then
+ last[a_state]=s_fina
+ else
+ warning(last,"fina")
+ last[a_state]=s_error
+ end
+ end
+ first,last=nil,nil
+ elseif first then
+ local fc=first.char
+ if medial[fc] or final[fc] then
+ first[a_state]=s_isol
+ else
+ warning(first,"isol")
+ first[a_state]=s_error
+ end
+ first=nil
+ end
+ return first,last
+end
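+-- methods.arab: the joining-state machine for Arabic-like scripts; it is
+-- reused for Syriac, Mandaic and N'Ko below (methods.syrc/mand/nko).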
+function methods.arab(head,font,attr)
+ local useunicodemarks=analyzers.useunicodemarks
+ local tfmdata=fontdata[font]
+ local marks=tfmdata.resources.marks
+ local first,last,current,done=nil,nil,head,false
+ while current do
+ local id=current.id
+ if id==glyph_code and current.font==font and current.subtype<256 and not current[a_state] then
+ done=true
+ local char=current.char
+ if marks[char] or (useunicodemarks and categories[char]=="mn") then
+ current[a_state]=s_mark
+ elseif isolated[char] then
+ first,last=finish(first,last)
+ current[a_state]=s_isol
+ first,last=nil,nil
+ elseif not first then
+ if medial[char] then
+ current[a_state]=s_init
+ first,last=first or current,current
+ elseif final[char] then
+ current[a_state]=s_isol
+ first,last=nil,nil
+ else
+ first,last=finish(first,last)
+ end
+ elseif medial[char] then
+ first,last=first or current,current
+ current[a_state]=s_medi
+ elseif final[char] then
+    if last[a_state]~=s_init then
+ last[a_state]=s_medi
+ end
+ current[a_state]=s_fina
+ first,last=nil,nil
+ elseif char>=0x0600 and char<=0x06FF then
+ current[a_state]=s_rest
+ first,last=finish(first,last)
+ else
+ first,last=finish(first,last)
+ end
+ else
+ if first or last then
+ first,last=finish(first,last)
+ end
+ if id==math_code then
+ current=end_of_math(current)
+ end
+ end
+ current=current.next
+ end
+ if first or last then
+ finish(first,last)
+ end
+ return head,done
+end
+methods.syrc=methods.arab
+methods.mand=methods.arab
+methods.nko=methods.arab
+directives.register("otf.analyze.useunicodemarks",function(v)
+ analyzers.useunicodemarks=v
+end)
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-otn']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+}
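+-- font-otn is the node-mode OpenType feature processor: it provides the
+-- GSUB/GPOS lookup handlers and the contextual chain machinery that act
+-- on the node list.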
+local concat,insert,remove=table.concat,table.insert,table.remove
+local gmatch,gsub,find,match,lower,strip=string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip
+local type,next,tonumber,tostring=type,next,tonumber,tostring
+local lpegmatch=lpeg.match
+local random=math.random
+local formatters=string.formatters
+local logs,trackers,nodes,attributes=logs,trackers,nodes,attributes
+local registertracker=trackers.register
+local fonts=fonts
+local otf=fonts.handlers.otf
+local trace_lookups=false registertracker("otf.lookups",function(v) trace_lookups=v end)
+local trace_singles=false registertracker("otf.singles",function(v) trace_singles=v end)
+local trace_multiples=false registertracker("otf.multiples",function(v) trace_multiples=v end)
+local trace_alternatives=false registertracker("otf.alternatives",function(v) trace_alternatives=v end)
+local trace_ligatures=false registertracker("otf.ligatures",function(v) trace_ligatures=v end)
+local trace_contexts=false registertracker("otf.contexts",function(v) trace_contexts=v end)
+local trace_marks=false registertracker("otf.marks",function(v) trace_marks=v end)
+local trace_kerns=false registertracker("otf.kerns",function(v) trace_kerns=v end)
+local trace_cursive=false registertracker("otf.cursive",function(v) trace_cursive=v end)
+local trace_preparing=false registertracker("otf.preparing",function(v) trace_preparing=v end)
+local trace_bugs=false registertracker("otf.bugs",function(v) trace_bugs=v end)
+local trace_details=false registertracker("otf.details",function(v) trace_details=v end)
+local trace_applied=false registertracker("otf.applied",function(v) trace_applied=v end)
+local trace_steps=false registertracker("otf.steps",function(v) trace_steps=v end)
+local trace_skips=false registertracker("otf.skips",function(v) trace_skips=v end)
+local trace_directions=false registertracker("otf.directions",function(v) trace_directions=v end)
+local report_direct=logs.reporter("fonts","otf direct")
+local report_subchain=logs.reporter("fonts","otf subchain")
+local report_chain=logs.reporter("fonts","otf chain")
+local report_process=logs.reporter("fonts","otf process")
+local report_prepare=logs.reporter("fonts","otf prepare")
+local report_warning=logs.reporter("fonts","otf warning")
+registertracker("otf.verbose_chain",function(v) otf.setcontextchain(v and "verbose") end)
+registertracker("otf.normal_chain",function(v) otf.setcontextchain(v and "normal") end)
+registertracker("otf.replacements","otf.singles,otf.multiples,otf.alternatives,otf.ligatures")
+registertracker("otf.positions","otf.marks,otf.kerns,otf.cursive")
+registertracker("otf.actions","otf.replacements,otf.positions")
+registertracker("otf.injections","nodes.injections")
+registertracker("*otf.sample","otf.steps,otf.actions,otf.analyzing")
+local insert_node_after=node.insert_after
+local delete_node=nodes.delete
+local copy_node=node.copy
+local find_node_tail=node.tail or node.slide
+local flush_node_list=node.flush_list
+local end_of_math=node.end_of_math
+local setmetatableindex=table.setmetatableindex
+local zwnj=0x200C
+local zwj=0x200D
+local wildcard="*"
+local default="dflt"
+local nodecodes=nodes.nodecodes
+local whatcodes=nodes.whatcodes
+local glyphcodes=nodes.glyphcodes
+local disccodes=nodes.disccodes
+local glyph_code=nodecodes.glyph
+local glue_code=nodecodes.glue
+local disc_code=nodecodes.disc
+local whatsit_code=nodecodes.whatsit
+local math_code=nodecodes.math
+local dir_code=whatcodes.dir
+local localpar_code=whatcodes.localpar
+local discretionary_code=disccodes.discretionary
+local ligature_code=glyphcodes.ligature
+local privateattribute=attributes.private
+local a_state=privateattribute('state')
+local a_markbase=privateattribute('markbase')
+local a_markmark=privateattribute('markmark')
+local a_markdone=privateattribute('markdone')
+local a_cursbase=privateattribute('cursbase')
+local a_curscurs=privateattribute('curscurs')
+local a_cursdone=privateattribute('cursdone')
+local a_kernpair=privateattribute('kernpair')
+local a_ligacomp=privateattribute('ligacomp')
+local injections=nodes.injections
+local setmark=injections.setmark
+local setcursive=injections.setcursive
+local setkern=injections.setkern
+local setpair=injections.setpair
+local markonce=true
+local cursonce=true
+local kernonce=true
+local fonthashes=fonts.hashes
+local fontdata=fonthashes.identifiers
+local otffeatures=fonts.constructors.newfeatures("otf")
+local registerotffeature=otffeatures.register
+local onetimemessage=fonts.loggers.onetimemessage or function() end
+otf.defaultnodealternate="none"
+local tfmdata=false
+local characters=false
+local descriptions=false
+local resources=false
+local marks=false
+local currentfont=false
+local lookuptable=false
+local anchorlookups=false
+local lookuptypes=false
+local handlers={}
+local rlmode=0
+local featurevalue=false
+local checkstep=(nodes and nodes.tracers and nodes.tracers.steppers.check) or function() end
+local registerstep=(nodes and nodes.tracers and nodes.tracers.steppers.register) or function() end
+local registermessage=(nodes and nodes.tracers and nodes.tracers.steppers.message) or function() end
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_direct(...)
+end
+local function logwarning(...)
+ report_direct(...)
+end
+local f_unicode=formatters["%U"]
+local f_uniname=formatters["%U (%s)"]
+local f_unilist=formatters["% t (% t)"]
+local function gref(n)
+ if type(n)=="number" then
+ local description=descriptions[n]
+ local name=description and description.name
+ if name then
+ return f_uniname(n,name)
+ else
+ return f_unicode(n)
+ end
+ elseif n then
+ local num,nam={},{}
+ for i=1,#n do
+ local ni=n[i]
+ if tonumber(ni) then
+ local di=descriptions[ni]
+ num[i]=f_unicode(ni)
+ nam[i]=di and di.name or "-"
+ end
+ end
+ return f_unilist(num,nam)
+ else
+ return "<error in node mode tracing>"
+ end
+end
+local function cref(kind,chainname,chainlookupname,lookupname,index)
+ if index then
+ return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookupname,index)
+ elseif lookupname then
+ return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookupname)
+ elseif chainlookupname then
+ return formatters["feature %a, chain %a, sub %a"](kind,chainname,chainlookupname)
+ elseif chainname then
+ return formatters["feature %a, chain %a"](kind,chainname)
+ else
+ return formatters["feature %a"](kind)
+ end
+end
+local function pref(kind,lookupname)
+ return formatters["feature %a, lookup %a"](kind,lookupname)
+end
+local function copy_glyph(g)
+ local components=g.components
+ if components then
+ g.components=nil
+ local n=copy_node(g)
+ g.components=components
+ return n
+ else
+ return copy_node(g)
+ end
+end
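+-- markstoligature: collapses a run of mark glyphs into one ligature glyph,
+-- keeping the original glyphs as components of the new base node.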
+local function markstoligature(kind,lookupname,head,start,stop,char)
+ if start==stop and start.char==char then
+ return head,start
+ else
+ local prev=start.prev
+ local next=stop.next
+ start.prev=nil
+ stop.next=nil
+ local base=copy_glyph(start)
+ if head==start then
+ head=base
+ end
+ base.char=char
+ base.subtype=ligature_code
+ base.components=start
+ if prev then
+ prev.next=base
+ end
+ if next then
+ next.prev=base
+ end
+ base.next=next
+ base.prev=prev
+ return head,base
+ end
+end
+local function getcomponentindex(start)
+ if start.id~=glyph_code then
+ return 0
+ elseif start.subtype==ligature_code then
+ local i=0
+ local components=start.components
+ while components do
+ i=i+getcomponentindex(components)
+ components=components.next
+ end
+ return i
+ elseif not marks[start.char] then
+ return 1
+ else
+ return 0
+ end
+end
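+-- toligature: replaces start..stop by a single ligature glyph; unless a
+-- discretionary was seen it re-inserts or drops the intervening marks and
+-- tags them with a_ligacomp so that mark-to-ligature anchoring still works.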
+local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound)
+ if start==stop and start.char==char then
+ start.char=char
+ return head,start
+ end
+ local prev=start.prev
+ local next=stop.next
+ start.prev=nil
+ stop.next=nil
+ local base=copy_glyph(start)
+ if start==head then
+ head=base
+ end
+ base.char=char
+ base.subtype=ligature_code
+ base.components=start
+ if prev then
+ prev.next=base
+ end
+ if next then
+ next.prev=base
+ end
+ base.next=next
+ base.prev=prev
+ if not discfound then
+ local deletemarks=markflag~="mark"
+ local components=start
+ local baseindex=0
+ local componentindex=0
+ local head=base
+ local current=base
+ while start do
+ local char=start.char
+ if not marks[char] then
+ baseindex=baseindex+componentindex
+ componentindex=getcomponentindex(start)
+ elseif not deletemarks then
+ start[a_ligacomp]=baseindex+(start[a_ligacomp] or componentindex)
+ if trace_marks then
+ logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp])
+ end
+ head,current=insert_node_after(head,current,copy_node(start))
+ elseif trace_marks then
+ logwarning("%s: delete mark %s",pref(kind,lookupname),gref(char))
+ end
+ start=start.next
+ end
+ local start=current.next
+ while start and start.id==glyph_code do
+ local char=start.char
+ if marks[char] then
+ start[a_ligacomp]=baseindex+(start[a_ligacomp] or componentindex)
+ if trace_marks then
+ logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp])
+ end
+ else
+ break
+ end
+ start=start.next
+ end
+ end
+ return head,base
+end
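+-- The handlers table maps GSUB/GPOS lookup types (gsub_single,
+-- gsub_ligature, gpos_mark2base, ...) to functions that transform the
+-- node list in place.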
+function handlers.gsub_single(head,start,kind,lookupname,replacement)
+ if trace_singles then
+ logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(start.char),gref(replacement))
+ end
+ start.char=replacement
+ return head,start,true
+end
+local function get_alternative_glyph(start,alternatives,value,trace_alternatives)
+ local n=#alternatives
+ if value=="random" then
+ local r=random(1,n)
+ return alternatives[r],trace_alternatives and formatters["value %a, taking %a"](value,r)
+ elseif value=="first" then
+ return alternatives[1],trace_alternatives and formatters["value %a, taking %a"](value,1)
+ elseif value=="last" then
+ return alternatives[n],trace_alternatives and formatters["value %a, taking %a"](value,n)
+ else
+ value=tonumber(value)
+ if type(value)~="number" then
+ return alternatives[1],trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
+ elseif value>n then
+ local defaultalt=otf.defaultnodealternate
+ if defaultalt=="first" then
+ return alternatives[n],trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
+ elseif defaultalt=="last" then
+ return alternatives[1],trace_alternatives and formatters["invalid value %s, taking %a"](value,n)
+ else
+ return false,trace_alternatives and formatters["invalid value %a, %s"](value,"out of range")
+ end
+ elseif value==0 then
+ return start.char,trace_alternatives and formatters["invalid value %a, %s"](value,"no change")
+ elseif value<1 then
+ return alternatives[1],trace_alternatives and formatters["invalid value %a, taking %a"](value,1)
+ else
+ return alternatives[value],trace_alternatives and formatters["value %a, taking %a"](value,value)
+ end
+ end
+end
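+-- multiple_glyphs: one-to-many substitution; the start glyph receives the
+-- first replacement and copies of it are inserted for the remaining ones.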
+local function multiple_glyphs(head,start,multiple,ignoremarks)
+ local nofmultiples=#multiple
+ if nofmultiples>0 then
+ start.char=multiple[1]
+ if nofmultiples>1 then
+ local sn=start.next
+ for k=2,nofmultiples do
+ local n=copy_node(start)
+ n.char=multiple[k]
+ n.next=sn
+ n.prev=start
+ if sn then
+ sn.prev=n
+ end
+ start.next=n
+ start=n
+ end
+ end
+ return head,start,true
+ else
+ if trace_multiples then
+ logprocess("no multiple for %s",gref(start.char))
+ end
+ return head,start,false
+ end
+end
+function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence)
+ local value=featurevalue==true and tfmdata.shared.features[kind] or featurevalue
+ local choice,comment=get_alternative_glyph(start,alternative,value,trace_alternatives)
+ if choice then
+ if trace_alternatives then
+ logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(start.char),choice,gref(choice),comment)
+ end
+ start.char=choice
+ else
+ if trace_alternatives then
+ logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(start.char),comment)
+ end
+ end
+ return head,start,true
+end
+function handlers.gsub_multiple(head,start,kind,lookupname,multiple,sequence)
+ if trace_multiples then
+ logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(start.char),gref(multiple))
+ end
+ return multiple_glyphs(head,start,multiple,sequence.flags[1])
+end
+function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
+ local s,stop,discfound=start.next,nil,false
+ local startchar=start.char
+ if marks[startchar] then
+ while s do
+ local id=s.id
+ if id==glyph_code and s.font==currentfont and s.subtype<256 then
+ local lg=ligature[s.char]
+ if lg then
+ stop=s
+ ligature=lg
+ s=s.next
+ else
+ break
+ end
+ else
+ break
+ end
+ end
+ if stop then
+ local lig=ligature.ligature
+ if lig then
+ if trace_ligatures then
+ local stopchar=stop.char
+ head,start=markstoligature(kind,lookupname,head,start,stop,lig)
+ logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
+ else
+ head,start=markstoligature(kind,lookupname,head,start,stop,lig)
+ end
+ return head,start,true
+ else
+ end
+ end
+ else
+ local skipmark=sequence.flags[1]
+ while s do
+ local id=s.id
+ if id==glyph_code and s.subtype<256 then
+ if s.font==currentfont then
+ local char=s.char
+ if skipmark and marks[char] then
+ s=s.next
+ else
+ local lg=ligature[char]
+ if lg then
+ stop=s
+ ligature=lg
+ s=s.next
+ else
+ break
+ end
+ end
+ else
+ break
+ end
+ elseif id==disc_code then
+ discfound=true
+ s=s.next
+ else
+ break
+ end
+ end
+ local lig=ligature.ligature
+ if lig then
+ if stop then
+ if trace_ligatures then
+ local stopchar=stop.char
+ head,start=toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
+ logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
+ else
+ head,start=toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
+ end
+ return head,start,true
+ else
+ start.char=lig
+ if trace_ligatures then
+ logprocess("%s: replacing %s by (no real) ligature %s case 3",pref(kind,lookupname),gref(startchar),gref(lig))
+ end
+ return head,start,true
+ end
+ else
+ end
+ end
+ return head,start,false
+end
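+-- The gpos_mark2base/mark2ligature/mark2mark handlers look up the matching
+-- anchor pair in the font's descriptions and delegate the actual offset
+-- computation to injections.setmark.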
+function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence)
+ local markchar=start.char
+ if marks[markchar] then
+ local base=start.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ local basechar=base.char
+ if marks[basechar] then
+ while true do
+ base=base.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ basechar=base.char
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+ end
+ end
+ end
+ local baseanchors=descriptions[basechar]
+ if baseanchors then
+ baseanchors=baseanchors.anchors
+ end
+ if baseanchors then
+ local baseanchors=baseanchors['basechar']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
+ pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head,start,true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s, no matching anchors for mark %s and base %s",pref(kind,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ elseif trace_bugs then
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no char",pref(kind,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+end
+function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence)
+ local markchar=start.char
+ if marks[markchar] then
+ local base=start.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ local basechar=base.char
+ if marks[basechar] then
+ while true do
+ base=base.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ basechar=base.char
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+ end
+ end
+ end
+ local index=start[a_ligacomp]
+ local baseanchors=descriptions[basechar]
+ if baseanchors then
+ baseanchors=baseanchors.anchors
+ if baseanchors then
+ local baseanchors=baseanchors['baselig']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ ba=ba[index]
+ if ba then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ if trace_marks then
+ logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
+ pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy)
+ end
+ return head,start,true
+ else
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and baselig %s with index %a",pref(kind,lookupname),gref(markchar),gref(basechar),index)
+ end
+ end
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and baselig %s",pref(kind,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no char",pref(kind,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+end
+function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence)
+ local markchar=start.char
+ if marks[markchar] then
+ local base=start.prev
+ local slc=start[a_ligacomp]
+ if slc then
+ while base do
+ local blc=base[a_ligacomp]
+ if blc and blc~=slc then
+ base=base.prev
+ else
+ break
+ end
+ end
+ end
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ local basechar=base.char
+ local baseanchors=descriptions[basechar]
+ if baseanchors then
+ baseanchors=baseanchors.anchors
+ if baseanchors then
+ baseanchors=baseanchors['basemark']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
+ pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head,start,true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and basemark %s",pref(kind,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no mark",pref(kind,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+end
+function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence)
+ local alreadydone=cursonce and start[a_cursbase]
+ if not alreadydone then
+ local done=false
+ local startchar=start.char
+ if marks[startchar] then
+ if trace_cursive then
+ logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
+ end
+ else
+ local nxt=start.next
+ while not done and nxt and nxt.id==glyph_code and nxt.font==currentfont and nxt.subtype<256 do
+ local nextchar=nxt.char
+ if marks[nextchar] then
+ nxt=nxt.next
+ else
+ local entryanchors=descriptions[nextchar]
+ if entryanchors then
+ entryanchors=entryanchors.anchors
+ if entryanchors then
+ entryanchors=entryanchors['centry']
+ if entryanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,entry in next,entryanchors do
+ if al[anchor] then
+ local exit=exitanchors[anchor]
+ if exit then
+ local dx,dy,bound=setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ if trace_cursive then
+ logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
+ end
+ done=true
+ break
+ end
+ end
+ end
+ end
+ end
+ elseif trace_bugs then
+ onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
+ end
+ break
+ end
+ end
+ end
+ return head,start,done
+ else
+ if trace_cursive and trace_details then
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
+ end
+ return head,start,false
+ end
+end
+function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence)
+ local startchar=start.char
+ local dx,dy,w,h=setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h)
+ end
+ return head,start,false
+end
+function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence)
+ local snext=start.next
+ if not snext then
+ return head,start,false
+ else
+ local prev,done=start,false
+ local factor=tfmdata.parameters.factor
+ local lookuptype=lookuptypes[lookupname]
+ while snext and snext.id==glyph_code and snext.font==currentfont and snext.subtype<256 do
+ local nextchar=snext.char
+ local krn=kerns[nextchar]
+ if not krn and marks[nextchar] then
+ prev=snext
+ snext=snext.next
+ else
+ if not krn then
+ elseif type(krn)=="table" then
+ if lookuptype=="pair" then
+ local a,b=krn[2],krn[3]
+ if a and #a>0 then
+ local startchar=start.char
+ local x,y,w,h=setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ if b and #b>0 then
+ local startchar=start.char
+ local x,y,w,h=setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
+ if trace_kerns then
+ logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ else
+ report_process("%s: check this out (old kern stuff)",pref(kind,lookupname))
+ end
+ done=true
+ elseif krn~=0 then
+ local k=setkern(snext,factor,rlmode,krn)
+ if trace_kerns then
+ logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
+ end
+ done=true
+ end
+ break
+ end
+ end
+ return head,start,done
+ end
+end
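+-- chainprocs/chainmores hold the variants of the handlers above that are
+-- called from contextual chain rules; they first resolve the sublookup via
+-- lookuphash and then apply the same substitutions and positionings.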
+local chainmores={}
+local chainprocs={}
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_subchain(...)
+end
+local logwarning=report_subchain
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_chain(...)
+end
+local logwarning=report_chain
+function chainprocs.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname)
+ logwarning("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
+ return head,start,false
+end
+function chainmores.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname,n)
+ logprocess("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
+ return head,start,false
+end
+function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements)
+ local char=start.char
+ local replacement=replacements[char]
+ if replacement then
+ if trace_singles then
+ logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement))
+ end
+ start.char=replacement
+ return head,start,true
+ else
+ return head,start,false
+ end
+end
+function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
+ local current=start
+ local subtables=currentlookup.subtables
+ if #subtables>1 then
+ logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," "))
+ end
+ while current do
+ if current.id==glyph_code then
+ local currentchar=current.char
+ local lookupname=subtables[1]
+ local replacement=lookuphash[lookupname]
+ if not replacement then
+ if trace_bugs then
+ logwarning("%s: no single hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
+ end
+ else
+ replacement=replacement[currentchar]
+ if not replacement or replacement=="" then
+ if trace_bugs then
+ logwarning("%s: no single for %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar))
+ end
+ else
+ if trace_singles then
+ logprocess("%s: replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement))
+ end
+ current.char=replacement
+ end
+ end
+ return head,start,true
+ elseif current==stop then
+ break
+ else
+ current=current.next
+ end
+ end
+ return head,start,false
+end
+chainmores.gsub_single=chainprocs.gsub_single
+function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local startchar=start.char
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local replacements=lookuphash[lookupname]
+ if not replacements then
+ if trace_bugs then
+ logwarning("%s: no multiple hits",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ else
+ replacements=replacements[startchar]
+  if not replacements or replacements=="" then
+ if trace_bugs then
+ logwarning("%s: no multiple for %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar))
+ end
+ else
+ if trace_multiples then
+ logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements))
+ end
+ return multiple_glyphs(head,start,replacements,currentlookup.flags[1])
+ end
+ end
+ return head,start,false
+end
+chainmores.gsub_multiple=chainprocs.gsub_multiple
+function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local current=start
+ local subtables=currentlookup.subtables
+ local value=featurevalue==true and tfmdata.shared.features[kind] or featurevalue
+ while current do
+ if current.id==glyph_code then
+ local currentchar=current.char
+ local lookupname=subtables[1]
+ local alternatives=lookuphash[lookupname]
+ if not alternatives then
+ if trace_bugs then
+ logwarning("%s: no alternative hit",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ else
+ alternatives=alternatives[currentchar]
+ if alternatives then
+ local choice,comment=get_alternative_glyph(current,alternatives,value,trace_alternatives)
+ if choice then
+ if trace_alternatives then
+       logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar),choice,gref(choice),comment)
+ end
+ start.char=choice
+ else
+ if trace_alternatives then
+       logwarning("%s: no variant %a for %s, %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(currentchar),comment)
+ end
+ end
+ elseif trace_bugs then
+ logwarning("%s: no alternative for %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar),comment)
+ end
+ end
+ return head,start,true
+ elseif current==stop then
+ break
+ else
+ current=current.next
+ end
+ end
+ return head,start,false
+end
+chainmores.gsub_alternate=chainprocs.gsub_alternate
+function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
+ local startchar=start.char
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local ligatures=lookuphash[lookupname]
+ if not ligatures then
+ if trace_bugs then
+ logwarning("%s: no ligature hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
+ end
+ else
+ ligatures=ligatures[startchar]
+ if not ligatures then
+ if trace_bugs then
+ logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
+ end
+ else
+ local s=start.next
+ local discfound=false
+ local last=stop
+ local nofreplacements=0
+ local skipmark=currentlookup.flags[1]
+ while s do
+ local id=s.id
+ if id==disc_code then
+ s=s.next
+ discfound=true
+ else
+ local schar=s.char
+ if skipmark and marks[schar] then
+ s=s.next
+ else
+ local lg=ligatures[schar]
+ if lg then
+ ligatures,last,nofreplacements=lg,s,nofreplacements+1
+ if s==stop then
+ break
+ else
+ s=s.next
+ end
+ else
+ break
+ end
+ end
+ end
+ end
+ local l2=ligatures.ligature
+ if l2 then
+ if chainindex then
+ stop=last
+ end
+ if trace_ligatures then
+ if start==stop then
+ logprocess("%s: replacing character %s by ligature %s case 3",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2))
+ else
+ logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char),gref(l2))
+ end
+ end
+ head,start=toligature(kind,lookupname,head,start,stop,l2,currentlookup.flags[1],discfound)
+ return head,start,true,nofreplacements
+ elseif trace_bugs then
+ if start==stop then
+ logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
+ else
+ logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char))
+ end
+ end
+ end
+ end
+ return head,start,false,0
+end
+chainmores.gsub_ligature=chainprocs.gsub_ligature
+function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local markchar=start.char
+ if marks[markchar] then
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local markanchors=lookuphash[lookupname]
+ if markanchors then
+ markanchors=markanchors[markchar]
+ end
+ if markanchors then
+ local base=start.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ local basechar=base.char
+ if marks[basechar] then
+ while true do
+ base=base.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ basechar=base.char
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+ end
+ end
+ end
+ local baseanchors=descriptions[basechar].anchors
+ if baseanchors then
+ local baseanchors=baseanchors['basechar']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
+ cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head,start,true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s, no matching anchors for mark %s and base %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no char",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ end
+ return head,start,false
+end
+function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local markchar=start.char
+ if marks[markchar] then
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local markanchors=lookuphash[lookupname]
+ if markanchors then
+ markanchors=markanchors[markchar]
+ end
+ if markanchors then
+ local base=start.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ local basechar=base.char
+ if marks[basechar] then
+ while true do
+ base=base.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ basechar=base.char
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",cref(kind,chainname,chainlookupname,lookupname),markchar)
+ end
+ return head,start,false
+ end
+ end
+ end
+ local index=start[a_ligacomp]
+ local baseanchors=descriptions[basechar].anchors
+ if baseanchors then
+ local baseanchors=baseanchors['baselig']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ ba=ba[index]
+ if ba then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
+ cref(kind,chainname,chainlookupname,lookupname),anchor,a or bound,gref(markchar),gref(basechar),index,dx,dy)
+ end
+ return head,start,true
+ end
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and baselig %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ logwarning("feature %s, lookup %s: prev node is no char",kind,lookupname)
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ end
+ return head,start,false
+end
+function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local markchar=start.char
+ if marks[markchar] then
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local markanchors=lookuphash[lookupname]
+ if markanchors then
+ markanchors=markanchors[markchar]
+ end
+ if markanchors then
+ local base=start.prev
+ local slc=start[a_ligacomp]
+ if slc then
+ while base do
+ local blc=base[a_ligacomp]
+ if blc and blc~=slc then
+ base=base.prev
+ else
+ break
+ end
+ end
+ end
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ local basechar=base.char
+ local baseanchors=descriptions[basechar].anchors
+ if baseanchors then
+ baseanchors=baseanchors['basemark']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
+ cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head,start,true
+ end
+ end
+ end
+ if trace_bugs then
+       logwarning("%s: no matching anchors for mark %s and basemark %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ end
+ return head,start,false
+end
+function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local alreadydone=cursonce and start[a_cursbase]
+ if not alreadydone then
+ local startchar=start.char
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local exitanchors=lookuphash[lookupname]
+ if exitanchors then
+ exitanchors=exitanchors[startchar]
+ end
+ if exitanchors then
+ local done=false
+ if marks[startchar] then
+ if trace_cursive then
+ logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
+ end
+ else
+ local nxt=start.next
+ while not done and nxt and nxt.id==glyph_code and nxt.font==currentfont and nxt.subtype<256 do
+ local nextchar=nxt.char
+ if marks[nextchar] then
+ nxt=nxt.next
+ else
+ local entryanchors=descriptions[nextchar]
+ if entryanchors then
+ entryanchors=entryanchors.anchors
+ if entryanchors then
+ entryanchors=entryanchors['centry']
+ if entryanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,entry in next,entryanchors do
+ if al[anchor] then
+ local exit=exitanchors[anchor]
+ if exit then
+ local dx,dy,bound=setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ if trace_cursive then
+ logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
+ end
+ done=true
+ break
+ end
+ end
+ end
+ end
+ end
+ elseif trace_bugs then
+ onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
+ end
+ break
+ end
+ end
+ end
+ return head,start,done
+ else
+ if trace_cursive and trace_details then
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
+ end
+ return head,start,false
+ end
+ end
+ return head,start,false
+end
+function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
+ local startchar=start.char
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local kerns=lookuphash[lookupname]
+ if kerns then
+ kerns=kerns[startchar]
+ if kerns then
+ local dx,dy,w,h=setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h)
+ end
+ end
+ end
+ return head,start,false
+end
+chainmores.gpos_single=chainprocs.gpos_single
+function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
+ local snext=start.next
+ if snext then
+ local startchar=start.char
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local kerns=lookuphash[lookupname]
+ if kerns then
+ kerns=kerns[startchar]
+ if kerns then
+ local lookuptype=lookuptypes[lookupname]
+ local prev,done=start,false
+ local factor=tfmdata.parameters.factor
+ while snext and snext.id==glyph_code and snext.font==currentfont and snext.subtype<256 do
+ local nextchar=snext.char
+ local krn=kerns[nextchar]
+ if not krn and marks[nextchar] then
+ prev=snext
+ snext=snext.next
+ else
+ if not krn then
+ elseif type(krn)=="table" then
+ if lookuptype=="pair" then
+ local a,b=krn[2],krn[3]
+ if a and #a>0 then
+ local startchar=start.char
+ local x,y,w,h=setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ if b and #b>0 then
+ local startchar=start.char
+ local x,y,w,h=setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
+ if trace_kerns then
+ logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ else
+ report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname))
+ local a,b=krn[2],krn[6]
+ if a and a~=0 then
+ local k=setkern(snext,factor,rlmode,a)
+ if trace_kerns then
+ logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
+ end
+ end
+ if b and b~=0 then
+ logwarning("%s: ignoring second kern xoff %s",cref(kind,chainname,chainlookupname),b*factor)
+ end
+ end
+ done=true
+ elseif krn~=0 then
+ local k=setkern(snext,factor,rlmode,krn)
+ if trace_kerns then
+ logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
+ end
+ done=true
+ end
+ break
+ end
+ end
+ return head,start,done
+ end
+ end
+ end
+ return head,start,false
+end
+chainmores.gpos_pair=chainprocs.gpos_pair
+local function show_skip(kind,chainname,char,ck,class)
+ if ck[9] then
+ logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a, %a => %a",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10])
+ else
+ logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a",cref(kind,chainname),gref(char),class,ck[1],ck[2])
+ end
+end
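+-- normal_handle_contextchain: matches a context rule (the glyph sequences
+-- before, at and after the current position, honouring skip flags and mark
+-- classes) and, when a rule matches, applies the associated chain lookups.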
+local function normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash)
+ local flags=sequence.flags
+ local done=false
+ local skipmark=flags[1]
+ local skipligature=flags[2]
+ local skipbase=flags[3]
+ local someskip=skipmark or skipligature or skipbase
+ local markclass=sequence.markclass
+ local skipped=false
+ for k=1,#contexts do
+ local match=true
+ local current=start
+ local last=start
+ local ck=contexts[k]
+ local seq=ck[3]
+ local s=#seq
+ if s==1 then
+ match=current.id==glyph_code and current.font==currentfont and current.subtype<256 and seq[1][current.char]
+ else
+ local f,l=ck[4],ck[5]
+ if f==1 and f==l then
+ else
+ if f==l then
+ else
+ local n=f+1
+ last=last.next
+ while n<=l do
+ if last then
+ local id=last.id
+ if id==glyph_code then
+ if last.font==currentfont and last.subtype<256 then
+ local char=last.char
+ local ccd=descriptions[char]
+ if ccd then
+ local class=ccd.class
+ if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
+ skipped=true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ last=last.next
+ elseif seq[n][char] then
+ if n<l then
+ last=last.next
+ end
+ n=n+1
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ elseif id==disc_code then
+ last=last.next
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ end
+ end
+ end
+ if match and f>1 then
+ local prev=start.prev
+ if prev then
+ local n=f-1
+ while n>=1 do
+ if prev then
+ local id=prev.id
+ if id==glyph_code then
+ if prev.font==currentfont and prev.subtype<256 then
+ local char=prev.char
+ local ccd=descriptions[char]
+ if ccd then
+ local class=ccd.class
+ if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
+ skipped=true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ elseif seq[n][char] then
+ n=n -1
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ elseif id==disc_code then
+ elseif seq[n][32] then
+ n=n -1
+ else
+ match=false
+ break
+ end
+ prev=prev.prev
+ elseif seq[n][32] then
+ n=n -1
+ else
+ match=false
+ break
+ end
+ end
+ elseif f==2 then
+ match=seq[1][32]
+ else
+ for n=f-1,1,-1 do
+ if not seq[n][32] then
+ match=false
+ break
+ end
+ end
+ end
+ end
+ if match and s>l then
+ local current=last and last.next
+ if current then
+ local n=l+1
+ while n<=s do
+ if current then
+ local id=current.id
+ if id==glyph_code then
+ if current.font==currentfont and current.subtype<256 then
+ local char=current.char
+ local ccd=descriptions[char]
+ if ccd then
+ local class=ccd.class
+ if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
+ skipped=true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ elseif seq[n][char] then
+ n=n+1
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ elseif id==disc_code then
+ elseif seq[n][32] then
+ n=n+1
+ else
+ match=false
+ break
+ end
+ current=current.next
+ elseif seq[n][32] then
+ n=n+1
+ else
+ match=false
+ break
+ end
+ end
+ elseif s-l==1 then
+ match=seq[s][32]
+ else
+ for n=l+1,s do
+ if not seq[n][32] then
+ match=false
+ break
+ end
+ end
+ end
+ end
+ end
+ if match then
+ if trace_contexts then
+ local rule,lookuptype,f,l=ck[1],ck[2],ck[4],ck[5]
+ local char=start.char
+ if ck[9] then
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a, %a => %a",
+ cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
+ else
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a",
+ cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype)
+ end
+ end
+ local chainlookups=ck[6]
+ if chainlookups then
+ local nofchainlookups=#chainlookups
+ if nofchainlookups==1 then
+ local chainlookupname=chainlookups[1]
+ local chainlookup=lookuptable[chainlookupname]
+ if chainlookup then
+ local cp=chainprocs[chainlookup.type]
+ if cp then
+ local ok
+ head,start,ok=cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
+ if ok then
+ done=true
+ end
+ else
+ logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
+ end
+ else
+ logprocess("%s is not yet supported",cref(kind,chainname,chainlookupname))
+ end
+ else
+ local i=1
+ repeat
+ if skipped then
+ while true do
+ local char=start.char
+ local ccd=descriptions[char]
+ if ccd then
+ local class=ccd.class
+ if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
+ start=start.next
+ else
+ break
+ end
+ else
+ break
+ end
+ end
+ end
+ local chainlookupname=chainlookups[i]
+ local chainlookup=lookuptable[chainlookupname]
+ if not chainlookup then
+ i=i+1
+ else
+ local cp=chainmores[chainlookup.type]
+ if not cp then
+ logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
+ i=i+1
+ else
+ local ok,n
+ head,start,ok,n=cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence)
+ if ok then
+ done=true
+ i=i+(n or 1)
+ else
+ i=i+1
+ end
+ end
+ end
+ if start then
+ start=start.next
+ else
+ end
+ until i>nofchainlookups
+ end
+ else
+ local replacements=ck[7]
+ if replacements then
+ head,start,done=chainprocs.reversesub(head,start,last,kind,chainname,ck,lookuphash,replacements)
+ else
+ done=true
+ if trace_contexts then
+ logprocess("%s: skipping match",cref(kind,chainname))
+ end
+ end
+ end
+ end
+ end
+ return head,start,done
+end
+local verbose_handle_contextchain=function(font,...)
+ logwarning("no verbose handler installed, reverting to 'normal'")
+ otf.setcontextchain()
+ return normal_handle_contextchain(...)
+end
+otf.chainhandlers={
+ normal=normal_handle_contextchain,
+ verbose=verbose_handle_contextchain,
+}
+function otf.setcontextchain(method)
+ if not method or method=="normal" or not otf.chainhandlers[method] then
+ if handlers.contextchain then
+ logwarning("installing normal contextchain handler")
+ end
+ handlers.contextchain=normal_handle_contextchain
+ else
+ logwarning("installing contextchain handler %a",method)
+ local handler=otf.chainhandlers[method]
+ handlers.contextchain=function(...)
+ return handler(currentfont,...)
+ end
+ end
+ handlers.gsub_context=handlers.contextchain
+ handlers.gsub_contextchain=handlers.contextchain
+ handlers.gsub_reversecontextchain=handlers.contextchain
+ handlers.gpos_contextchain=handlers.contextchain
+ handlers.gpos_context=handlers.contextchain
+end
+otf.setcontextchain()
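+-- What follows is the node-mode driver proper: lookuphashes lazily caches each font's
+-- resources.lookuphash, and report_missing_cache warns once per lookup, type and font
+-- when a lookup has no prepared cache.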
+local missing={}
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_process(...)
+end
+local logwarning=report_process
+local function report_missing_cache(typ,lookup)
+ local f=missing[currentfont] if not f then f={} missing[currentfont]=f end
+ local t=f[typ] if not t then t={} f[typ]=t end
+ if not t[lookup] then
+ t[lookup]=true
+ logwarning("missing cache for lookup %a, type %a, font %a, name %a",lookup,typ,currentfont,tfmdata.properties.fullname)
+ end
+end
+local resolved={}
+local lookuphashes={}
+setmetatableindex(lookuphashes,function(t,font)
+ local lookuphash=fontdata[font].resources.lookuphash
+ if not lookuphash or not next(lookuphash) then
+ lookuphash=false
+ end
+ t[font]=lookuphash
+ return lookuphash
+end)
+local autofeatures=fonts.analyzers.features
+local function initialize(sequence,script,language,enabled)
+ local features=sequence.features
+ if features then
+ local order=sequence.order
+ if order then
+ for i=1,#order do
+ local kind=order[i]
+ local valid=enabled[kind]
+ if valid then
+ local scripts=features[kind]
+ local languages=scripts[script] or scripts[wildcard]
+ if languages and (languages[language] or languages[wildcard]) then
+ return { valid,autofeatures[kind] or false,sequence.chain or 0,kind,sequence }
+ end
+ end
+ end
+ else
+ end
+ end
+ return false
+end
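+-- initialize() returns a dataset { value, analyzer state or false, chain, feature tag, sequence }
+-- when a sequence's feature is enabled for the current script/language (or the wildcard);
+-- otf.dataset caches these lists per font, script and language in `resolved`.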
+function otf.dataset(tfmdata,font)
+ local shared=tfmdata.shared
+ local properties=tfmdata.properties
+ local language=properties.language or "dflt"
+ local script=properties.script or "dflt"
+ local enabled=shared.features
+ local res=resolved[font]
+ if not res then
+ res={}
+ resolved[font]=res
+ end
+ local rs=res[script]
+ if not rs then
+ rs={}
+ res[script]=rs
+ end
+ local rl=rs[language]
+ if not rl then
+ rl={
+ }
+ rs[language]=rl
+ local sequences=tfmdata.resources.sequences
+ for s=1,#sequences do
+ local v=enabled and initialize(sequences[s],script,language,enabled)
+ if v then
+ rl[#rl+1]=v
+ end
+ end
+ end
+ return rl
+end
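+-- featuresprocessor is the node-mode processor registered below: for every dataset it
+-- walks the node list, looks up start.char in each subtable's lookuphash and calls the
+-- matching handler. Reverse chains (chain<0) are processed from the tail backwards,
+-- discretionaries are handled through subrun()/kerndisc(), and dir/localpar whatsits
+-- keep rlmode in sync for positioning.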
+local function featuresprocessor(head,font,attr)
+ local lookuphash=lookuphashes[font]
+ if not lookuphash then
+ return head,false
+ end
+ if trace_steps then
+ checkstep(head)
+ end
+ tfmdata=fontdata[font]
+ descriptions=tfmdata.descriptions
+ characters=tfmdata.characters
+ resources=tfmdata.resources
+ marks=resources.marks
+ anchorlookups=resources.lookup_to_anchor
+ lookuptable=resources.lookups
+ lookuptypes=resources.lookuptypes
+ currentfont=font
+ rlmode=0
+ local sequences=resources.sequences
+ local done=false
+ local datasets=otf.dataset(tfmdata,font,attr)
+ local dirstack={}
+ for s=1,#datasets do
+ local dataset=datasets[s]
+ featurevalue=dataset[1]
+ local sequence=dataset[5]
+ local rlparmode=0
+ local topstack=0
+ local success=false
+ local attribute=dataset[2]
+ local chain=dataset[3]
+ local typ=sequence.type
+ local subtables=sequence.subtables
+ if chain<0 then
+ local handler=handlers[typ]
+ local start=find_node_tail(head)
+ while start do
+ local id=start.id
+ if id==glyph_code then
+ if start.font==font and start.subtype<256 then
+ local a=start[0]
+ if a then
+ a=a==attr
+ else
+ a=true
+ end
+ if a then
+ for i=1,#subtables do
+ local lookupname=subtables[i]
+ local lookupcache=lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch=lookupcache[start.char]
+ if lookupmatch then
+ head,start,success=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if success then
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ if start then start=start.prev end
+ else
+ start=start.prev
+ end
+ else
+ start=start.prev
+ end
+ else
+ start=start.prev
+ end
+ end
+ else
+ local handler=handlers[typ]
+ local ns=#subtables
+ local start=head
+ rlmode=0
+ if ns==1 then
+ local lookupname=subtables[1]
+ local lookupcache=lookuphash[lookupname]
+ if not lookupcache then
+ report_missing_cache(typ,lookupname)
+ else
+ local function subrun(start)
+ local head=start
+ local done=false
+ while start do
+ local id=start.id
+ if id==glyph_code and start.font==font and start.subtype<256 then
+ local a=start[0]
+ if a then
+ a=(a==attr) and (not attribute or start[a_state]==attribute)
+ else
+ a=not attribute or start[a_state]==attribute
+ end
+ if a then
+ local lookupmatch=lookupcache[start.char]
+ if lookupmatch then
+ local ok
+ head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ done=true
+ end
+ end
+ if start then start=start.next end
+ else
+ start=start.next
+ end
+ else
+ start=start.next
+ end
+ end
+ if done then
+ success=true
+ return head
+ end
+ end
+ local function kerndisc(disc)
+ local prev=disc.prev
+ local next=disc.next
+ if prev and next then
+ prev.next=next
+ local a=prev[0]
+ if a then
+ a=(a==attr) and (not attribute or prev[a_state]==attribute)
+ else
+ a=not attribute or prev[a_state]==attribute
+ end
+ if a then
+ local lookupmatch=lookupcache[prev.char]
+ if lookupmatch then
+ local h,d,ok=handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ done=true
+ success=true
+ end
+ end
+ end
+ prev.next=disc
+ end
+ return next
+ end
+ while start do
+ local id=start.id
+ if id==glyph_code then
+ if start.font==font and start.subtype<256 then
+ local a=start[0]
+ if a then
+ a=(a==attr) and (not attribute or start[a_state]==attribute)
+ else
+ a=not attribute or start[a_state]==attribute
+ end
+ if a then
+ local lookupmatch=lookupcache[start.char]
+ if lookupmatch then
+ local ok
+ head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ success=true
+ end
+ end
+ if start then start=start.next end
+ else
+ start=start.next
+ end
+ else
+ start=start.next
+ end
+ elseif id==disc_code then
+ if start.subtype==discretionary_code then
+ local pre=start.pre
+ if pre then
+ local new=subrun(pre)
+ if new then start.pre=new end
+ end
+ local post=start.post
+ if post then
+ local new=subrun(post)
+ if new then start.post=new end
+ end
+ local replace=start.replace
+ if replace then
+ local new=subrun(replace)
+ if new then start.replace=new end
+ end
+elseif typ=="gpos_single" or typ=="gpos_pair" then
+ kerndisc(start)
+ end
+ start=start.next
+ elseif id==whatsit_code then
+ local subtype=start.subtype
+ if subtype==dir_code then
+ local dir=start.dir
+ if dir=="+TRT" or dir=="+TLT" then
+ topstack=topstack+1
+ dirstack[topstack]=dir
+ elseif dir=="-TRT" or dir=="-TLT" then
+ topstack=topstack-1
+ end
+ local newdir=dirstack[topstack]
+ if newdir=="+TRT" then
+ rlmode=-1
+ elseif newdir=="+TLT" then
+ rlmode=1
+ else
+ rlmode=rlparmode
+ end
+ if trace_directions then
+ report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
+ end
+ elseif subtype==localpar_code then
+ local dir=start.dir
+ if dir=="TRT" then
+ rlparmode=-1
+ elseif dir=="TLT" then
+ rlparmode=1
+ else
+ rlparmode=0
+ end
+ rlmode=rlparmode
+ if trace_directions then
+ report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
+ end
+ end
+ start=start.next
+ elseif id==math_code then
+ start=end_of_math(start).next
+ else
+ start=start.next
+ end
+ end
+ end
+ else
+ local function subrun(start)
+ local head=start
+ local done=false
+ while start do
+ local id=start.id
+ if id==glyph_code and start.font==font and start.subtype<256 then
+ local a=start[0]
+ if a then
+ a=(a==attr) and (not attribute or start[a_state]==attribute)
+ else
+ a=not attribute or start[a_state]==attribute
+ end
+ if a then
+ for i=1,ns do
+ local lookupname=subtables[i]
+ local lookupcache=lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch=lookupcache[start.char]
+ if lookupmatch then
+ local ok
+ head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ done=true
+ break
+ elseif not start then
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ if start then start=start.next end
+ else
+ start=start.next
+ end
+ else
+ start=start.next
+ end
+ end
+ if done then
+ success=true
+ return head
+ end
+ end
+ local function kerndisc(disc)
+ local prev=disc.prev
+ local next=disc.next
+ if prev and next then
+ prev.next=next
+ local a=prev[0]
+ if a then
+ a=(a==attr) and (not attribute or prev[a_state]==attribute)
+ else
+ a=not attribute or prev[a_state]==attribute
+ end
+ if a then
+ for i=1,ns do
+ local lookupname=subtables[i]
+ local lookupcache=lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch=lookupcache[prev.char]
+ if lookupmatch then
+ local h,d,ok=handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ done=true
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ end
+ prev.next=disc
+ end
+ return next
+ end
+ while start do
+ local id=start.id
+ if id==glyph_code then
+ if start.font==font and start.subtype<256 then
+ local a=start[0]
+ if a then
+ a=(a==attr) and (not attribute or start[a_state]==attribute)
+ else
+ a=not attribute or start[a_state]==attribute
+ end
+ if a then
+ for i=1,ns do
+ local lookupname=subtables[i]
+ local lookupcache=lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch=lookupcache[start.char]
+ if lookupmatch then
+ local ok
+ head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ success=true
+ break
+ elseif not start then
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ if start then start=start.next end
+ else
+ start=start.next
+ end
+ else
+ start=start.next
+ end
+ elseif id==disc_code then
+ if start.subtype==discretionary_code then
+ local pre=start.pre
+ if pre then
+ local new=subrun(pre)
+ if new then start.pre=new end
+ end
+ local post=start.post
+ if post then
+ local new=subrun(post)
+ if new then start.post=new end
+ end
+ local replace=start.replace
+ if replace then
+ local new=subrun(replace)
+ if new then start.replace=new end
+ end
+elseif typ=="gpos_single" or typ=="gpos_pair" then
+ kerndisc(start)
+ end
+ start=start.next
+ elseif id==whatsit_code then
+ local subtype=start.subtype
+ if subtype==dir_code then
+ local dir=start.dir
+ if dir=="+TRT" or dir=="+TLT" then
+ topstack=topstack+1
+ dirstack[topstack]=dir
+ elseif dir=="-TRT" or dir=="-TLT" then
+ topstack=topstack-1
+ end
+ local newdir=dirstack[topstack]
+ if newdir=="+TRT" then
+ rlmode=-1
+ elseif newdir=="+TLT" then
+ rlmode=1
+ else
+ rlmode=rlparmode
+ end
+ if trace_directions then
+ report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
+ end
+ elseif subtype==localpar_code then
+ local dir=start.dir
+ if dir=="TRT" then
+ rlparmode=-1
+ elseif dir=="TLT" then
+ rlparmode=1
+ else
+ rlparmode=0
+ end
+ rlmode=rlparmode
+ if trace_directions then
+ report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
+ end
+ end
+ start=start.next
+ elseif id==math_code then
+ start=end_of_math(start).next
+ else
+ start=start.next
+ end
+ end
+ end
+ end
+ if success then
+ done=true
+ end
+ if trace_steps then
+ registerstep(head)
+ end
+ end
+ return head,done
+end
+local function generic(lookupdata,lookupname,unicode,lookuphash)
+ local target=lookuphash[lookupname]
+ if target then
+ target[unicode]=lookupdata
+ else
+ lookuphash[lookupname]={ [unicode]=lookupdata }
+ end
+end
+local action={
+ substitution=generic,
+ multiple=generic,
+ alternate=generic,
+ position=generic,
+ ligature=function(lookupdata,lookupname,unicode,lookuphash)
+ local target=lookuphash[lookupname]
+ if not target then
+ target={}
+ lookuphash[lookupname]=target
+ end
+ for i=1,#lookupdata do
+ local li=lookupdata[i]
+ local tu=target[li]
+ if not tu then
+ tu={}
+ target[li]=tu
+ end
+ target=tu
+ end
+ target.ligature=unicode
+ end,
+ pair=function(lookupdata,lookupname,unicode,lookuphash)
+ local target=lookuphash[lookupname]
+ if not target then
+ target={}
+ lookuphash[lookupname]=target
+ end
+ local others=target[unicode]
+ local paired=lookupdata[1]
+ if others then
+ others[paired]=lookupdata
+ else
+ others={ [paired]=lookupdata }
+ target[unicode]=others
+ end
+ end,
+}
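+-- prepare_lookups converts the per-glyph description data into per-lookup hashes keyed
+-- by unicode and stores them in resources.lookuphash: slookups and mlookups go through
+-- the `action` dispatch table above, while kerns and mark/cursive-exit anchors are added
+-- directly.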
+local function prepare_lookups(tfmdata)
+ local rawdata=tfmdata.shared.rawdata
+ local resources=rawdata.resources
+ local lookuphash=resources.lookuphash
+ local anchor_to_lookup=resources.anchor_to_lookup
+ local lookup_to_anchor=resources.lookup_to_anchor
+ local lookuptypes=resources.lookuptypes
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ for unicode,character in next,characters do
+ local description=descriptions[unicode]
+ if description then
+ local lookups=description.slookups
+ if lookups then
+ for lookupname,lookupdata in next,lookups do
+ action[lookuptypes[lookupname]](lookupdata,lookupname,unicode,lookuphash)
+ end
+ end
+ local lookups=description.mlookups
+ if lookups then
+ for lookupname,lookuplist in next,lookups do
+ local lookuptype=lookuptypes[lookupname]
+ for l=1,#lookuplist do
+ local lookupdata=lookuplist[l]
+ action[lookuptype](lookupdata,lookupname,unicode,lookuphash)
+ end
+ end
+ end
+ local list=description.kerns
+ if list then
+ for lookup,krn in next,list do
+ local target=lookuphash[lookup]
+ if target then
+ target[unicode]=krn
+ else
+ lookuphash[lookup]={ [unicode]=krn }
+ end
+ end
+ end
+ local list=description.anchors
+ if list then
+ for typ,anchors in next,list do
+ if typ=="mark" or typ=="cexit" then
+ for name,anchor in next,anchors do
+ local lookups=anchor_to_lookup[name]
+ if lookups then
+ for lookup,_ in next,lookups do
+ local target=lookuphash[lookup]
+ if target then
+ target[unicode]=anchors
+ else
+ lookuphash[lookup]={ [unicode]=anchors }
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
+local function split(replacement,original)
+ local result={}
+ for i=1,#replacement do
+ result[original[i]]=replacement[i]
+ end
+ return result
+end
+local valid={
+ coverage={ chainsub=true,chainpos=true,contextsub=true },
+ reversecoverage={ reversesub=true },
+ glyphs={ chainsub=true,chainpos=true },
+}
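+-- prepare_contextchains flattens each contextual rule's before/current/after coverage
+-- into one sequence and stores it per covered unicode of the current part as tuples
+-- { rule number, lookuptype, sequence, first, last, lookups, replacements }, i.e. the
+-- ck records consumed by normal_handle_contextchain above.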
+local function prepare_contextchains(tfmdata)
+ local rawdata=tfmdata.shared.rawdata
+ local resources=rawdata.resources
+ local lookuphash=resources.lookuphash
+ local lookups=rawdata.lookups
+ if lookups then
+ for lookupname,lookupdata in next,rawdata.lookups do
+ local lookuptype=lookupdata.type
+ if lookuptype then
+ local rules=lookupdata.rules
+ if rules then
+ local format=lookupdata.format
+ local validformat=valid[format]
+ if not validformat then
+ report_prepare("unsupported format %a",format)
+ elseif not validformat[lookuptype] then
+ report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookupname)
+ else
+ local contexts=lookuphash[lookupname]
+ if not contexts then
+ contexts={}
+ lookuphash[lookupname]=contexts
+ end
+ local t,nt={},0
+ for nofrules=1,#rules do
+ local rule=rules[nofrules]
+ local current=rule.current
+ local before=rule.before
+ local after=rule.after
+ local replacements=rule.replacements
+ local sequence={}
+ local nofsequences=0
+ if before then
+ for n=1,#before do
+ nofsequences=nofsequences+1
+ sequence[nofsequences]=before[n]
+ end
+ end
+ local start=nofsequences+1
+ for n=1,#current do
+ nofsequences=nofsequences+1
+ sequence[nofsequences]=current[n]
+ end
+ local stop=nofsequences
+ if after then
+ for n=1,#after do
+ nofsequences=nofsequences+1
+ sequence[nofsequences]=after[n]
+ end
+ end
+ if sequence[1] then
+ nt=nt+1
+ t[nt]={ nofrules,lookuptype,sequence,start,stop,rule.lookups,replacements }
+ for unic,_ in next,sequence[start] do
+ local cu=contexts[unic]
+ if not cu then
+ contexts[unic]=t
+ end
+ end
+ end
+ end
+ end
+ else
+ end
+ else
+ report_prepare("missing lookuptype for lookupname %a",lookupname)
+ end
+ end
+ end
+end
+local function featuresinitializer(tfmdata,value)
+ if true then
+ local rawdata=tfmdata.shared.rawdata
+ local properties=rawdata.properties
+ if not properties.initialized then
+ local starttime=trace_preparing and os.clock()
+ local resources=rawdata.resources
+ resources.lookuphash=resources.lookuphash or {}
+ prepare_contextchains(tfmdata)
+ prepare_lookups(tfmdata)
+ properties.initialized=true
+ if trace_preparing then
+ report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,tfmdata.properties.fullname)
+ end
+ end
+ end
+end
+registerotffeature {
+ name="features",
+ description="features",
+ default=true,
+ initializers={
+ position=1,
+ node=featuresinitializer,
+ },
+ processors={
+ node=featuresprocessor,
+ }
+}
+otf.handlers=handlers
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-otp']={
+ version=1.001,
+ comment="companion to font-otf.lua (packing)",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local next,type=next,type
+local sort,concat=table.sort,table.concat
+local sortedhash=table.sortedhash
+local trace_packing=false trackers.register("otf.packing",function(v) trace_packing=v end)
+local trace_loading=false trackers.register("otf.loading",function(v) trace_loading=v end)
+local report_otf=logs.reporter("fonts","otf loading")
+fonts=fonts or {}
+local handlers=fonts.handlers or {}
+fonts.handlers=handlers
+local otf=handlers.otf or {}
+handlers.otf=otf
+local enhancers=otf.enhancers or {}
+otf.enhancers=enhancers
+local glists=otf.glists or { "gsub","gpos" }
+otf.glists=glists
+local criterium=1
+local threshold=0
+local function tabstr_normal(t)
+ local s={}
+ local n=0
+ for k,v in next,t do
+ n=n+1
+ if type(v)=="table" then
+ s[n]=k..">"..tabstr_normal(v)
+ elseif v==true then
+ s[n]=k.."+"
+ elseif v then
+ s[n]=k.."="..v
+ else
+ s[n]=k.."-"
+ end
+ end
+ if n==0 then
+ return ""
+ elseif n==1 then
+ return s[1]
+ else
+ sort(s)
+ return concat(s,",")
+ end
+end
+local function tabstr_flat(t)
+ local s={}
+ local n=0
+ for k,v in next,t do
+ n=n+1
+ s[n]=k.."="..v
+ end
+ if n==0 then
+ return ""
+ elseif n==1 then
+ return s[1]
+ else
+ sort(s)
+ return concat(s,",")
+ end
+end
+local function tabstr_mixed(t)
+ local s={}
+ local n=#t
+ if n==0 then
+ return ""
+ elseif n==1 then
+ local k=t[1]
+ if k==true then
+ return "++"
+ elseif k==false then
+ return "--"
+ else
+ return tostring(k)
+ end
+ else
+ for i=1,n do
+ local k=t[i]
+ if k==true then
+ s[i]="++"
+ elseif k==false then
+ s[i]="--"
+ else
+ s[i]=k
+ end
+ end
+ return concat(s,",")
+ end
+end
+local function tabstr_boolean(t)
+ local s={}
+ local n=0
+ for k,v in next,t do
+ n=n+1
+ if v then
+ s[n]=k.."+"
+ else
+ s[n]=k.."-"
+ end
+ end
+ if n==0 then
+ return ""
+ elseif n==1 then
+ return s[1]
+ else
+ sort(s)
+ return concat(s,",")
+ end
+end
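+-- packdata shrinks a font table before caching: recurring subtables (bounding boxes,
+-- kerns, anchors, flags, ...) are interned into data.tables and replaced by indices.
+-- It runs in multiple passes; entries used no more than `criterium` times are inlined
+-- again by pack_final.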
+local function packdata(data)
+ if data then
+ local h,t,c={},{},{}
+ local hh,tt,cc={},{},{}
+ local nt,ntt=0,0
+ local function pack_normal(v)
+ local tag=tabstr_normal(v)
+ local ht=h[tag]
+ if ht then
+ c[ht]=c[ht]+1
+ return ht
+ else
+ nt=nt+1
+ t[nt]=v
+ h[tag]=nt
+ c[nt]=1
+ return nt
+ end
+ end
+ local function pack_flat(v)
+ local tag=tabstr_flat(v)
+ local ht=h[tag]
+ if ht then
+ c[ht]=c[ht]+1
+ return ht
+ else
+ nt=nt+1
+ t[nt]=v
+ h[tag]=nt
+ c[nt]=1
+ return nt
+ end
+ end
+ local function pack_boolean(v)
+ local tag=tabstr_boolean(v)
+ local ht=h[tag]
+ if ht then
+ c[ht]=c[ht]+1
+ return ht
+ else
+ nt=nt+1
+ t[nt]=v
+ h[tag]=nt
+ c[nt]=1
+ return nt
+ end
+ end
+ local function pack_indexed(v)
+ local tag=concat(v," ")
+ local ht=h[tag]
+ if ht then
+ c[ht]=c[ht]+1
+ return ht
+ else
+ nt=nt+1
+ t[nt]=v
+ h[tag]=nt
+ c[nt]=1
+ return nt
+ end
+ end
+ local function pack_mixed(v)
+ local tag=tabstr_mixed(v)
+ local ht=h[tag]
+ if ht then
+ c[ht]=c[ht]+1
+ return ht
+ else
+ nt=nt+1
+ t[nt]=v
+ h[tag]=nt
+ c[nt]=1
+ return nt
+ end
+ end
+ local function pack_final(v)
+ if c[v]<=criterium then
+ return t[v]
+ else
+ local hv=hh[v]
+ if hv then
+ return hv
+ else
+ ntt=ntt+1
+ tt[ntt]=t[v]
+ hh[v]=ntt
+ cc[ntt]=c[v]
+ return ntt
+ end
+ end
+ end
+ local function success(stage,pass)
+ if nt==0 then
+ if trace_loading or trace_packing then
+ report_otf("pack quality: nothing to pack")
+ end
+ return false
+ elseif nt>=threshold then
+ local one,two,rest=0,0,0
+ if pass==1 then
+ for k,v in next,c do
+ if v==1 then
+ one=one+1
+ elseif v==2 then
+ two=two+1
+ else
+ rest=rest+1
+ end
+ end
+ else
+ for k,v in next,cc do
+ if v>20 then
+ rest=rest+1
+ elseif v>10 then
+ two=two+1
+ else
+ one=one+1
+ end
+ end
+ data.tables=tt
+ end
+ if trace_loading or trace_packing then
+ report_otf("pack quality: stage %s, pass %s, %s packed, 1-10:%s, 11-20:%s, rest:%s (criterium: %s)",stage,pass,one+two+rest,one,two,rest,criterium)
+ end
+ return true
+ else
+ if trace_loading or trace_packing then
+ report_otf("pack quality: stage %s, pass %s, %s packed, aborting pack (threshold: %s)",stage,pass,nt,threshold)
+ end
+ return false
+ end
+ end
+ local function packers(pass)
+ if pass==1 then
+ return pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed
+ else
+ return pack_final,pack_final,pack_final,pack_final,pack_final
+ end
+ end
+ local resources=data.resources
+ local lookuptypes=resources.lookuptypes
+ for pass=1,2 do
+ if trace_packing then
+ report_otf("start packing: stage 1, pass %s",pass)
+ end
+ local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass)
+ for unicode,description in next,data.descriptions do
+ local boundingbox=description.boundingbox
+ if boundingbox then
+ description.boundingbox=pack_indexed(boundingbox)
+ end
+ local slookups=description.slookups
+ if slookups then
+ for tag,slookup in next,slookups do
+ local what=lookuptypes[tag]
+ if what=="pair" then
+ local t=slookup[2] if t then slookup[2]=pack_indexed(t) end
+ local t=slookup[3] if t then slookup[3]=pack_indexed(t) end
+ elseif what~="substitution" then
+ slookups[tag]=pack_indexed(slookup)
+ end
+ end
+ end
+ local mlookups=description.mlookups
+ if mlookups then
+ for tag,mlookup in next,mlookups do
+ local what=lookuptypes[tag]
+ if what=="pair" then
+ for i=1,#mlookup do
+ local lookup=mlookup[i]
+ local t=lookup[2] if t then lookup[2]=pack_indexed(t) end
+ local t=lookup[3] if t then lookup[3]=pack_indexed(t) end
+ end
+ elseif what~="substitution" then
+ for i=1,#mlookup do
+ mlookup[i]=pack_indexed(mlookup[i])
+ end
+ end
+ end
+ end
+ local kerns=description.kerns
+ if kerns then
+ for tag,kern in next,kerns do
+ kerns[tag]=pack_flat(kern)
+ end
+ end
+ local math=description.math
+ if math then
+ local kerns=math.kerns
+ if kerns then
+ for tag,kern in next,kerns do
+ kerns[tag]=pack_normal(kern)
+ end
+ end
+ end
+ local anchors=description.anchors
+ if anchors then
+ for what,anchor in next,anchors do
+ if what=="baselig" then
+ for _,a in next,anchor do
+ for k=1,#a do
+ a[k]=pack_indexed(a[k])
+ end
+ end
+ else
+ for k,v in next,anchor do
+ anchor[k]=pack_indexed(v)
+ end
+ end
+ end
+ end
+ local altuni=description.altuni
+ if altuni then
+ for i=1,#altuni do
+ altuni[i]=pack_flat(altuni[i])
+ end
+ end
+ end
+ local lookups=data.lookups
+ if lookups then
+ for _,lookup in next,lookups do
+ local rules=lookup.rules
+ if rules then
+ for i=1,#rules do
+ local rule=rules[i]
+ local r=rule.before if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end
+ local r=rule.after if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end
+ local r=rule.current if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end
+ local r=rule.replacements if r then rule.replacements=pack_flat (r) end
+ local r=rule.lookups if r then rule.lookups=pack_indexed(r) end
+ end
+ end
+ end
+ end
+ local anchor_to_lookup=resources.anchor_to_lookup
+ if anchor_to_lookup then
+ for anchor,lookup in next,anchor_to_lookup do
+ anchor_to_lookup[anchor]=pack_normal(lookup)
+ end
+ end
+ local lookup_to_anchor=resources.lookup_to_anchor
+ if lookup_to_anchor then
+ for lookup,anchor in next,lookup_to_anchor do
+ lookup_to_anchor[lookup]=pack_normal(anchor)
+ end
+ end
+ local sequences=resources.sequences
+ if sequences then
+ for feature,sequence in next,sequences do
+ local flags=sequence.flags
+ if flags then
+ sequence.flags=pack_normal(flags)
+ end
+ local subtables=sequence.subtables
+ if subtables then
+ sequence.subtables=pack_normal(subtables)
+ end
+ local features=sequence.features
+ if features then
+ for script,feature in next,features do
+ features[script]=pack_normal(feature)
+ end
+ end
+ local order=sequence.order
+ if order then
+ sequence.order=pack_indexed(order)
+ end
+ local markclass=sequence.markclass
+ if markclass then
+ sequence.markclass=pack_boolean(markclass)
+ end
+ end
+ end
+ local lookups=resources.lookups
+ if lookups then
+ for name,lookup in next,lookups do
+ local flags=lookup.flags
+ if flags then
+ lookup.flags=pack_normal(flags)
+ end
+ local subtables=lookup.subtables
+ if subtables then
+ lookup.subtables=pack_normal(subtables)
+ end
+ end
+ end
+ local features=resources.features
+ if features then
+ for _,what in next,glists do
+ local list=features[what]
+ if list then
+ for feature,spec in next,list do
+ list[feature]=pack_normal(spec)
+ end
+ end
+ end
+ end
+ if not success(1,pass) then
+ return
+ end
+ end
+ if nt>0 then
+ for pass=1,2 do
+ if trace_packing then
+ report_otf("start packing: stage 2, pass %s",pass)
+ end
+ local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass)
+ for unicode,description in next,data.descriptions do
+ local kerns=description.kerns
+ if kerns then
+ description.kerns=pack_normal(kerns)
+ end
+ local math=description.math
+ if math then
+ local kerns=math.kerns
+ if kerns then
+ math.kerns=pack_normal(kerns)
+ end
+ end
+ local anchors=description.anchors
+ if anchors then
+ description.anchors=pack_normal(anchors)
+ end
+ local mlookups=description.mlookups
+ if mlookups then
+ for tag,mlookup in next,mlookups do
+ mlookups[tag]=pack_normal(mlookup)
+ end
+ end
+ local altuni=description.altuni
+ if altuni then
+ description.altuni=pack_normal(altuni)
+ end
+ end
+ local lookups=data.lookups
+ if lookups then
+ for _,lookup in next,lookups do
+ local rules=lookup.rules
+ if rules then
+ for i=1,#rules do
+ local rule=rules[i]
+ local r=rule.before if r then rule.before=pack_normal(r) end
+ local r=rule.after if r then rule.after=pack_normal(r) end
+ local r=rule.current if r then rule.current=pack_normal(r) end
+ end
+ end
+ end
+ end
+ local sequences=resources.sequences
+ if sequences then
+ for feature,sequence in next,sequences do
+ sequence.features=pack_normal(sequence.features)
+ end
+ end
+ if not success(2,pass) then
+ end
+ end
+ for pass=1,2 do
+ local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass)
+ for unicode,description in next,data.descriptions do
+ local slookups=description.slookups
+ if slookups then
+ description.slookups=pack_normal(slookups)
+ end
+ local mlookups=description.mlookups
+ if mlookups then
+ description.mlookups=pack_normal(mlookups)
+ end
+ end
+ end
+ end
+ end
+end
+local unpacked_mt={
+ __index=function(t,k)
+ t[k]=false
+ return k
+ end
+}
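+-- unpackdata reverses the packing when a cached font is loaded: wherever a subtable is
+-- expected, a numeric index is resolved through data.tables, after which data.tables is
+-- dropped.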
+local function unpackdata(data)
+ if data then
+ local tables=data.tables
+ if tables then
+ local resources=data.resources
+ local lookuptypes=resources.lookuptypes
+ local unpacked={}
+ setmetatable(unpacked,unpacked_mt)
+ for unicode,description in next,data.descriptions do
+ local tv=tables[description.boundingbox]
+ if tv then
+ description.boundingbox=tv
+ end
+ local slookups=description.slookups
+ if slookups then
+ local tv=tables[slookups]
+ if tv then
+ description.slookups=tv
+ slookups=unpacked[tv]
+ end
+ if slookups then
+ for tag,lookup in next,slookups do
+ local what=lookuptypes[tag]
+ if what=="pair" then
+ local tv=tables[lookup[2]]
+ if tv then
+ lookup[2]=tv
+ end
+ local tv=tables[lookup[3]]
+ if tv then
+ lookup[3]=tv
+ end
+ elseif what~="substitution" then
+ local tv=tables[lookup]
+ if tv then
+ slookups[tag]=tv
+ end
+ end
+ end
+ end
+ end
+ local mlookups=description.mlookups
+ if mlookups then
+ local tv=tables[mlookups]
+ if tv then
+ description.mlookups=tv
+ mlookups=unpacked[tv]
+ end
+ if mlookups then
+ for tag,list in next,mlookups do
+ local tv=tables[list]
+ if tv then
+ mlookups[tag]=tv
+ list=unpacked[tv]
+ end
+ if list then
+ local what=lookuptypes[tag]
+ if what=="pair" then
+ for i=1,#list do
+ local lookup=list[i]
+ local tv=tables[lookup[2]]
+ if tv then
+ lookup[2]=tv
+ end
+ local tv=tables[lookup[3]]
+ if tv then
+ lookup[3]=tv
+ end
+ end
+ elseif what~="substitution" then
+ for i=1,#list do
+ local tv=tables[list[i]]
+ if tv then
+ list[i]=tv
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ local kerns=description.kerns
+ if kerns then
+ local tm=tables[kerns]
+ if tm then
+ description.kerns=tm
+ kerns=unpacked[tm]
+ end
+ if kerns then
+ for k,kern in next,kerns do
+ local tv=tables[kern]
+ if tv then
+ kerns[k]=tv
+ end
+ end
+ end
+ end
+ local math=description.math
+ if math then
+ local kerns=math.kerns
+ if kerns then
+ local tm=tables[kerns]
+ if tm then
+ math.kerns=tm
+ kerns=unpacked[tm]
+ end
+ if kerns then
+ for k,kern in next,kerns do
+ local tv=tables[kern]
+ if tv then
+ kerns[k]=tv
+ end
+ end
+ end
+ end
+ end
+ local anchors=description.anchors
+ if anchors then
+ local ta=tables[anchors]
+ if ta then
+ description.anchors=ta
+ anchors=unpacked[ta]
+ end
+ if anchors then
+ for tag,anchor in next,anchors do
+ if tag=="baselig" then
+ for _,list in next,anchor do
+ for i=1,#list do
+ local tv=tables[list[i]]
+ if tv then
+ list[i]=tv
+ end
+ end
+ end
+ else
+ for a,data in next,anchor do
+ local tv=tables[data]
+ if tv then
+ anchor[a]=tv
+ end
+ end
+ end
+ end
+ end
+ end
+ local altuni=description.altuni
+ if altuni then
+ local altuni=tables[altuni]
+ if altuni then
+ description.altuni=altuni
+ for i=1,#altuni do
+ local tv=tables[altuni[i]]
+ if tv then
+ altuni[i]=tv
+ end
+ end
+ end
+ end
+ end
+ local lookups=data.lookups
+ if lookups then
+ for _,lookup in next,lookups do
+ local rules=lookup.rules
+ if rules then
+ for i=1,#rules do
+ local rule=rules[i]
+ local before=rule.before
+ if before then
+ local tv=tables[before]
+ if tv then
+ rule.before=tv
+ before=unpacked[tv]
+ end
+ if before then
+ for i=1,#before do
+ local tv=tables[before[i]]
+ if tv then
+ before[i]=tv
+ end
+ end
+ end
+ end
+ local after=rule.after
+ if after then
+ local tv=tables[after]
+ if tv then
+ rule.after=tv
+ after=unpacked[tv]
+ end
+ if after then
+ for i=1,#after do
+ local tv=tables[after[i]]
+ if tv then
+ after[i]=tv
+ end
+ end
+ end
+ end
+ local current=rule.current
+ if current then
+ local tv=tables[current]
+ if tv then
+ rule.current=tv
+ current=unpacked[tv]
+ end
+ if current then
+ for i=1,#current do
+ local tv=tables[current[i]]
+ if tv then
+ current[i]=tv
+ end
+ end
+ end
+ end
+ local replacements=rule.replacements
+ if replacements then
+ local tv=tables[replacements]
+ if tv then
+ rule.replacements=tv
+ end
+ end
+ local fore=rule.fore
+ if fore then
+ local tv=tables[fore]
+ if tv then
+ rule.fore=tv
+ end
+ end
+ local back=rule.back
+ if back then
+ local tv=tables[back]
+ if tv then
+ rule.back=tv
+ end
+ end
+ local names=rule.names
+ if names then
+ local tv=tables[names]
+ if tv then
+ rule.names=tv
+ end
+ end
+ local lookups=rule.lookups
+ if lookups then
+ local tv=tables[lookups]
+ if tv then
+ rule.lookups=tv
+ end
+ end
+ end
+ end
+ end
+ end
+ local anchor_to_lookup=resources.anchor_to_lookup
+ if anchor_to_lookup then
+ for anchor,lookup in next,anchor_to_lookup do
+ local tv=tables[lookup]
+ if tv then
+ anchor_to_lookup[anchor]=tv
+ end
+ end
+ end
+ local lookup_to_anchor=resources.lookup_to_anchor
+ if lookup_to_anchor then
+ for lookup,anchor in next,lookup_to_anchor do
+ local tv=tables[anchor]
+ if tv then
+ lookup_to_anchor[lookup]=tv
+ end
+ end
+ end
+ local ls=resources.sequences
+ if ls then
+ for _,feature in next,ls do
+ local flags=feature.flags
+ if flags then
+ local tv=tables[flags]
+ if tv then
+ feature.flags=tv
+ end
+ end
+ local subtables=feature.subtables
+ if subtables then
+ local tv=tables[subtables]
+ if tv then
+ feature.subtables=tv
+ end
+ end
+ local features=feature.features
+ if features then
+ local tv=tables[features]
+ if tv then
+ feature.features=tv
+ features=unpacked[tv]
+ end
+ if features then
+ for script,data in next,features do
+ local tv=tables[data]
+ if tv then
+ features[script]=tv
+ end
+ end
+ end
+ end
+ local order=feature.order
+ if order then
+ local tv=tables[order]
+ if tv then
+ feature.order=tv
+ end
+ end
+ local markclass=feature.markclass
+ if markclass then
+ local tv=tables[markclass]
+ if tv then
+ feature.markclass=tv
+ end
+ end
+ end
+ end
+ local lookups=resources.lookups
+ if lookups then
+ for _,lookup in next,lookups do
+ local flags=lookup.flags
+ if flags then
+ local tv=tables[flags]
+ if tv then
+ lookup.flags=tv
+ end
+ end
+ local subtables=lookup.subtables
+ if subtables then
+ local tv=tables[subtables]
+ if tv then
+ lookup.subtables=tv
+ end
+ end
+ end
+ end
+ local features=resources.features
+ if features then
+ for _,what in next,glists do
+ local feature=features[what]
+ if feature then
+ for tag,spec in next,feature do
+ local tv=tables[spec]
+ if tv then
+ feature[tag]=tv
+ end
+ end
+ end
+ end
+ end
+ data.tables=nil
+ end
+ end
+end
+if otf.enhancers.register then
+ otf.enhancers.register("pack",packdata)
+ otf.enhancers.register("unpack",unpackdata)
+end
+otf.enhancers.unpack=unpackdata
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-fonts-lua']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+fonts.formats.lua="lua"
+function fonts.readers.lua(specification)
+ local fullname=specification.filename or ""
+ if fullname=="" then
+ local forced=specification.forced or ""
+ if forced~="" then
+ fullname=specification.name.."."..forced
+ else
+ fullname=specification.name
+ end
+ end
+ local fullname=resolvers.findfile(fullname) or ""
+ if fullname~="" then
+ local loader=loadfile(fullname)
+ loader=loader and loader()
+ return loader and loader(specification)
+ end
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-def']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local format,gmatch,match,find,lower,gsub=string.format,string.gmatch,string.match,string.find,string.lower,string.gsub
+local tostring,next=tostring,next
+local lpegmatch=lpeg.match
+local suffixonly,removesuffix=file.suffix,file.removesuffix
+local allocate=utilities.storage.allocate
+local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end)
+local directive_embedall=false directives.register("fonts.embedall",function(v) directive_embedall=v end)
+trackers.register("fonts.loading","fonts.defining","otf.loading","afm.loading","tfm.loading")
+trackers.register("fonts.all","fonts.*","otf.*","afm.*","tfm.*")
+local report_defining=logs.reporter("fonts","defining")
+local fonts=fonts
+local fontdata=fonts.hashes.identifiers
+local readers=fonts.readers
+local definers=fonts.definers
+local specifiers=fonts.specifiers
+local constructors=fonts.constructors
+local fontgoodies=fonts.goodies
+readers.sequence=allocate { 'otf','ttf','afm','tfm','lua' }
+local variants=allocate()
+specifiers.variants=variants
+definers.methods=definers.methods or {}
+local internalized=allocate()
+local lastdefined=nil
+local loadedfonts=constructors.loadedfonts
+local designsizes=constructors.designsizes
+local resolvefile=fontgoodies and fontgoodies.filenames and fontgoodies.filenames.resolve or function(s) return s end
+local splitter,splitspecifiers=nil,""
+local P,C,S,Cc=lpeg.P,lpeg.C,lpeg.S,lpeg.Cc
+local left=P("(")
+local right=P(")")
+local colon=P(":")
+local space=P(" ")
+definers.defaultlookup="file"
+local prefixpattern=P(false)
+local function addspecifier(symbol)
+ splitspecifiers=splitspecifiers..symbol
+ local method=S(splitspecifiers)
+ local lookup=C(prefixpattern)*colon
+ local sub=left*C(P(1-left-right-method)^1)*right
+ local specification=C(method)*C(P(1)^1)
+ local name=C((1-sub-specification)^1)
+ splitter=P((lookup+Cc(""))*name*(sub+Cc(""))*(specification+Cc("")))
+end
+local function addlookup(str,default)
+ prefixpattern=prefixpattern+P(str)
+end
+definers.addlookup=addlookup
+addlookup("file")
+addlookup("name")
+addlookup("spec")
+local function getspecification(str)
+ return lpegmatch(splitter,str or "")
+end
+definers.getspecification=getspecification
+function definers.registersplit(symbol,action,verbosename)
+ addspecifier(symbol)
+ variants[symbol]=action
+ if verbosename then
+ variants[verbosename]=action
+ end
+end
+local function makespecification(specification,lookup,name,sub,method,detail,size)
+ size=size or 655360
+ if not lookup or lookup=="" then
+ lookup=definers.defaultlookup
+ end
+ if trace_defining then
+ report_defining("specification %a, lookup %a, name %a, sub %a, method %a, detail %a",
+ specification,lookup,name,sub,method,detail)
+ end
+ local t={
+ lookup=lookup,
+ specification=specification,
+ size=size,
+ name=name,
+ sub=sub,
+ method=method,
+ detail=detail,
+ resolved="",
+ forced="",
+ features={},
+ }
+ return t
+end
+definers.makespecification=makespecification
+function definers.analyze(specification,size)
+ local lookup,name,sub,method,detail=getspecification(specification or "")
+ return makespecification(specification,lookup,name,sub,method,detail,size)
+end
+definers.resolvers=definers.resolvers or {}
+local resolvers=definers.resolvers
+function resolvers.file(specification)
+ local name=resolvefile(specification.name)
+ local suffix=lower(suffixonly(name))
+ if fonts.formats[suffix] then
+ specification.forced=suffix
+ specification.forcedname=name
+ specification.name=removesuffix(name)
+ else
+ specification.name=name
+ end
+end
+function resolvers.name(specification)
+ local resolve=fonts.names.resolve
+ if resolve then
+ local resolved,sub=resolve(specification.name,specification.sub,specification)
+ if resolved then
+ specification.resolved=resolved
+ specification.sub=sub
+ local suffix=lower(suffixonly(resolved))
+ if fonts.formats[suffix] then
+ specification.forced=suffix
+ specification.forcedname=resolved
+ specification.name=removesuffix(resolved)
+ else
+ specification.name=resolved
+ end
+ end
+ else
+ resolvers.file(specification)
+ end
+end
+function resolvers.spec(specification)
+ local resolvespec=fonts.names.resolvespec
+ if resolvespec then
+ local resolved,sub=resolvespec(specification.name,specification.sub,specification)
+ if resolved then
+ specification.resolved=resolved
+ specification.sub=sub
+ specification.forced=lower(suffixonly(resolved))
+ specification.forcedname=resolved
+ specification.name=removesuffix(resolved)
+ end
+ else
+ resolvers.name(specification)
+ end
+end
+function definers.resolve(specification)
+ if not specification.resolved or specification.resolved=="" then
+ local r=resolvers[specification.lookup]
+ if r then
+ r(specification)
+ end
+ end
+ if specification.forced=="" then
+ specification.forced=nil
+ specification.forcedname=nil
+ end
+ specification.hash=lower(specification.name..' @ '..constructors.hashfeatures(specification))
+ if specification.sub and specification.sub~="" then
+ specification.hash=specification.sub..' @ '..specification.hash
+ end
+ return specification
+end
+function definers.applypostprocessors(tfmdata)
+ local postprocessors=tfmdata.postprocessors
+ if postprocessors then
+ local properties=tfmdata.properties
+ for i=1,#postprocessors do
+ local extrahash=postprocessors[i](tfmdata)
+ if type(extrahash)=="string" and extrahash~="" then
+ extrahash=gsub(lower(extrahash),"[^a-z]","-")
+ properties.fullname=format("%s-%s",properties.fullname,extrahash)
+ end
+ end
+ end
+ return tfmdata
+end
+local function checkembedding(tfmdata)
+ local properties=tfmdata.properties
+ local embedding
+ if directive_embedall then
+ embedding="full"
+ elseif properties and properties.filename and constructors.dontembed[properties.filename] then
+ embedding="no"
+ else
+ embedding="subset"
+ end
+ if properties then
+ properties.embedding=embedding
+ else
+ tfmdata.properties={ embedding=embedding }
+ end
+ tfmdata.embedding=embedding
+end
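+-- definers.loadfont resolves a specification to tfmdata: a forced format goes straight
+-- to its reader, otherwise the readers in readers.sequence ('otf','ttf','afm','tfm','lua')
+-- are tried in order. Results get postprocessors and embedding flags applied and are
+-- cached in loadedfonts by hash.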
+function definers.loadfont(specification)
+ local hash=constructors.hashinstance(specification)
+ local tfmdata=loadedfonts[hash]
+ if not tfmdata then
+ local forced=specification.forced or ""
+ if forced~="" then
+ local reader=readers[lower(forced)]
+ tfmdata=reader and reader(specification)
+ if not tfmdata then
+ report_defining("forced type %a of %a not found",forced,specification.name)
+ end
+ else
+ local sequence=readers.sequence
+ for s=1,#sequence do
+ local reader=sequence[s]
+ if readers[reader] then
+ if trace_defining then
+ report_defining("trying (reader sequence driven) type %a for %a with file %a",reader,specification.name,specification.filename)
+ end
+ tfmdata=readers[reader](specification)
+ if tfmdata then
+ break
+ else
+ specification.filename=nil
+ end
+ end
+ end
+ end
+ if tfmdata then
+ tfmdata=definers.applypostprocessors(tfmdata)
+ checkembedding(tfmdata)
+ loadedfonts[hash]=tfmdata
+ designsizes[specification.hash]=tfmdata.parameters.designsize
+ end
+ end
+ if not tfmdata then
+ report_defining("font with asked name %a is not found using lookup %a",specification.name,specification.lookup)
+ end
+ return tfmdata
+end
+function constructors.checkvirtualids()
+end
+function constructors.readanddefine(name,size)
+ local specification=definers.analyze(name,size)
+ local method=specification.method
+ if method and variants[method] then
+ specification=variants[method](specification)
+ end
+ specification=definers.resolve(specification)
+ local hash=constructors.hashinstance(specification)
+ local id=definers.registered(hash)
+ if not id then
+ local tfmdata=definers.loadfont(specification)
+ if tfmdata then
+ tfmdata.properties.hash=hash
+ constructors.checkvirtualids(tfmdata)
+ id=font.define(tfmdata)
+ definers.register(tfmdata,id)
+ else
+ id=0
+ end
+ end
+ return fontdata[id],id
+end
+function definers.current()
+ return lastdefined
+end
+function definers.registered(hash)
+ local id=internalized[hash]
+ return id,id and fontdata[id]
+end
+function definers.register(tfmdata,id)
+ if tfmdata and id then
+ local hash=tfmdata.properties.hash
+ if not hash then
+ report_defining("registering font, id %a, name %a, invalid hash",id,tfmdata.properties.filename or "?")
+ elseif not internalized[hash] then
+ internalized[hash]=id
+ if trace_defining then
+ report_defining("registering font, id %s, hash %a",id,hash)
+ end
+ fontdata[id]=tfmdata
+ end
+ end
+end
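+-- definers.read is the target of the define_font callback registered at the end of this
+-- closure: it analyzes and resolves the request, reuses an already registered instance
+-- when the hash matches, and otherwise loads the font and registers it under the given id.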
+function definers.read(specification,size,id)
+ statistics.starttiming(fonts)
+ if type(specification)=="string" then
+ specification=definers.analyze(specification,size)
+ end
+ local method=specification.method
+ if method and variants[method] then
+ specification=variants[method](specification)
+ end
+ specification=definers.resolve(specification)
+ local hash=constructors.hashinstance(specification)
+ local tfmdata=definers.registered(hash)
+ if tfmdata then
+ if trace_defining then
+ report_defining("already hashed: %s",hash)
+ end
+ else
+ tfmdata=definers.loadfont(specification)
+ if tfmdata then
+ if trace_defining then
+ report_defining("loaded and hashed: %s",hash)
+ end
+ tfmdata.properties.hash=hash
+ if id then
+ definers.register(tfmdata,id)
+ end
+ else
+ if trace_defining then
+ report_defining("not loaded and hashed: %s",hash)
+ end
+ end
+ end
+ lastdefined=tfmdata or id
+ if not tfmdata then
+ report_defining("unknown font %a, loading aborted",specification.name)
+ elseif trace_defining and type(tfmdata)=="table" then
+ local properties=tfmdata.properties or {}
+ local parameters=tfmdata.parameters or {}
+ report_defining("using %s font with id %a, name %a, size %a, bytes %a, encoding %a, fullname %a, filename %a",
+ properties.format,id,properties.name,parameters.size,properties.encodingbytes,
+ properties.encodingname,properties.fullname,file.basename(properties.filename))
+ end
+ statistics.stoptiming(fonts)
+ return tfmdata
+end
+function font.getfont(id)
+ return fontdata[id]
+end
+callbacks.register('define_font',definers.read,"definition of fonts (tfmdata preparation)")
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-font-def']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+fonts.constructors.namemode="specification"
+function fonts.definers.getspecification(str)
+ return "",str,"",":",str
+end
+local list={}
+local function issome () list.lookup='name' end
+local function isfile () list.lookup='file' end
+local function isname () list.lookup='name' end
+local function thename(s) list.name=s end
+local function issub (v) list.sub=v end
+local function iscrap (s) list.crap=string.lower(s) end
+local function iskey (k,v) list[k]=v end
+local function istrue (s) list[s]=true end
+local function isfalse(s) list[s]=false end
+local P,S,R,C=lpeg.P,lpeg.S,lpeg.R,lpeg.C
+local spaces=P(" ")^0
+local namespec=(1-S("/:("))^0
+local crapspec=spaces*P("/")*(((1-P(":"))^0)/iscrap)*spaces
+local filename_1=P("file:")/isfile*(namespec/thename)
+local filename_2=P("[")*P(true)/isname*(((1-P("]"))^0)/thename)*P("]")
+local fontname_1=P("name:")/isname*(namespec/thename)
+local fontname_2=P(true)/issome*(namespec/thename)
+local sometext=(R("az","AZ","09")+S("+-."))^1
+local truevalue=P("+")*spaces*(sometext/istrue)
+local falsevalue=P("-")*spaces*(sometext/isfalse)
+local keyvalue=(C(sometext)*spaces*P("=")*spaces*C(sometext))/iskey
+local somevalue=sometext/istrue
+local subvalue=P("(")*(C(P(1-S("()"))^1)/issub)*P(")")
+local option=spaces*(keyvalue+falsevalue+truevalue+somevalue)*spaces
+local options=P(":")*spaces*(P(";")^0*option)^0
+local pattern=(filename_1+filename_2+fontname_1+fontname_2)*subvalue^0*crapspec^0*options^0
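+-- The pattern above parses the generic colon request syntax. As an illustrative example
+-- (hypothetical font file name), "file:lmroman10-regular:+liga;mode=node" would yield
+-- lookup="file", name="lmroman10-regular", liga=true and mode="node" in `list` below.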
+local function colonized(specification)
+ list={}
+ lpeg.match(pattern,specification.specification)
+ list.crap=nil
+ if list.name then
+ specification.name=list.name
+ list.name=nil
+ end
+ if list.lookup then
+ specification.lookup=list.lookup
+ list.lookup=nil
+ end
+ if list.sub then
+ specification.sub=list.sub
+ list.sub=nil
+ end
+ specification.features.normal=fonts.handlers.otf.features.normalize(list)
+ return specification
+end
+fonts.definers.registersplit(":",colonized,"cryptic")
+fonts.definers.registersplit("",colonized,"more cryptic")
+function fonts.definers.applypostprocessors(tfmdata)
+ local postprocessors=tfmdata.postprocessors
+ if postprocessors then
+ for i=1,#postprocessors do
+ local extrahash=postprocessors[i](tfmdata)
+ if type(extrahash)=="string" and extrahash~="" then
+ extrahash=string.gsub(string.lower(extrahash),"[^a-z]","-")
+ tfmdata.properties.fullname=string.format("%s-%s",tfmdata.properties.fullname,extrahash)
+ end
+ end
+ end
+ return tfmdata
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-fonts-ext']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+local otffeatures=fonts.constructors.newfeatures("otf")
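+-- This closure provides the generic (non-ConTeXt) implementations of the itlc, slant,
+-- extend, protrusion, expansion and reencode features, a minimal features.normalize,
+-- and the nametoslot helper.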
+local function initializeitlc(tfmdata,value)
+ if value then
+ local parameters=tfmdata.parameters
+ local italicangle=parameters.italicangle
+ if italicangle and italicangle~=0 then
+ local properties=tfmdata.properties
+ local factor=tonumber(value) or 1
+ properties.hasitalics=true
+ properties.autoitalicamount=factor*(parameters.uwidth or 40)/2
+ end
+ end
+end
+otffeatures.register {
+ name="itlc",
+ description="italic correction",
+ initializers={
+ base=initializeitlc,
+ node=initializeitlc,
+ }
+}
+local function initializeslant(tfmdata,value)
+ value=tonumber(value)
+ if not value then
+ value=0
+ elseif value>1 then
+ value=1
+ elseif value<-1 then
+ value=-1
+ end
+ tfmdata.parameters.slantfactor=value
+end
+otffeatures.register {
+ name="slant",
+ description="slant glyphs",
+ initializers={
+ base=initializeslant,
+ node=initializeslant,
+ }
+}
+local function initializeextend(tfmdata,value)
+ value=tonumber(value)
+ if not value then
+ value=0
+ elseif value>10 then
+ value=10
+ elseif value<-10 then
+ value=-10
+ end
+ tfmdata.parameters.extendfactor=value
+end
+otffeatures.register {
+ name="extend",
+ description="scale glyphs horizontally",
+ initializers={
+ base=initializeextend,
+ node=initializeextend,
+ }
+}
+fonts.protrusions=fonts.protrusions or {}
+fonts.protrusions.setups=fonts.protrusions.setups or {}
+local setups=fonts.protrusions.setups
+local function initializeprotrusion(tfmdata,value)
+ if value then
+ local setup=setups[value]
+ if setup then
+ local factor,left,right=setup.factor or 1,setup.left or 1,setup.right or 1
+ local emwidth=tfmdata.parameters.quad
+ tfmdata.parameters.protrusion={
+ auto=true,
+ }
+ for i,chr in next,tfmdata.characters do
+ local v,pl,pr=setup[i],nil,nil
+ if v then
+ pl,pr=v[1],v[2]
+ end
+ if pl and pl~=0 then chr.left_protruding=left*pl*factor end
+ if pr and pr~=0 then chr.right_protruding=right*pr*factor end
+ end
+ end
+ end
+end
+otffeatures.register {
+ name="protrusion",
+ description="shift characters into the left and or right margin",
+ initializers={
+ base=initializeprotrusion,
+ node=initializeprotrusion,
+ }
+}
+fonts.expansions=fonts.expansions or {}
+fonts.expansions.setups=fonts.expansions.setups or {}
+local setups=fonts.expansions.setups
+local function initializeexpansion(tfmdata,value)
+ if value then
+ local setup=setups[value]
+ if setup then
+ local factor=setup.factor or 1
+ tfmdata.parameters.expansion={
+ stretch=10*(setup.stretch or 0),
+ shrink=10*(setup.shrink or 0),
+ step=10*(setup.step or 0),
+ auto=true,
+ }
+ for i,chr in next,tfmdata.characters do
+ local v=setup[i]
+ if v and v~=0 then
+ chr.expansion_factor=v*factor
+ else
+ chr.expansion_factor=factor
+ end
+ end
+ end
+ end
+end
+otffeatures.register {
+ name="expansion",
+ description="apply hz optimization",
+ initializers={
+ base=initializeexpansion,
+ node=initializeexpansion,
+ }
+}
+function fonts.loggers.onetimemessage() end
+local byte=string.byte
+fonts.expansions.setups['default']={
+ stretch=2,shrink=2,step=.5,factor=1,
+ [byte('A')]=0.5,[byte('B')]=0.7,[byte('C')]=0.7,[byte('D')]=0.5,[byte('E')]=0.7,
+ [byte('F')]=0.7,[byte('G')]=0.5,[byte('H')]=0.7,[byte('K')]=0.7,[byte('M')]=0.7,
+ [byte('N')]=0.7,[byte('O')]=0.5,[byte('P')]=0.7,[byte('Q')]=0.5,[byte('R')]=0.7,
+ [byte('S')]=0.7,[byte('U')]=0.7,[byte('W')]=0.7,[byte('Z')]=0.7,
+ [byte('a')]=0.7,[byte('b')]=0.7,[byte('c')]=0.7,[byte('d')]=0.7,[byte('e')]=0.7,
+ [byte('g')]=0.7,[byte('h')]=0.7,[byte('k')]=0.7,[byte('m')]=0.7,[byte('n')]=0.7,
+ [byte('o')]=0.7,[byte('p')]=0.7,[byte('q')]=0.7,[byte('s')]=0.7,[byte('u')]=0.7,
+ [byte('w')]=0.7,[byte('z')]=0.7,
+ [byte('2')]=0.7,[byte('3')]=0.7,[byte('6')]=0.7,[byte('8')]=0.7,[byte('9')]=0.7,
+}
+fonts.protrusions.setups['default']={
+ factor=1,left=1,right=1,
+ [0x002C]={ 0,1 },
+ [0x002E]={ 0,1 },
+ [0x003A]={ 0,1 },
+ [0x003B]={ 0,1 },
+ [0x002D]={ 0,1 },
+ [0x2013]={ 0,0.50 },
+ [0x2014]={ 0,0.33 },
+ [0x3001]={ 0,1 },
+ [0x3002]={ 0,1 },
+ [0x060C]={ 0,1 },
+ [0x061B]={ 0,1 },
+ [0x06D4]={ 0,1 },
+}
+fonts.handlers.otf.features.normalize=function(t)
+ if t.rand then
+ t.rand="random"
+ end
+ return t
+end
+function fonts.helpers.nametoslot(name)
+ local t=type(name)
+ if t=="string" then
+ local tfmdata=fonts.hashes.identifiers[currentfont()]
+ local shared=tfmdata and tfmdata.shared
+ local fntdata=shared and shared.rawdata
+ return fntdata and fntdata.resources.unicodes[name]
+ elseif t=="number" then
+  return name
+ end
+end
+fonts.encodings=fonts.encodings or {}
+local reencodings={}
+fonts.encodings.reencodings=reencodings
+local function specialreencode(tfmdata,value)
+ local encoding=value and reencodings[value]
+ if encoding then
+ local temp={}
+ local char=tfmdata.characters
+ for k,v in next,encoding do
+ temp[k]=char[v]
+ end
+ for k,v in next,temp do
+ char[k]=temp[k]
+ end
+ return string.format("reencoded:%s",value)
+ end
+end
+local function reencode(tfmdata,value)
+ tfmdata.postprocessors=tfmdata.postprocessors or {}
+ table.insert(tfmdata.postprocessors,
+ function(tfmdata)
+ return specialreencode(tfmdata,value)
+ end
+ )
+end
+otffeatures.register {
+ name="reencode",
+ description="reencode characters",
+ manipulators={
+ base=reencode,
+ node=reencode,
+ }
+}
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-fonts-cbk']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+local nodes=nodes
+local traverse_id=node.traverse_id
+local glyph_code=nodes.nodecodes.glyph
+function nodes.handlers.characters(head)
+ local fontdata=fonts.hashes.identifiers
+ if fontdata then
+ local usedfonts,done,prevfont={},false,nil
+ for n in traverse_id(glyph_code,head) do
+ local font=n.font
+ if font~=prevfont then
+ prevfont=font
+ local used=usedfonts[font]
+ if not used then
+ local tfmdata=fontdata[font]
+ if tfmdata then
+ local shared=tfmdata.shared
+ if shared then
+ local processors=shared.processes
+ if processors and #processors>0 then
+ usedfonts[font]=processors
+ done=true
+ end
+ end
+ end
+ end
+ end
+ end
+ if done then
+ for font,processors in next,usedfonts do
+ for i=1,#processors do
+ local h,d=processors[i](head,font,0)
+ head,done=h or head,done or d
+ end
+ end
+ end
+ return head,true
+ else
+ return head,false
+ end
+end
+function nodes.simple_font_handler(head)
+ head=nodes.handlers.characters(head)
+ nodes.injections.handler(head)
+ nodes.handlers.protectglyphs(head)
+ head=node.ligaturing(head)
+ head=node.kerning(head)
+ return head
+end
+
+end -- closure
diff --git a/src/luaotfload-fonts-cbk.lua b/src/luaotfload-fonts-cbk.lua
new file mode 100644
index 0000000..9db94f6
--- /dev/null
+++ b/src/luaotfload-fonts-cbk.lua
@@ -0,0 +1,68 @@
+if not modules then modules = { } end modules ['luatex-fonts-cbk'] = {
+ version = 1.001,
+ comment = "companion to luatex-*.tex",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+
+local fonts = fonts
+local nodes = nodes
+
+-- Fonts: (might move to node-gef.lua)
+
+local traverse_id = node.traverse_id
+local glyph_code = nodes.nodecodes.glyph
+
+function nodes.handlers.characters(head)
+ local fontdata = fonts.hashes.identifiers
+ if fontdata then
+ local usedfonts, done, prevfont = { }, false, nil
+ for n in traverse_id(glyph_code,head) do
+ local font = n.font
+ if font ~= prevfont then
+ prevfont = font
+ local used = usedfonts[font]
+ if not used then
+ local tfmdata = fontdata[font] --
+ if tfmdata then
+ local shared = tfmdata.shared -- we need to check shared, only when same features
+ if shared then
+ local processors = shared.processes
+ if processors and #processors > 0 then
+ usedfonts[font] = processors
+ done = true
+ end
+ end
+ end
+ end
+ end
+ end
+ if done then
+ for font, processors in next, usedfonts do
+ for i=1,#processors do
+ local h, d = processors[i](head,font,0)
+ head, done = h or head, done or d
+ end
+ end
+ end
+ return head, true
+ else
+ return head, false
+ end
+end
+
+function nodes.simple_font_handler(head)
+-- lang.hyphenate(head)
+ head = nodes.handlers.characters(head)
+ nodes.injections.handler(head)
+ nodes.handlers.protectglyphs(head)
+ head = node.ligaturing(head)
+ head = node.kerning(head)
+ return head
+end
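+
+-- For illustration only: in a bare plain-TeX setup this handler could be
+-- hooked into the node processing callbacks roughly as sketched below
+-- (luaotfload itself does the wiring through luatexbase, so this is just a
+-- sketch using the standard LuaTeX callback names, nothing defined here):
+--
+-- callback.register("pre_linebreak_filter", nodes.simple_font_handler)
+-- callback.register("hpack_filter",         nodes.simple_font_handler)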
diff --git a/src/luaotfload-fonts-def.lua b/src/luaotfload-fonts-def.lua
new file mode 100644
index 0000000..0c2f0db
--- /dev/null
+++ b/src/luaotfload-fonts-def.lua
@@ -0,0 +1,97 @@
+if not modules then modules = { } end modules ['luatex-font-def'] = {
+ version = 1.001,
+ comment = "companion to luatex-*.tex",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+
+local fonts = fonts
+
+-- A bit of tuning for definitions.
+
+fonts.constructors.namemode = "specification" -- somehow latex needs this (changed name!) => will change into an overload
+
+-- tricky: we sort of bypass the parser and directly feed all into
+-- the sub parser
+
+function fonts.definers.getspecification(str)
+ return "", str, "", ":", str
+end
+
+-- the generic name parser (different from context!)
+
+local list = { }
+
+local function issome () list.lookup = 'name' end -- xetex mode prefers name (not in context!)
+local function isfile () list.lookup = 'file' end
+local function isname () list.lookup = 'name' end
+local function thename(s) list.name = s end
+local function issub (v) list.sub = v end
+local function iscrap (s) list.crap = string.lower(s) end
+local function iskey (k,v) list[k] = v end
+local function istrue (s) list[s] = true end
+local function isfalse(s) list[s] = false end
+
+local P, S, R, C = lpeg.P, lpeg.S, lpeg.R, lpeg.C
+
+local spaces = P(" ")^0
+local namespec = (1-S("/:("))^0 -- was: (1-S("/: ("))^0
+local crapspec = spaces * P("/") * (((1-P(":"))^0)/iscrap) * spaces
+local filename_1 = P("file:")/isfile * (namespec/thename)
+local filename_2 = P("[") * P(true)/isname * (((1-P("]"))^0)/thename) * P("]")
+local fontname_1 = P("name:")/isname * (namespec/thename)
+local fontname_2 = P(true)/issome * (namespec/thename)
+local sometext = (R("az","AZ","09") + S("+-."))^1
+local truevalue = P("+") * spaces * (sometext/istrue)
+local falsevalue = P("-") * spaces * (sometext/isfalse)
+local keyvalue = (C(sometext) * spaces * P("=") * spaces * C(sometext))/iskey
+local somevalue = sometext/istrue
+local subvalue = P("(") * (C(P(1-S("()"))^1)/issub) * P(")") -- for Kim
+local option = spaces * (keyvalue + falsevalue + truevalue + somevalue) * spaces
+local options = P(":") * spaces * (P(";")^0 * option)^0
+
+local pattern = (filename_1 + filename_2 + fontname_1 + fontname_2) * subvalue^0 * crapspec^0 * options^0
+
+local function colonized(specification) -- xetex mode
+ list = { }
+ lpeg.match(pattern,specification.specification)
+ list.crap = nil -- style not supported, maybe some day
+ if list.name then
+ specification.name = list.name
+ list.name = nil
+ end
+ if list.lookup then
+ specification.lookup = list.lookup
+ list.lookup = nil
+ end
+ if list.sub then
+ specification.sub = list.sub
+ list.sub = nil
+ end
+ specification.features.normal = fonts.handlers.otf.features.normalize(list)
+ return specification
+end
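+
+-- For illustration, a hypothetical request like
+--
+--   name:Some Font:+liga;-kern;mode=node;script=latn
+--
+-- is matched by the pattern above into list = { lookup = "name",
+-- name = "Some Font", liga = true, kern = false, mode = "node",
+-- script = "latn" }, which colonized() then folds into the specification
+-- (the font name is made up; any installed name works the same way).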
+
+fonts.definers.registersplit(":",colonized,"cryptic")
+fonts.definers.registersplit("", colonized,"more cryptic") -- catches \font\text=[names]
+
+function fonts.definers.applypostprocessors(tfmdata)
+ local postprocessors = tfmdata.postprocessors
+ if postprocessors then
+ for i=1,#postprocessors do
+ local extrahash = postprocessors[i](tfmdata) -- after scaling etc
+ if type(extrahash) == "string" and extrahash ~= "" then
+ -- e.g. a reencoding needs this
+                extrahash = string.gsub(string.lower(extrahash),"[^a-z]","-")
+                tfmdata.properties.fullname = string.format("%s-%s",tfmdata.properties.fullname,extrahash)
+ end
+ end
+ end
+ return tfmdata
+end
diff --git a/src/luaotfload-fonts-enc.lua b/src/luaotfload-fonts-enc.lua
new file mode 100644
index 0000000..e20c3a0
--- /dev/null
+++ b/src/luaotfload-fonts-enc.lua
@@ -0,0 +1,28 @@
+if not modules then modules = { } end modules ['luatex-font-enc'] = {
+ version = 1.001,
+ comment = "companion to luatex-*.tex",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+
+local fonts = fonts
+fonts.encodings = { }
+fonts.encodings.agl = { }
+
+setmetatable(fonts.encodings.agl, { __index = function(t,k)
+ if k == "unicodes" then
+ texio.write(" <loading (extended) adobe glyph list>")
+ local unicodes = dofile(resolvers.findfile("font-age.lua"))
+ fonts.encodings.agl = { unicodes = unicodes }
+ return unicodes
+ else
+ return nil
+ end
+end })
+
diff --git a/src/luaotfload-fonts-ext.lua b/src/luaotfload-fonts-ext.lua
new file mode 100644
index 0000000..b60d045
--- /dev/null
+++ b/src/luaotfload-fonts-ext.lua
@@ -0,0 +1,272 @@
+if not modules then modules = { } end modules ['luatex-fonts-ext'] = {
+ version = 1.001,
+ comment = "companion to luatex-*.tex",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+
+local fonts = fonts
+local otffeatures = fonts.constructors.newfeatures("otf")
+
+-- A few generic extensions.
+
+local function initializeitlc(tfmdata,value)
+ if value then
+        -- the magic 40 and its formula come from Dohyun Kim but we might need another guess
+ local parameters = tfmdata.parameters
+ local italicangle = parameters.italicangle
+ if italicangle and italicangle ~= 0 then
+ local properties = tfmdata.properties
+ local factor = tonumber(value) or 1
+ properties.hasitalics = true
+ properties.autoitalicamount = factor * (parameters.uwidth or 40)/2
+ end
+ end
+end
+
+otffeatures.register {
+ name = "itlc",
+ description = "italic correction",
+ initializers = {
+ base = initializeitlc,
+ node = initializeitlc,
+ }
+}
+
+-- slant and extend
+
+local function initializeslant(tfmdata,value)
+ value = tonumber(value)
+ if not value then
+ value = 0
+ elseif value > 1 then
+ value = 1
+ elseif value < -1 then
+ value = -1
+ end
+ tfmdata.parameters.slantfactor = value
+end
+
+otffeatures.register {
+ name = "slant",
+ description = "slant glyphs",
+ initializers = {
+ base = initializeslant,
+ node = initializeslant,
+ }
+}
+
+local function initializeextend(tfmdata,value)
+ value = tonumber(value)
+ if not value then
+ value = 0
+ elseif value > 10 then
+ value = 10
+ elseif value < -10 then
+ value = -10
+ end
+ tfmdata.parameters.extendfactor = value
+end
+
+otffeatures.register {
+ name = "extend",
+ description = "scale glyphs horizontally",
+ initializers = {
+ base = initializeextend,
+ node = initializeextend,
+ }
+}
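+
+-- For illustration, the three features registered above are requested per
+-- font, for instance (the font file name is made up):
+--
+-- \font\test=file:somefont.otf:itlc;slant=0.2;extend=1.1
+--
+-- itlc can be given as a switch or a numeric factor, while slant is clamped
+-- to [-1,1] and extend to [-10,10] by the initializers.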
+
+-- expansion and protrusion
+
+fonts.protrusions = fonts.protrusions or { }
+fonts.protrusions.setups = fonts.protrusions.setups or { }
+
+local setups = fonts.protrusions.setups
+
+local function initializeprotrusion(tfmdata,value)
+ if value then
+ local setup = setups[value]
+ if setup then
+ local factor, left, right = setup.factor or 1, setup.left or 1, setup.right or 1
+ local emwidth = tfmdata.parameters.quad
+ tfmdata.parameters.protrusion = {
+ auto = true,
+ }
+ for i, chr in next, tfmdata.characters do
+ local v, pl, pr = setup[i], nil, nil
+ if v then
+ pl, pr = v[1], v[2]
+ end
+ if pl and pl ~= 0 then chr.left_protruding = left *pl*factor end
+ if pr and pr ~= 0 then chr.right_protruding = right*pr*factor end
+ end
+ end
+ end
+end
+
+otffeatures.register {
+ name = "protrusion",
+ description = "shift characters into the left and or right margin",
+ initializers = {
+ base = initializeprotrusion,
+ node = initializeprotrusion,
+ }
+}
+
+fonts.expansions = fonts.expansions or { }
+fonts.expansions.setups = fonts.expansions.setups or { }
+
+local setups = fonts.expansions.setups
+
+local function initializeexpansion(tfmdata,value)
+ if value then
+ local setup = setups[value]
+ if setup then
+ local factor = setup.factor or 1
+ tfmdata.parameters.expansion = {
+ stretch = 10 * (setup.stretch or 0),
+ shrink = 10 * (setup.shrink or 0),
+ step = 10 * (setup.step or 0),
+ auto = true,
+ }
+ for i, chr in next, tfmdata.characters do
+ local v = setup[i]
+ if v and v ~= 0 then
+ chr.expansion_factor = v*factor
+ else -- can be option
+ chr.expansion_factor = factor
+ end
+ end
+ end
+ end
+end
+
+otffeatures.register {
+ name = "expansion",
+ description = "apply hz optimization",
+ initializers = {
+ base = initializeexpansion,
+ node = initializeexpansion,
+ }
+}
+
+-- left over
+
+function fonts.loggers.onetimemessage() end
+
+-- example vectors
+
+local byte = string.byte
+
+fonts.expansions.setups['default'] = {
+
+ stretch = 2, shrink = 2, step = .5, factor = 1,
+
+ [byte('A')] = 0.5, [byte('B')] = 0.7, [byte('C')] = 0.7, [byte('D')] = 0.5, [byte('E')] = 0.7,
+ [byte('F')] = 0.7, [byte('G')] = 0.5, [byte('H')] = 0.7, [byte('K')] = 0.7, [byte('M')] = 0.7,
+ [byte('N')] = 0.7, [byte('O')] = 0.5, [byte('P')] = 0.7, [byte('Q')] = 0.5, [byte('R')] = 0.7,
+ [byte('S')] = 0.7, [byte('U')] = 0.7, [byte('W')] = 0.7, [byte('Z')] = 0.7,
+ [byte('a')] = 0.7, [byte('b')] = 0.7, [byte('c')] = 0.7, [byte('d')] = 0.7, [byte('e')] = 0.7,
+ [byte('g')] = 0.7, [byte('h')] = 0.7, [byte('k')] = 0.7, [byte('m')] = 0.7, [byte('n')] = 0.7,
+ [byte('o')] = 0.7, [byte('p')] = 0.7, [byte('q')] = 0.7, [byte('s')] = 0.7, [byte('u')] = 0.7,
+ [byte('w')] = 0.7, [byte('z')] = 0.7,
+ [byte('2')] = 0.7, [byte('3')] = 0.7, [byte('6')] = 0.7, [byte('8')] = 0.7, [byte('9')] = 0.7,
+}
+
+fonts.protrusions.setups['default'] = {
+
+ factor = 1, left = 1, right = 1,
+
+ [0x002C] = { 0, 1 }, -- comma
+ [0x002E] = { 0, 1 }, -- period
+ [0x003A] = { 0, 1 }, -- colon
+ [0x003B] = { 0, 1 }, -- semicolon
+ [0x002D] = { 0, 1 }, -- hyphen
+ [0x2013] = { 0, 0.50 }, -- endash
+ [0x2014] = { 0, 0.33 }, -- emdash
+ [0x3001] = { 0, 1 }, -- ideographic comma 、
+ [0x3002] = { 0, 1 }, -- ideographic full stop 。
+ [0x060C] = { 0, 1 }, -- arabic comma ،
+ [0x061B] = { 0, 1 }, -- arabic semicolon ؛
+ [0x06D4] = { 0, 1 }, -- arabic full stop ۔
+
+}
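+
+-- For illustration, the 'default' vectors above are selected by name, for
+-- instance (the font file name is made up):
+--
+-- \font\test=file:somefont.otf:protrusion=default;expansion=default
+--
+-- and further setups can be added along the same lines; the 'quotes' setup
+-- below is only an example:
+--
+-- fonts.protrusions.setups['quotes'] = {
+--     factor = 1, left = 1, right = 1,
+--     [0x2018] = { 1, 0 }, -- left single quote protrudes into the left margin
+--     [0x2019] = { 0, 1 }, -- right single quote protrudes into the right margin
+-- }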
+
+-- normalizer
+
+fonts.handlers.otf.features.normalize = function(t)
+ if t.rand then
+ t.rand = "random"
+ end
+ return t
+end
+
+-- bonus
+
+function fonts.helpers.nametoslot(name)
+ local t = type(name)
+ if t == "string" then
+ local tfmdata = fonts.hashes.identifiers[currentfont()]
+ local shared = tfmdata and tfmdata.shared
+ local fntdata = shared and shared.rawdata
+ return fntdata and fntdata.resources.unicodes[name]
+ elseif t == "number" then
+        return name -- numbers are taken to be slots already
+ end
+end
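+
+-- For illustration: given a current font that provides a glyph named "f_i",
+-- something like
+--
+--   tex.sprint(fonts.helpers.nametoslot("f_i") or 0)
+--
+-- prints the slot of that glyph; the name is only an example, the available
+-- names depend entirely on the font.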
+
+-- \font\test=file:somefont:reencode=mymessup
+--
+-- fonts.encodings.reencodings.mymessup = {
+-- [109] = 110, -- m
+-- [110] = 109, -- n
+-- }
+
+fonts.encodings = fonts.encodings or { }
+local reencodings = { }
+fonts.encodings.reencodings = reencodings
+
+local function specialreencode(tfmdata,value)
+ -- we forget about kerns as we assume symbols and we
+    -- could issue a message if there are kerns but it's
+    -- a hack anyway so we don't care too much here
+ local encoding = value and reencodings[value]
+ if encoding then
+ local temp = { }
+ local char = tfmdata.characters
+ for k, v in next, encoding do
+ temp[k] = char[v]
+ end
+ for k, v in next, temp do
+ char[k] = temp[k]
+ end
+        -- if the same font is also used unreencoded luatex would get confused, so
+        -- we return an additional hash component for the fullname
+ return string.format("reencoded:%s",value)
+ end
+end
+
+local function reencode(tfmdata,value)
+ tfmdata.postprocessors = tfmdata.postprocessors or { }
+ table.insert(tfmdata.postprocessors,
+ function(tfmdata)
+ return specialreencode(tfmdata,value)
+ end
+ )
+end
+
+otffeatures.register {
+ name = "reencode",
+ description = "reencode characters",
+ manipulators = {
+ base = reencode,
+ node = reencode,
+ }
+}
diff --git a/src/luaotfload-fonts-inj.lua b/src/luaotfload-fonts-inj.lua
new file mode 100644
index 0000000..ae48150
--- /dev/null
+++ b/src/luaotfload-fonts-inj.lua
@@ -0,0 +1,526 @@
+if not modules then modules = { } end modules ['node-inj'] = {
+ version = 1.001,
+ comment = "companion to node-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+-- This is very experimental (this will change when we have luatex > .50 and
+-- a few pending thingies are available). Also, Idris needs to make a few more
+-- test fonts. Btw, future versions of luatex will have extended glyph properties
+-- that can be of help. Some optimizations can go away when we have faster machines.
+
+-- todo: make a special one for context
+
+local next = next
+local utfchar = utf.char
+
+local trace_injections = false trackers.register("nodes.injections", function(v) trace_injections = v end)
+
+local report_injections = logs.reporter("nodes","injections")
+
+local attributes, nodes, node = attributes, nodes, node
+
+fonts = fonts
+local fontdata = fonts.hashes.identifiers
+
+nodes.injections = nodes.injections or { }
+local injections = nodes.injections
+
+local nodecodes = nodes.nodecodes
+local glyph_code = nodecodes.glyph
+local kern_code = nodecodes.kern
+local nodepool = nodes.pool
+local newkern = nodepool.kern
+
+local traverse_id = node.traverse_id
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+
+local a_kernpair = attributes.private('kernpair')
+local a_ligacomp = attributes.private('ligacomp')
+local a_markbase = attributes.private('markbase')
+local a_markmark = attributes.private('markmark')
+local a_markdone = attributes.private('markdone')
+local a_cursbase = attributes.private('cursbase')
+local a_curscurs = attributes.private('curscurs')
+local a_cursdone = attributes.private('cursdone')
+
+-- This injector has been tested by Idris Samawi Hamid (several arabic fonts as well as
+-- the rather demanding Husayni font), Khaled Hosny (latin and arabic) and Kaj Eigner
+-- (arabic, hebrew and thai) and myself (whatever font I come across). I'm pretty sure
+-- that this code is not 100% okay but examples are needed to figure things out.
+
+function injections.installnewkern(nk)
+ newkern = nk or newkern
+end
+
+local cursives = { }
+local marks = { }
+local kerns = { }
+
+-- Currently we do gpos/kern in a somewhat unofficial way but when we have the extra fields in
+-- glyphnodes to manipulate ht/dp/wd explicitly I will provide an alternative; also, we
+-- can share tables.
+
+-- For the moment we pass the r2l key ... volt/arabtype tests .. idris: this needs
+-- checking with husayni (volt and fontforge).
+
+function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext)
+ local dx, dy = factor*(exit[1]-entry[1]), factor*(exit[2]-entry[2])
+ local ws, wn = tfmstart.width, tfmnext.width
+ local bound = #cursives + 1
+ start[a_cursbase] = bound
+ nxt[a_curscurs] = bound
+ cursives[bound] = { rlmode, dx, dy, ws, wn }
+ return dx, dy, bound
+end
+
+function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr)
+ local x, y, w, h = factor*spec[1], factor*spec[2], factor*spec[3], factor*spec[4]
+ -- dy = y - h
+ if x ~= 0 or w ~= 0 or y ~= 0 or h ~= 0 then
+ local bound = current[a_kernpair]
+ if bound then
+ local kb = kerns[bound]
+            -- inefficient, but singles have less data; weird anyway, needs checking
+ kb[2], kb[3], kb[4], kb[5] = (kb[2] or 0) + x, (kb[3] or 0) + y, (kb[4] or 0)+ w, (kb[5] or 0) + h
+ else
+ bound = #kerns + 1
+ current[a_kernpair] = bound
+ kerns[bound] = { rlmode, x, y, w, h, r2lflag, tfmchr.width }
+ end
+ return x, y, w, h, bound
+ end
+ return x, y, w, h -- no bound
+end
+
+function injections.setkern(current,factor,rlmode,x,tfmchr)
+ local dx = factor*x
+ if dx ~= 0 then
+ local bound = #kerns + 1
+ current[a_kernpair] = bound
+ kerns[bound] = { rlmode, dx }
+ return dx, bound
+ else
+ return 0, 0
+ end
+end
+
+function injections.setmark(start,base,factor,rlmode,ba,ma,index,baseismark) -- ba=baseanchor, ma=markanchor
+ local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2]) -- the index argument is no longer used but when this
+ local bound = base[a_markbase] -- fails again we should pass it
+ local index = 1
+ if bound then
+ local mb = marks[bound]
+ if mb then
+ -- if not index then index = #mb + 1 end
+ index = #mb + 1
+ mb[index] = { dx, dy, rlmode }
+ start[a_markmark] = bound
+ start[a_markdone] = index
+ return dx, dy, bound
+ else
+ report_injections("possible problem, %U is base mark without data (id %a)",base.char,bound)
+ end
+ end
+-- index = index or 1
+ index = index or 1
+ bound = #marks + 1
+ base[a_markbase] = bound
+ start[a_markmark] = bound
+ start[a_markdone] = index
+ marks[bound] = { [index] = { dx, dy, rlmode, baseismark } }
+ return dx, dy, bound
+end
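+
+-- The records built by the setters above have these shapes (resolved again
+-- in injections.handler below via the a_* attributes):
+--
+--   kerns[bound]    = { rlmode, x, y, w, h, r2lflag, width }       -- setpair
+--   kerns[bound]    = { rlmode, dx }                               -- setkern
+--   cursives[bound] = { rlmode, dx, dy, ws, wn }                   -- setcursive
+--   marks[bound]    = { [index] = { dx, dy, rlmode, baseismark } } -- setmark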
+
+local function dir(n)
+ return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset"
+end
+
+local function trace(head)
+ report_injections("begin run")
+ for n in traverse_id(glyph_code,head) do
+ if n.subtype < 256 then
+ local kp = n[a_kernpair]
+ local mb = n[a_markbase]
+ local mm = n[a_markmark]
+ local md = n[a_markdone]
+ local cb = n[a_cursbase]
+ local cc = n[a_curscurs]
+ local char = n.char
+ report_injections("font %s, char %U, glyph %c",n.font,char,char)
+ if kp then
+ local k = kerns[kp]
+ if k[3] then
+ report_injections(" pairkern: dir %a, x %p, y %p, w %p, h %p",dir(k[1]),k[2],k[3],k[4],k[5])
+ else
+ report_injections(" kern: dir %a, dx %p",dir(k[1]),k[2])
+ end
+ end
+ if mb then
+ report_injections(" markbase: bound %a",mb)
+ end
+ if mm then
+ local m = marks[mm]
+ if mb then
+ local m = m[mb]
+ if m then
+ report_injections(" markmark: bound %a, index %a, dx %p, dy %p",mm,md,m[1],m[2])
+ else
+ report_injections(" markmark: bound %a, missing index",mm)
+ end
+ else
+ m = m[1]
+ report_injections(" markmark: bound %a, dx %p, dy %p",mm,m and m[1],m and m[2])
+ end
+ end
+ if cb then
+ report_injections(" cursbase: bound %a",cb)
+ end
+ if cc then
+ local c = cursives[cc]
+ report_injections(" curscurs: bound %a, dir %a, dx %p, dy %p",cc,dir(c[1]),c[2],c[3])
+ end
+ end
+ end
+ report_injections("end run")
+end
+
+-- todo: reuse tables (i.e. no collection), but there will be extra fields anyway
+-- todo: check for attribute
+
+-- We can have a fast test on a font being processed, so we can check faster for marks etc
+-- but I'll make a context variant anyway.
+
+local function show_result(head)
+ local current = head
+ local skipping = false
+ while current do
+ local id = current.id
+ if id == glyph_code then
+ report_injections("char: %C, width %p, xoffset %p, yoffset %p",current.char,current.width,current.xoffset,current.yoffset)
+ skipping = false
+ elseif id == kern_code then
+ report_injections("kern: %p",current.kern)
+ skipping = false
+ elseif not skipping then
+ report_injections()
+ skipping = true
+ end
+ current = current.next
+ end
+end
+
+function injections.handler(head,where,keep)
+ local has_marks, has_cursives, has_kerns = next(marks), next(cursives), next(kerns)
+ if has_marks or has_cursives then
+ if trace_injections then
+ trace(head)
+ end
+ -- in the future variant we will not copy items but refs to tables
+ local done, ky, rl, valid, cx, wx, mk, nofvalid = false, { }, { }, { }, { }, { }, { }, 0
+ if has_kerns then -- move outside loop
+ local nf, tm = nil, nil
+ for n in traverse_id(glyph_code,head) do -- only needed for relevant fonts
+ if n.subtype < 256 then
+ nofvalid = nofvalid + 1
+ valid[nofvalid] = n
+ if n.font ~= nf then
+ nf = n.font
+ tm = fontdata[nf].resources.marks
+ end
+ if tm then
+ mk[n] = tm[n.char]
+ end
+ local k = n[a_kernpair]
+ if k then
+ local kk = kerns[k]
+ if kk then
+ local x, y, w, h = kk[2] or 0, kk[3] or 0, kk[4] or 0, kk[5] or 0
+ local dy = y - h
+ if dy ~= 0 then
+ ky[n] = dy
+ end
+ if w ~= 0 or x ~= 0 then
+ wx[n] = kk
+ end
+ rl[n] = kk[1] -- could move in test
+ end
+ end
+ end
+ end
+ else
+ local nf, tm = nil, nil
+ for n in traverse_id(glyph_code,head) do
+ if n.subtype < 256 then
+ nofvalid = nofvalid + 1
+ valid[nofvalid] = n
+ if n.font ~= nf then
+ nf = n.font
+ tm = fontdata[nf].resources.marks
+ end
+ if tm then
+ mk[n] = tm[n.char]
+ end
+ end
+ end
+ end
+ if nofvalid > 0 then
+ -- we can assume done == true because we have cursives and marks
+ local cx = { }
+ if has_kerns and next(ky) then
+ for n, k in next, ky do
+ n.yoffset = k
+ end
+ end
+ -- todo: reuse t and use maxt
+ if has_cursives then
+ local p_cursbase, p = nil, nil
+ -- since we need valid[n+1] we can also use a "while true do"
+ local t, d, maxt = { }, { }, 0
+ for i=1,nofvalid do -- valid == glyphs
+ local n = valid[i]
+ if not mk[n] then
+ local n_cursbase = n[a_cursbase]
+ if p_cursbase then
+ local n_curscurs = n[a_curscurs]
+ if p_cursbase == n_curscurs then
+ local c = cursives[n_curscurs]
+ if c then
+ local rlmode, dx, dy, ws, wn = c[1], c[2], c[3], c[4], c[5]
+ if rlmode >= 0 then
+ dx = dx - ws
+ else
+ dx = dx + wn
+ end
+ if dx ~= 0 then
+ cx[n] = dx
+ rl[n] = rlmode
+ end
+ -- if rlmode and rlmode < 0 then
+ dy = -dy
+ -- end
+ maxt = maxt + 1
+ t[maxt] = p
+ d[maxt] = dy
+ else
+ maxt = 0
+ end
+ end
+ elseif maxt > 0 then
+ local ny = n.yoffset
+ for i=maxt,1,-1 do
+ ny = ny + d[i]
+ local ti = t[i]
+ ti.yoffset = ti.yoffset + ny
+ end
+ maxt = 0
+ end
+ if not n_cursbase and maxt > 0 then
+ local ny = n.yoffset
+ for i=maxt,1,-1 do
+ ny = ny + d[i]
+ local ti = t[i]
+ ti.yoffset = ny
+ end
+ maxt = 0
+ end
+ p_cursbase, p = n_cursbase, n
+ end
+ end
+ if maxt > 0 then
+ local ny = n.yoffset
+ for i=maxt,1,-1 do
+ ny = ny + d[i]
+ local ti = t[i]
+ ti.yoffset = ny
+ end
+ maxt = 0
+ end
+ if not keep then
+ cursives = { }
+ end
+ end
+ if has_marks then
+ for i=1,nofvalid do
+ local p = valid[i]
+ local p_markbase = p[a_markbase]
+ if p_markbase then
+ local mrks = marks[p_markbase]
+ local nofmarks = #mrks
+ for n in traverse_id(glyph_code,p.next) do
+ local n_markmark = n[a_markmark]
+ if p_markbase == n_markmark then
+ local index = n[a_markdone] or 1
+ local d = mrks[index]
+ if d then
+ local rlmode = d[3]
+ --
+ local k = wx[p]
+ if k then
+ local x = k[2]
+ local w = k[4]
+ if w then
+ if rlmode and rlmode >= 0 then
+ -- kern(x) glyph(p) kern(w-x) mark(n)
+ n.xoffset = p.xoffset - p.width + d[1] - (w-x)
+ else
+ -- kern(w-x) glyph(p) kern(x) mark(n)
+ n.xoffset = p.xoffset - d[1] - x
+ end
+ else
+ if rlmode and rlmode >= 0 then
+ -- okay for husayni
+ n.xoffset = p.xoffset - p.width + d[1]
+ else
+ -- needs checking: is x ok here?
+ n.xoffset = p.xoffset - d[1] - x
+ end
+ end
+ else
+ if rlmode and rlmode >= 0 then
+ n.xoffset = p.xoffset - p.width + d[1]
+ else
+ n.xoffset = p.xoffset - d[1]
+ end
+ local w = n.width
+ if w ~= 0 then
+ insert_node_before(head,n,newkern(-w/2))
+ insert_node_after(head,n,newkern(-w/2))
+ end
+ end
+ -- --
+ if mk[p] then
+ n.yoffset = p.yoffset + d[2]
+ else
+ n.yoffset = n.yoffset + p.yoffset + d[2]
+ end
+ --
+ if nofmarks == 1 then
+ break
+ else
+ nofmarks = nofmarks - 1
+ end
+ end
+ else
+ -- KE: there can be <mark> <mkmk> <mark> sequences in ligatures
+ end
+ end
+ end
+ end
+ if not keep then
+ marks = { }
+ end
+ end
+ -- todo : combine
+ if next(wx) then
+ for n, k in next, wx do
+ -- only w can be nil (kernclasses), can be sped up when w == nil
+ local x = k[2]
+ local w = k[4]
+ if w then
+ local rl = k[1] -- r2l = k[6]
+ local wx = w - x
+ if rl < 0 then -- KE: don't use r2l here
+ if wx ~= 0 then
+ insert_node_before(head,n,newkern(wx)) -- type 0/2
+ end
+ if x ~= 0 then
+ insert_node_after (head,n,newkern(x)) -- type 0/2
+ end
+ else
+ if x ~= 0 then
+ insert_node_before(head,n,newkern(x)) -- type 0/2
+ end
+ if wx ~= 0 then
+ insert_node_after (head,n,newkern(wx)) -- type 0/2
+ end
+ end
+ elseif x ~= 0 then
+ -- this needs checking for rl < 0 but it is unlikely that a r2l script
+ -- uses kernclasses between glyphs so we're probably safe (KE has a
+ -- problematic font where marks interfere with rl < 0 in the previous
+ -- case)
+ insert_node_before(head,n,newkern(x)) -- a real font kern, type 0
+ end
+ end
+ end
+ if next(cx) then
+ for n, k in next, cx do
+ if k ~= 0 then
+ local rln = rl[n]
+ if rln and rln < 0 then
+ insert_node_before(head,n,newkern(-k)) -- type 0/2
+ else
+ insert_node_before(head,n,newkern(k)) -- type 0/2
+ end
+ end
+ end
+ end
+ if not keep then
+ kerns = { }
+ end
+ -- if trace_injections then
+ -- show_result(head)
+ -- end
+ return head, true
+ elseif not keep then
+ kerns, cursives, marks = { }, { }, { }
+ end
+ elseif has_kerns then
+ if trace_injections then
+ trace(head)
+ end
+ for n in traverse_id(glyph_code,head) do
+ if n.subtype < 256 then
+ local k = n[a_kernpair]
+ if k then
+ local kk = kerns[k]
+ if kk then
+ local rl, x, y, w = kk[1], kk[2] or 0, kk[3], kk[4]
+ if y and y ~= 0 then
+ n.yoffset = y -- todo: h ?
+ end
+ if w then
+ -- copied from above
+ -- local r2l = kk[6]
+ local wx = w - x
+ if rl < 0 then -- KE: don't use r2l here
+ if wx ~= 0 then
+ insert_node_before(head,n,newkern(wx))
+ end
+ if x ~= 0 then
+ insert_node_after (head,n,newkern(x))
+ end
+ else
+ if x ~= 0 then
+ insert_node_before(head,n,newkern(x))
+ end
+ if wx ~= 0 then
+ insert_node_after(head,n,newkern(wx))
+ end
+ end
+ else
+ -- simple (e.g. kernclass kerns)
+ if x ~= 0 then
+ insert_node_before(head,n,newkern(x))
+ end
+ end
+ end
+ end
+ end
+ end
+ if not keep then
+ kerns = { }
+ end
+ -- if trace_injections then
+ -- show_result(head)
+ -- end
+ return head, true
+ else
+ -- no tracing needed
+ end
+ return head, false
+end
diff --git a/src/luaotfload-fonts-lua.lua b/src/luaotfload-fonts-lua.lua
new file mode 100644
index 0000000..ec3fe38
--- /dev/null
+++ b/src/luaotfload-fonts-lua.lua
@@ -0,0 +1,33 @@
+if not modules then modules = { } end modules ['luatex-fonts-lua'] = {
+ version = 1.001,
+ comment = "companion to luatex-*.tex",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+
+local fonts = fonts
+fonts.formats.lua = "lua"
+
+function fonts.readers.lua(specification)
+ local fullname = specification.filename or ""
+ if fullname == "" then
+ local forced = specification.forced or ""
+ if forced ~= "" then
+ fullname = specification.name .. "." .. forced
+ else
+ fullname = specification.name
+ end
+ end
+ local fullname = resolvers.findfile(fullname) or ""
+ if fullname ~= "" then
+ local loader = loadfile(fullname)
+ loader = loader and loader()
+ return loader and loader(specification)
+ end
+end
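+
+-- For illustration: a file found by this reader is expected to return a
+-- function that, given the parsed specification, returns a tfm-like table.
+-- A hypothetical myfont.lua (the name and fields are only an example) could
+-- look like
+--
+-- return function (specification)
+--     return {
+--         name       = "myfont",
+--         parameters = { },
+--         characters = { },
+--         properties = { },
+--     }
+-- end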
diff --git a/src/luaotfload-fonts-otn.lua b/src/luaotfload-fonts-otn.lua
new file mode 100644
index 0000000..c57be5f
--- /dev/null
+++ b/src/luaotfload-fonts-otn.lua
@@ -0,0 +1,2848 @@
+if not modules then modules = { } end modules ['font-otn'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+-- preprocessors = { "nodes" }
+
+-- this is still somewhat preliminary and it will get better in due time;
+-- much functionality could only be implemented thanks to the husayni font
+-- of Idris Samawi Hamid to whom we dedicate this module.
+
+-- in retrospect it always looks easy but believe it or not, it took a lot
+-- of work to get proper open type support done: buggy fonts, fuzzy specs,
+-- specially made test fonts, many skype sessions between taco, idris and me,
+-- torture tests etc etc ... unfortunately the code does not show how much
+-- time it took ...
+
+-- todo:
+--
+-- kerning is probably not yet ok for latin around disc nodes (interesting challenge)
+-- extension infrastructure (for usage out of context)
+-- sorting features according to vendors/renderers
+-- alternative loop quitters
+-- check cursive and r2l
+-- find out where ignore-mark-classes went
+-- default features (per language, script)
+-- handle positions (we need example fonts)
+-- handle gpos_single (we might want an extra width field in glyph nodes because adding kerns might interfere)
+-- mark (to mark) code is still not what it should be (too messy but we need some more extreme husayni tests)
+-- remove some optimizations (when I have a faster machine)
+--
+-- maybe redo the lot some way (more context specific)
+
+--[[ldx--
+<p>This module is a bit more split up than I'd like but since we also want to test
+with plain <l n='tex'/> it has to be so. This module is part of <l n='context'/>
+and discussion about improvements and functionality mostly happens on the
+<l n='context'/> mailing list.</p>
+
+<p>The specification of OpenType is kind of vague. Apart from the lack of a proper
+free specification there's also the problem that Microsoft and Adobe
+may have their own interpretation of how and in what order to apply features.
+In general the Microsoft website has more detailed specifications and is a
+better reference. There is also some information in the FontForge help files.</p>
+
+<p>Because there is so much possible, fonts might contain bugs and/or be made to
+work with certain renderers. These may evolve over time which may have the side
+effect that suddenly fonts behave differently.</p>
+
+<p>After a lot of experiments (mostly by Taco, me and Idris) we're now at yet another
+implementation. Of course all errors are mine and of course the code can be
+improved. There are quite some optimizations going on here and processing speed
+is currently acceptable. Not all functions are implemented yet, often because I
+lack the fonts for testing. Many scripts are not yet supported either, but I will
+look into them as soon as <l n='context'/> users ask for it.</p>
+
+<p>Because there are different interpretations possible, I will extend the code
+with more (configurable) variants. I can also add hooks for users so that they can
+write their own extensions.</p>
+
+<p>Glyphs are indexed not by unicode but in their own way. This is because there is no
+relationship with unicode at all, apart from the fact that a font might cover certain
+ranges of characters. One character can have multiple shapes. However, at the
+<l n='tex'/> end we use unicode, so all extra glyphs are mapped into a private
+space. This is needed because we need to access them and <l n='tex'/> has to include
+them in the output eventually.</p>
+
+<p>The raw table as it comes from <l n='fontforge'/> gets reorganized to fit our needs.
+In <l n='context'/> that table is packed (similar tables are shared) and cached on disk
+so that successive runs can use the optimized table (after loading the table is
+unpacked). The flattening code used later is a prelude to an even more compact table
+format (and as such it keeps evolving).</p>
+
+<p>This module is sparsely documented because it is a moving target. The table format
+of the reader changes and we experiment a lot with different methods for supporting
+features.</p>
+
+<p>As with the <l n='afm'/> code, we may decide to store more information in the
+<l n='otf'/> table.</p>
+
+<p>Incrementing the version number will force a re-cache. We jump the number by one
+when there's a fix in the <l n='fontforge'/> library or <l n='lua'/> code that
+results in different tables.</p>
+--ldx]]--
+
+-- action handler chainproc chainmore comment
+--
+-- gsub_single ok ok ok
+-- gsub_multiple ok ok not implemented yet
+-- gsub_alternate ok ok not implemented yet
+-- gsub_ligature ok ok ok
+-- gsub_context ok --
+-- gsub_contextchain ok --
+-- gsub_reversecontextchain ok --
+-- chainsub -- ok
+-- reversesub -- ok
+-- gpos_mark2base ok ok
+-- gpos_mark2ligature ok ok
+-- gpos_mark2mark ok ok
+-- gpos_cursive ok untested
+-- gpos_single ok ok
+-- gpos_pair ok ok
+-- gpos_context ok --
+-- gpos_contextchain ok --
+--
+-- todo: contextpos and contextsub and class stuff
+--
+-- actions:
+--
+-- handler : actions triggered by lookup
+-- chainproc : actions triggered by contextual lookup
+-- chainmore : multiple substitutions triggered by contextual lookup (e.g. fij -> f + ij)
+--
+-- remark: the 'not implemented yet' variants will be done when we have fonts that use them
+-- remark: we need to check what to do with discretionaries
+
+-- We used to have independent hashes for lookups but as the tags are unique
+-- we now use only one hash. If needed we can have multiple again but in that
+-- case I will probably prefix (i.e. rename) the lookups in the cached font file.
+
+-- Todo: make plugin feature that operates on char/glyphnode arrays
+
+local concat, insert, remove = table.concat, table.insert, table.remove
+local gmatch, gsub, find, match, lower, strip = string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
+local type, next, tonumber, tostring = type, next, tonumber, tostring
+local lpegmatch = lpeg.match
+local random = math.random
+local formatters = string.formatters
+
+local logs, trackers, nodes, attributes = logs, trackers, nodes, attributes
+
+local registertracker = trackers.register
+
+local fonts = fonts
+local otf = fonts.handlers.otf
+
+local trace_lookups = false registertracker("otf.lookups", function(v) trace_lookups = v end)
+local trace_singles = false registertracker("otf.singles", function(v) trace_singles = v end)
+local trace_multiples = false registertracker("otf.multiples", function(v) trace_multiples = v end)
+local trace_alternatives = false registertracker("otf.alternatives", function(v) trace_alternatives = v end)
+local trace_ligatures = false registertracker("otf.ligatures", function(v) trace_ligatures = v end)
+local trace_contexts = false registertracker("otf.contexts", function(v) trace_contexts = v end)
+local trace_marks = false registertracker("otf.marks", function(v) trace_marks = v end)
+local trace_kerns = false registertracker("otf.kerns", function(v) trace_kerns = v end)
+local trace_cursive = false registertracker("otf.cursive", function(v) trace_cursive = v end)
+local trace_preparing = false registertracker("otf.preparing", function(v) trace_preparing = v end)
+local trace_bugs = false registertracker("otf.bugs", function(v) trace_bugs = v end)
+local trace_details = false registertracker("otf.details", function(v) trace_details = v end)
+local trace_applied = false registertracker("otf.applied", function(v) trace_applied = v end)
+local trace_steps = false registertracker("otf.steps", function(v) trace_steps = v end)
+local trace_skips = false registertracker("otf.skips", function(v) trace_skips = v end)
+local trace_directions = false registertracker("otf.directions", function(v) trace_directions = v end)
+
+local report_direct = logs.reporter("fonts","otf direct")
+local report_subchain = logs.reporter("fonts","otf subchain")
+local report_chain = logs.reporter("fonts","otf chain")
+local report_process = logs.reporter("fonts","otf process")
+local report_prepare = logs.reporter("fonts","otf prepare")
+local report_warning = logs.reporter("fonts","otf warning")
+
+registertracker("otf.verbose_chain", function(v) otf.setcontextchain(v and "verbose") end)
+registertracker("otf.normal_chain", function(v) otf.setcontextchain(v and "normal") end)
+
+registertracker("otf.replacements", "otf.singles,otf.multiples,otf.alternatives,otf.ligatures")
+registertracker("otf.positions","otf.marks,otf.kerns,otf.cursive")
+registertracker("otf.actions","otf.replacements,otf.positions")
+registertracker("otf.injections","nodes.injections")
+
+registertracker("*otf.sample","otf.steps,otf.actions,otf.analyzing")
+
+local insert_node_after = node.insert_after
+local delete_node = nodes.delete
+local copy_node = node.copy
+local find_node_tail = node.tail or node.slide
+local flush_node_list = node.flush_list
+local end_of_math = node.end_of_math
+
+local setmetatableindex = table.setmetatableindex
+
+local zwnj = 0x200C
+local zwj = 0x200D
+local wildcard = "*"
+local default = "dflt"
+
+local nodecodes = nodes.nodecodes
+local whatcodes = nodes.whatcodes
+local glyphcodes = nodes.glyphcodes
+local disccodes = nodes.disccodes
+
+local glyph_code = nodecodes.glyph
+local glue_code = nodecodes.glue
+local disc_code = nodecodes.disc
+local whatsit_code = nodecodes.whatsit
+local math_code = nodecodes.math
+
+local dir_code = whatcodes.dir
+local localpar_code = whatcodes.localpar
+
+local discretionary_code = disccodes.discretionary
+
+local ligature_code = glyphcodes.ligature
+
+local privateattribute = attributes.private
+
+-- Something is messed up: we have two mark / ligature indices, one at the injection
+-- end and one here ... this is based on KE's patches but there is something fishy
+-- there as I'm pretty sure that for husayni we need some connection (as it's much
+-- more complex than an average font) but I need proper examples of all cases, not
+-- of only some.
+
+local a_state = privateattribute('state')
+local a_markbase = privateattribute('markbase')
+local a_markmark = privateattribute('markmark')
+local a_markdone = privateattribute('markdone') -- assigned at the injection end
+local a_cursbase = privateattribute('cursbase')
+local a_curscurs = privateattribute('curscurs')
+local a_cursdone = privateattribute('cursdone')
+local a_kernpair = privateattribute('kernpair')
+local a_ligacomp = privateattribute('ligacomp') -- assigned here (ideally it should be combined)
+
+local injections = nodes.injections
+local setmark = injections.setmark
+local setcursive = injections.setcursive
+local setkern = injections.setkern
+local setpair = injections.setpair
+
+local markonce = true
+local cursonce = true
+local kernonce = true
+
+local fonthashes = fonts.hashes
+local fontdata = fonthashes.identifiers
+
+local otffeatures = fonts.constructors.newfeatures("otf")
+local registerotffeature = otffeatures.register
+
+local onetimemessage = fonts.loggers.onetimemessage or function() end
+
+otf.defaultnodealternate = "none" -- first last
+
+-- we share some vars here, after all, we have no nested lookups and less code
+
+local tfmdata = false
+local characters = false
+local descriptions = false
+local resources = false
+local marks = false
+local currentfont = false
+local lookuptable = false
+local anchorlookups = false
+local lookuptypes = false
+local handlers = { }
+local rlmode = 0
+local featurevalue = false
+
+-- head is always a whatsit so we can safely assume that head is not changed
+
+-- we use this for special testing and documentation
+
+local checkstep = (nodes and nodes.tracers and nodes.tracers.steppers.check) or function() end
+local registerstep = (nodes and nodes.tracers and nodes.tracers.steppers.register) or function() end
+local registermessage = (nodes and nodes.tracers and nodes.tracers.steppers.message) or function() end
+
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_direct(...)
+end
+
+local function logwarning(...)
+ report_direct(...)
+end
+
+local f_unicode = formatters["%U"]
+local f_uniname = formatters["%U (%s)"]
+local f_unilist = formatters["% t (% t)"]
+
+local function gref(n) -- currently the same as in font-otb
+ if type(n) == "number" then
+ local description = descriptions[n]
+ local name = description and description.name
+ if name then
+ return f_uniname(n,name)
+ else
+ return f_unicode(n)
+ end
+ elseif n then
+ local num, nam = { }, { }
+ for i=1,#n do
+ local ni = n[i]
+ if tonumber(ni) then -- later we will start at 2
+ local di = descriptions[ni]
+ num[i] = f_unicode(ni)
+ nam[i] = di and di.name or "-"
+ end
+ end
+ return f_unilist(num,nam)
+ else
+ return "<error in node mode tracing>"
+ end
+end
+
+local function cref(kind,chainname,chainlookupname,lookupname,index) -- not in the mood to alias f_
+ if index then
+ return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookupname,index)
+ elseif lookupname then
+ return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookupname)
+ elseif chainlookupname then
+ return formatters["feature %a, chain %a, sub %a"](kind,chainname,chainlookupname)
+ elseif chainname then
+ return formatters["feature %a, chain %a"](kind,chainname)
+ else
+ return formatters["feature %a"](kind)
+ end
+end
+
+local function pref(kind,lookupname)
+ return formatters["feature %a, lookup %a"](kind,lookupname)
+end
+
+-- We can assume that languages that use marks are not hyphenated. We can also assume
+-- that at most one discretionary is present.
+
+-- We do need components in funny kerning mode but maybe I can better reconstruct them
+-- as we do have the font components info available; removing components makes the
+-- previous code much simpler. Also, later on copying and freeing becomes easier.
+-- However, for arabic we need to keep them around for the sake of mark placement
+-- and indices.
+
+local function copy_glyph(g) -- next and prev are untouched !
+ local components = g.components
+ if components then
+ g.components = nil
+ local n = copy_node(g)
+ g.components = components
+ return n
+ else
+ return copy_node(g)
+ end
+end
+
+-- start is a mark and we need to keep that one
+
+local function markstoligature(kind,lookupname,head,start,stop,char)
+ if start == stop and start.char == char then
+ return head, start
+ else
+ local prev = start.prev
+ local next = stop.next
+ start.prev = nil
+ stop.next = nil
+ local base = copy_glyph(start)
+ if head == start then
+ head = base
+ end
+ base.char = char
+ base.subtype = ligature_code
+ base.components = start
+ if prev then
+ prev.next = base
+ end
+ if next then
+ next.prev = base
+ end
+ base.next = next
+ base.prev = prev
+ return head, base
+ end
+end
+
+-- The next code is somewhat complicated by the fact that some fonts can have ligatures made
+-- from ligatures that themselves have marks. This was identified by Kai in, for instance,
+-- arabtype: KAF LAM SHADDA ALEF FATHA (0x0643 0x0644 0x0651 0x0627 0x064E). This becomes
+-- KAF LAM-ALEF with a SHADDA on the first and a FATHA on the second component. In the next
+-- iteration this becomes a KAF-LAM-ALEF with a SHADDA on the second and a FATHA on the
+-- third component.
+
+local function getcomponentindex(start)
+ if start.id ~= glyph_code then
+ return 0
+ elseif start.subtype == ligature_code then
+ local i = 0
+ local components = start.components
+ while components do
+ i = i + getcomponentindex(components)
+ components = components.next
+ end
+ return i
+ elseif not marks[start.char] then
+ return 1
+ else
+ return 0
+ end
+end
+
+-- eventually we will do positioning in another way (needs additional w/h/d fields)
+
+local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound) -- brr head
+ if start == stop and start.char == char then
+ start.char = char
+ return head, start
+ end
+ local prev = start.prev
+ local next = stop.next
+ start.prev = nil
+ stop.next = nil
+ local base = copy_glyph(start)
+ if start == head then
+ head = base
+ end
+ base.char = char
+ base.subtype = ligature_code
+ base.components = start -- start can have components
+ if prev then
+ prev.next = base
+ end
+ if next then
+ next.prev = base
+ end
+ base.next = next
+ base.prev = prev
+ if not discfound then
+ local deletemarks = markflag ~= "mark"
+ local components = start
+ local baseindex = 0
+ local componentindex = 0
+ local head = base
+ local current = base
+ -- first we loop over the glyphs in start .. stop
+ while start do
+ local char = start.char
+ if not marks[char] then
+ baseindex = baseindex + componentindex
+ componentindex = getcomponentindex(start)
+ elseif not deletemarks then -- quite fishy
+ start[a_ligacomp] = baseindex + (start[a_ligacomp] or componentindex)
+ if trace_marks then
+ logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp])
+ end
+ head, current = insert_node_after(head,current,copy_node(start)) -- unlikely that mark has components
+ elseif trace_marks then
+ logwarning("%s: delete mark %s",pref(kind,lookupname),gref(char))
+ end
+ start = start.next
+ end
+ -- we can have one accent as part of a lookup and another following
+        -- local start = components -- was wrong (component scanning was introduced when more complex ligs in devanagari were added)
+ local start = current.next
+ while start and start.id == glyph_code do
+ local char = start.char
+ if marks[char] then
+ start[a_ligacomp] = baseindex + (start[a_ligacomp] or componentindex)
+ if trace_marks then
+ logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp])
+ end
+ else
+ break
+ end
+ start = start.next
+ end
+ end
+ return head, base
+end
+
+function handlers.gsub_single(head,start,kind,lookupname,replacement)
+ if trace_singles then
+ logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(start.char),gref(replacement))
+ end
+ start.char = replacement
+ return head, start, true
+end
+
+local function get_alternative_glyph(start,alternatives,value,trace_alternatives)
+ local n = #alternatives
+ if value == "random" then
+ local r = random(1,n)
+ return alternatives[r], trace_alternatives and formatters["value %a, taking %a"](value,r)
+ elseif value == "first" then
+ return alternatives[1], trace_alternatives and formatters["value %a, taking %a"](value,1)
+ elseif value == "last" then
+ return alternatives[n], trace_alternatives and formatters["value %a, taking %a"](value,n)
+ else
+ value = tonumber(value)
+ if type(value) ~= "number" then
+ return alternatives[1], trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
+ elseif value > n then
+ local defaultalt = otf.defaultnodealternate
+ if defaultalt == "first" then
+ return alternatives[n], trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
+ elseif defaultalt == "last" then
+ return alternatives[1], trace_alternatives and formatters["invalid value %s, taking %a"](value,n)
+ else
+ return false, trace_alternatives and formatters["invalid value %a, %s"](value,"out of range")
+ end
+ elseif value == 0 then
+ return start.char, trace_alternatives and formatters["invalid value %a, %s"](value,"no change")
+ elseif value < 1 then
+ return alternatives[1], trace_alternatives and formatters["invalid value %a, taking %a"](value,1)
+ else
+ return alternatives[value], trace_alternatives and formatters["value %a, taking %a"](value,value)
+ end
+ end
+end
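+
+-- For illustration, the value examined above normally comes from an
+-- alternate feature in the font request, e.g. (the font name is made up)
+--
+-- \font\test=file:somefont.otf:salt=2
+--
+-- picks the second alternate, salt=random picks one at random, and values
+-- outside the 1..#alternatives range fall back according to
+-- otf.defaultnodealternate.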
+
+local function multiple_glyphs(head,start,multiple,ignoremarks)
+ local nofmultiples = #multiple
+ if nofmultiples > 0 then
+ start.char = multiple[1]
+ if nofmultiples > 1 then
+ local sn = start.next
+ for k=2,nofmultiples do -- todo: use insert_node
+-- untested:
+--
+-- while ignoremarks and marks[sn.char] then
+-- local sn = sn.next
+-- end
+ local n = copy_node(start) -- ignore components
+ n.char = multiple[k]
+ n.next = sn
+ n.prev = start
+ if sn then
+ sn.prev = n
+ end
+ start.next = n
+ start = n
+ end
+ end
+ return head, start, true
+ else
+ if trace_multiples then
+ logprocess("no multiple for %s",gref(start.char))
+ end
+ return head, start, false
+ end
+end
+
+function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence)
+ local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue
+ local choice, comment = get_alternative_glyph(start,alternative,value,trace_alternatives)
+ if choice then
+ if trace_alternatives then
+ logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(start.char),choice,gref(choice),comment)
+ end
+ start.char = choice
+ else
+ if trace_alternatives then
+ logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(start.char),comment)
+ end
+ end
+ return head, start, true
+end
+
+function handlers.gsub_multiple(head,start,kind,lookupname,multiple,sequence)
+ if trace_multiples then
+ logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(start.char),gref(multiple))
+ end
+ return multiple_glyphs(head,start,multiple,sequence.flags[1])
+end
+
+function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
+ local s, stop, discfound = start.next, nil, false
+ local startchar = start.char
+ if marks[startchar] then
+ while s do
+ local id = s.id
+ if id == glyph_code and s.font == currentfont and s.subtype<256 then
+ local lg = ligature[s.char]
+ if lg then
+ stop = s
+ ligature = lg
+ s = s.next
+ else
+ break
+ end
+ else
+ break
+ end
+ end
+ if stop then
+ local lig = ligature.ligature
+ if lig then
+ if trace_ligatures then
+ local stopchar = stop.char
+ head, start = markstoligature(kind,lookupname,head,start,stop,lig)
+ logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
+ else
+ head, start = markstoligature(kind,lookupname,head,start,stop,lig)
+ end
+ return head, start, true
+ else
+ -- ok, goto next lookup
+ end
+ end
+ else
+ local skipmark = sequence.flags[1]
+ while s do
+ local id = s.id
+ if id == glyph_code and s.subtype<256 then
+ if s.font == currentfont then
+ local char = s.char
+ if skipmark and marks[char] then
+ s = s.next
+ else
+ local lg = ligature[char]
+ if lg then
+ stop = s
+ ligature = lg
+ s = s.next
+ else
+ break
+ end
+ end
+ else
+ break
+ end
+ elseif id == disc_code then
+ discfound = true
+ s = s.next
+ else
+ break
+ end
+ end
+ local lig = ligature.ligature
+ if lig then
+ if stop then
+ if trace_ligatures then
+ local stopchar = stop.char
+ head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
+ logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
+ else
+ head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
+ end
+ return head, start, true
+ else
+ -- weird but happens (in some arabic font)
+ start.char = lig
+ if trace_ligatures then
+ logprocess("%s: replacing %s by (no real) ligature %s case 3",pref(kind,lookupname),gref(startchar),gref(lig))
+ end
+ return head, start, true
+ end
+ else
+ -- weird but happens
+ end
+ end
+ return head, start, false
+end
+
+--[[ldx--
+<p>We get hits on a mark, but we're not sure whether it has to be applied, so
+we need to explicitly test for basechar, baselig and basemark entries.</p>
+--ldx]]--
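+
+-- A minimal illustrative sketch, not part of the loader: gpos_mark2base pairs
+-- the base glyph's 'basechar' anchors (from the descriptions) with the mark's
+-- anchors (from the lookup cache) by anchor name. The record layout shown here
+-- (plain x/y pairs) and the anchor name are assumptions for illustration only.
+
+do
+    local baseanchors = { ["Anchor-1"] = { 512, 0 } }   -- on the base glyph
+    local markanchors = { ["Anchor-1"] = { 128, -40 } } -- on the mark glyph
+    -- setmark combines the matching pair (scaled by tfmdata.parameters.factor)
+    -- into the mark's offset relative to the base.
+    assert(baseanchors["Anchor-1"] and markanchors["Anchor-1"])
+end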
+
+function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence)
+ local markchar = start.char
+ if marks[markchar] then
+ local base = start.prev -- [glyph] [start=mark]
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ local basechar = base.char
+ if marks[basechar] then
+ while true do
+ base = base.prev
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ basechar = base.char
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
+ end
+ return head, start, false
+ end
+ end
+ end
+ local baseanchors = descriptions[basechar]
+ if baseanchors then
+ baseanchors = baseanchors.anchors
+ end
+ if baseanchors then
+ local baseanchors = baseanchors['basechar']
+ if baseanchors then
+ local al = anchorlookups[lookupname]
+ for anchor,ba in next, baseanchors do
+ if al[anchor] then
+ local ma = markanchors[anchor]
+ if ma then
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
+ pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head, start, true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s, no matching anchors for mark %s and base %s",pref(kind,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ elseif trace_bugs then
+ -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no char",pref(kind,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ end
+ return head, start, false
+end
+
+function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence)
+ -- check chainpos variant
+ local markchar = start.char
+ if marks[markchar] then
+ local base = start.prev -- [glyph] [optional marks] [start=mark]
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ local basechar = base.char
+ if marks[basechar] then
+ while true do
+ base = base.prev
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ basechar = base.char
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
+ end
+ return head, start, false
+ end
+ end
+ end
+ local index = start[a_ligacomp]
+ local baseanchors = descriptions[basechar]
+ if baseanchors then
+ baseanchors = baseanchors.anchors
+ if baseanchors then
+ local baseanchors = baseanchors['baselig']
+ if baseanchors then
+ local al = anchorlookups[lookupname]
+ for anchor, ba in next, baseanchors do
+ if al[anchor] then
+ local ma = markanchors[anchor]
+ if ma then
+ ba = ba[index]
+ if ba then
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) -- index
+ if trace_marks then
+ logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
+ pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy)
+ end
+ return head, start, true
+ else
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and baselig %s with index %a",pref(kind,lookupname),gref(markchar),gref(basechar),index)
+ end
+ end
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and baselig %s",pref(kind,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no char",pref(kind,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ end
+ return head, start, false
+end
+
+function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence)
+ local markchar = start.char
+ if marks[markchar] then
+ local base = start.prev -- [glyph] [basemark] [start=mark]
+ local slc = start[a_ligacomp]
+ if slc then -- a rather messy loop ... needs checking with husayni
+ while base do
+ local blc = base[a_ligacomp]
+ if blc and blc ~= slc then
+ base = base.prev
+ else
+ break
+ end
+ end
+ end
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then -- subtype test can go
+ local basechar = base.char
+ local baseanchors = descriptions[basechar]
+ if baseanchors then
+ baseanchors = baseanchors.anchors
+ if baseanchors then
+ baseanchors = baseanchors['basemark']
+ if baseanchors then
+ local al = anchorlookups[lookupname]
+ for anchor,ba in next, baseanchors do
+ if al[anchor] then
+ local ma = markanchors[anchor]
+ if ma then
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
+ pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head, start, true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and basemark %s",pref(kind,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no mark",pref(kind,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ end
+ return head, start, false
+end
+
+function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence) -- to be checked
+ local alreadydone = cursonce and start[a_cursbase]
+ if not alreadydone then
+ local done = false
+ local startchar = start.char
+ if marks[startchar] then
+ if trace_cursive then
+ logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
+ end
+ else
+ local nxt = start.next
+ while not done and nxt and nxt.id == glyph_code and nxt.font == currentfont and nxt.subtype<256 do
+ local nextchar = nxt.char
+ if marks[nextchar] then
+ -- should not happen (maybe warning)
+ nxt = nxt.next
+ else
+ local entryanchors = descriptions[nextchar]
+ if entryanchors then
+ entryanchors = entryanchors.anchors
+ if entryanchors then
+ entryanchors = entryanchors['centry']
+ if entryanchors then
+ local al = anchorlookups[lookupname]
+ for anchor, entry in next, entryanchors do
+ if al[anchor] then
+ local exit = exitanchors[anchor]
+ if exit then
+ local dx, dy, bound = setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ if trace_cursive then
+ logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
+ end
+ done = true
+ break
+ end
+ end
+ end
+ end
+ end
+ elseif trace_bugs then
+ -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar))
+ onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
+ end
+ break
+ end
+ end
+ end
+ return head, start, done
+ else
+ if trace_cursive and trace_details then
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
+ end
+ return head, start, false
+ end
+end
+
+function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence)
+ local startchar = start.char
+ local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h)
+ end
+ return head, start, false
+end
+
+function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence)
+ -- todo: kerns in disc nodes: pre, post, replace -> loop over disc too
+ -- todo: kerns in components of ligatures
+ local snext = start.next
+ if not snext then
+ return head, start, false
+ else
+ local prev, done = start, false
+ local factor = tfmdata.parameters.factor
+ local lookuptype = lookuptypes[lookupname]
+ while snext and snext.id == glyph_code and snext.font == currentfont and snext.subtype<256 do
+ local nextchar = snext.char
+ local krn = kerns[nextchar]
+ if not krn and marks[nextchar] then
+ prev = snext
+ snext = snext.next
+ else
+ if not krn then
+ -- skip
+ elseif type(krn) == "table" then
+ if lookuptype == "pair" then -- probably not needed
+ local a, b = krn[2], krn[3]
+ if a and #a > 0 then
+ local startchar = start.char
+ local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ if b and #b > 0 then
+ local startchar = start.char
+ local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
+ if trace_kerns then
+ logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ else -- wrong ... position has different entries
+ report_process("%s: check this out (old kern stuff)",pref(kind,lookupname))
+ -- local a, b = krn[2], krn[6]
+ -- if a and a ~= 0 then
+ -- local k = setkern(snext,factor,rlmode,a)
+ -- if trace_kerns then
+ -- logprocess("%s: inserting first kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
+ -- end
+ -- end
+ -- if b and b ~= 0 then
+ -- logwarning("%s: ignoring second kern xoff %s",pref(kind,lookupname),b*factor)
+ -- end
+ end
+ done = true
+ elseif krn ~= 0 then
+ local k = setkern(snext,factor,rlmode,krn)
+ if trace_kerns then
+ logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
+ end
+ done = true
+ end
+ break
+ end
+ end
+ return head, start, done
+ end
+end
+
+--[[ldx--
+<p>I will implement multiple chain replacements once I run into a font that uses
+it. It's not that complex to handle.</p>
+--ldx]]--
+
+local chainmores = { }
+local chainprocs = { }
+
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_subchain(...)
+end
+
+local logwarning = report_subchain
+
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_chain(...)
+end
+
+local logwarning = report_chain
+
+-- We could share functions but that would lead to extra function calls with many
+-- arguments, redundant tests and confusing messages.
+
+function chainprocs.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname)
+ logwarning("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
+ return head, start, false
+end
+
+function chainmores.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname,n)
+ logprocess("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
+ return head, start, false
+end
+
+-- The reversesub is a special case, which is why we need to store the replacements
+-- in a somewhat odd way: there is no separate sub-lookup, the replacement comes with
+-- the chain lookup itself. It is meant mostly for dealing with Urdu.
+
+function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements)
+ local char = start.char
+ local replacement = replacements[char]
+ if replacement then
+ if trace_singles then
+ logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement))
+ end
+ start.char = replacement
+ return head, start, true
+ else
+ return head, start, false
+ end
+end
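+
+-- A minimal illustrative sketch, not part of the loader: for reversesub the
+-- replacement table is keyed directly by the matched character, with no
+-- intermediate sub-lookup (the codepoints below are made up).
+
+do
+    local replacements = { [0x0644] = 0xFEDF } -- matched char -> replacement glyph
+    assert(replacements[0x0644] == 0xFEDF)
+end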
+
+--[[ldx--
+<p>This chain stuff is somewhat tricky since we can have a sequence of actions to be
+applied: single, alternate, multiple or ligature where ligature can be an invalid
+one in the sense that it will replace multiple glyphs by one, but not necessarily one
+that looks like the combination (i.e. it is then the counterpart of multiple). For
+example, the following is valid:</p>
+
+<typing>
+<line>xxxabcdexxx [single a->A][multiple b->BCD][ligature cde->E] xxxABCDExxx</line>
+</typing>
+
+<p>Therefore we don't really do the replacement here already unless we have the
+single lookup case. The efficiency of the replacements can be improved by deleting
+as little as needed, but that would also make the code even more messy.</p>
+--ldx]]--
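+
+-- A minimal illustrative sketch, not part of the loader: for the example above
+-- the chain matches "abcde" and then the per-position sub-lookups (dispatched
+-- through chainmores below) are applied left to right.
+
+do
+    local steps = {
+        { "gsub_single"  , "a"  , "A"   },
+        { "gsub_multiple", "b"  , "BCD" },
+        { "gsub_ligature", "cde", "E"   },
+    }
+    assert(#steps == 3)
+end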
+
+-- local function delete_till_stop(head,start,stop,ignoremarks) -- keeps start
+-- local n = 1
+-- if start == stop then
+-- -- done
+-- elseif ignoremarks then
+-- repeat -- start x x m x x stop => start m
+-- local next = start.next
+-- if not marks[next.char] then
+-- local components = next.components
+-- if components then -- probably not needed
+-- flush_node_list(components)
+-- end
+-- head = delete_node(head,next)
+-- end
+-- n = n + 1
+-- until next == stop
+-- else -- start x x x stop => start
+-- repeat
+-- local next = start.next
+-- local components = next.components
+-- if components then -- probably not needed
+-- flush_node_list(components)
+-- end
+-- head = delete_node(head,next)
+-- n = n + 1
+-- until next == stop
+-- end
+-- return head, n
+-- end
+
+--[[ldx--
+<p>Here we replace start by a single variant. First we delete the rest of the
+match.</p>
+--ldx]]--
+
+function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
+ -- todo: marks ?
+ local current = start
+ local subtables = currentlookup.subtables
+ if #subtables > 1 then
+ logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," "))
+ end
+ while current do
+ if current.id == glyph_code then
+ local currentchar = current.char
+ local lookupname = subtables[1] -- only 1
+ local replacement = lookuphash[lookupname]
+ if not replacement then
+ if trace_bugs then
+ logwarning("%s: no single hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
+ end
+ else
+ replacement = replacement[currentchar]
+ if not replacement or replacement == "" then
+ if trace_bugs then
+ logwarning("%s: no single for %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar))
+ end
+ else
+ if trace_singles then
+ logprocess("%s: replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement))
+ end
+ current.char = replacement
+ end
+ end
+ return head, start, true
+ elseif current == stop then
+ break
+ else
+ current = current.next
+ end
+ end
+ return head, start, false
+end
+
+chainmores.gsub_single = chainprocs.gsub_single
+
+--[[ldx--
+<p>Here we replace start by a sequence of new glyphs. First we delete the rest of
+the match.</p>
+--ldx]]--
+
+function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ -- local head, n = delete_till_stop(head,start,stop)
+ local startchar = start.char
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local replacements = lookuphash[lookupname]
+ if not replacements then
+ if trace_bugs then
+ logwarning("%s: no multiple hits",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ else
+ replacements = replacements[startchar]
+ if not replacements or replacements == "" then
+ if trace_bugs then
+ logwarning("%s: no multiple for %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar))
+ end
+ else
+ if trace_multiples then
+ logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements))
+ end
+ return multiple_glyphs(head,start,replacements,currentlookup.flags[1])
+ end
+ end
+ return head, start, false
+end
+
+chainmores.gsub_multiple = chainprocs.gsub_multiple
+
+--[[ldx--
+<p>Here we replace start by a new glyph. First we delete the rest of the match.</p>
+--ldx]]--
+
+-- char_1 mark_1 -> char_x mark_1 (ignore marks)
+-- char_1 mark_1 -> char_x
+
+-- to be checked: do we always have just one glyph?
+-- we can also have alternates for marks
+-- marks come last anyway
+-- are there cases where we need to delete the mark
+
+function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local current = start
+ local subtables = currentlookup.subtables
+ local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue
+ while current do
+ if current.id == glyph_code then -- is this check needed?
+ local currentchar = current.char
+ local lookupname = subtables[1]
+ local alternatives = lookuphash[lookupname]
+ if not alternatives then
+ if trace_bugs then
+ logwarning("%s: no alternative hit",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ else
+ alternatives = alternatives[currentchar]
+ if alternatives then
+ local choice, comment = get_alternative_glyph(current,alternatives,value,trace_alternatives)
+ if choice then
+ if trace_alternatives then
+ logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(char),choice,gref(choice),comment)
+ end
+ start.char = choice
+ else
+ if trace_alternatives then
+ logwarning("%s: no variant %a for %s, %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(char),comment)
+ end
+ end
+ elseif trace_bugs then
+ logwarning("%s: no alternative for %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar),comment)
+ end
+ end
+ return head, start, true
+ elseif current == stop then
+ break
+ else
+ current = current.next
+ end
+ end
+ return head, start, false
+end
+
+chainmores.gsub_alternate = chainprocs.gsub_alternate
+
+--[[ldx--
+<p>When we replace ligatures we use a helper that handles the marks. I might change
+this function (move code inline and handle the marks by a separate function). We
+assume rather stupid ligatures (no complex disc nodes).</p>
+--ldx]]--
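+
+-- A minimal illustrative sketch, not part of the loader: ligature data is
+-- cached as a trie of successive characters that ends in a 'ligature' field
+-- (this is the shape built by the 'ligature' action in prepare_lookups further
+-- down); the codepoints are just the usual f + i -> fi example.
+
+do
+    local ligatures = {
+        [0x0066] = {                          -- f
+            [0x0069] = { ligature = 0xFB01 }, -- f + i -> fi
+        },
+    }
+    assert(ligatures[0x0066][0x0069].ligature == 0xFB01)
+end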
+
+function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
+ local startchar = start.char
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local ligatures = lookuphash[lookupname]
+ if not ligatures then
+ if trace_bugs then
+ logwarning("%s: no ligature hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
+ end
+ else
+ ligatures = ligatures[startchar]
+ if not ligatures then
+ if trace_bugs then
+ logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
+ end
+ else
+ local s = start.next
+ local discfound = false
+ local last = stop
+ local nofreplacements = 0
+ local skipmark = currentlookup.flags[1]
+ while s do
+ local id = s.id
+ if id == disc_code then
+ s = s.next
+ discfound = true
+ else
+ local schar = s.char
+ if skipmark and marks[schar] then -- marks
+ s = s.next
+ else
+ local lg = ligatures[schar]
+ if lg then
+ ligatures, last, nofreplacements = lg, s, nofreplacements + 1
+ if s == stop then
+ break
+ else
+ s = s.next
+ end
+ else
+ break
+ end
+ end
+ end
+ end
+ local l2 = ligatures.ligature
+ if l2 then
+ if chainindex then
+ stop = last
+ end
+ if trace_ligatures then
+ if start == stop then
+ logprocess("%s: replacing character %s by ligature %s case 3",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2))
+ else
+ logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char),gref(l2))
+ end
+ end
+ head, start = toligature(kind,lookupname,head,start,stop,l2,currentlookup.flags[1],discfound)
+ return head, start, true, nofreplacements
+ elseif trace_bugs then
+ if start == stop then
+ logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
+ else
+ logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char))
+ end
+ end
+ end
+ end
+ return head, start, false, 0
+end
+
+chainmores.gsub_ligature = chainprocs.gsub_ligature
+
+function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local markchar = start.char
+ if marks[markchar] then
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local markanchors = lookuphash[lookupname]
+ if markanchors then
+ markanchors = markanchors[markchar]
+ end
+ if markanchors then
+ local base = start.prev -- [glyph] [start=mark]
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ local basechar = base.char
+ if marks[basechar] then
+ while true do
+ base = base.prev
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ basechar = base.char
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
+ end
+ return head, start, false
+ end
+ end
+ end
+ local baseanchors = descriptions[basechar].anchors
+ if baseanchors then
+ local baseanchors = baseanchors['basechar']
+ if baseanchors then
+ local al = anchorlookups[lookupname]
+ for anchor,ba in next, baseanchors do
+ if al[anchor] then
+ local ma = markanchors[anchor]
+ if ma then
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
+ cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head, start, true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s, no matching anchors for mark %s and base %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no char",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ end
+ return head, start, false
+end
+
+function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local markchar = start.char
+ if marks[markchar] then
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local markanchors = lookuphash[lookupname]
+ if markanchors then
+ markanchors = markanchors[markchar]
+ end
+ if markanchors then
+ local base = start.prev -- [glyph] [optional marks] [start=mark]
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ local basechar = base.char
+ if marks[basechar] then
+ while true do
+ base = base.prev
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
+ basechar = base.char
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",cref(kind,chainname,chainlookupname,lookupname),markchar)
+ end
+ return head, start, false
+ end
+ end
+ end
+ -- todo: like marks a ligatures hash
+ local index = start[a_ligacomp]
+ local baseanchors = descriptions[basechar].anchors
+ if baseanchors then
+ local baseanchors = baseanchors['baselig']
+ if baseanchors then
+ local al = anchorlookups[lookupname]
+ for anchor,ba in next, baseanchors do
+ if al[anchor] then
+ local ma = markanchors[anchor]
+ if ma then
+ ba = ba[index]
+ if ba then
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) -- index
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
+ cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),index,dx,dy)
+ end
+ return head, start, true
+ end
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and baselig %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ logwarning("feature %s, lookup %s: prev node is no char",kind,lookupname)
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ end
+ return head, start, false
+end
+
+function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local markchar = start.char
+ if marks[markchar] then
+ -- local alreadydone = markonce and start[a_markmark]
+ -- if not alreadydone then
+ -- local markanchors = descriptions[markchar].anchors markanchors = markanchors and markanchors.mark
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local markanchors = lookuphash[lookupname]
+ if markanchors then
+ markanchors = markanchors[markchar]
+ end
+ if markanchors then
+ local base = start.prev -- [glyph] [basemark] [start=mark]
+ local slc = start[a_ligacomp]
+ if slc then -- a rather messy loop ... needs checking with husayni
+ while base do
+ local blc = base[a_ligacomp]
+ if blc and blc ~= slc then
+ base = base.prev
+ else
+ break
+ end
+ end
+ end
+ if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then -- subtype test can go
+ local basechar = base.char
+ local baseanchors = descriptions[basechar].anchors
+ if baseanchors then
+ baseanchors = baseanchors['basemark']
+ if baseanchors then
+ local al = anchorlookups[lookupname]
+ for anchor,ba in next, baseanchors do
+ if al[anchor] then
+ local ma = markanchors[anchor]
+ if ma then
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
+ cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head, start, true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and basemark %s",gref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
+ -- elseif trace_marks and trace_details then
+ -- logprocess("%s, mark %s is already bound (n=%s), ignoring mark2mark",pref(kind,lookupname),gref(markchar),alreadydone)
+ -- end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ end
+ return head, start, false
+end
+
+function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local alreadydone = cursonce and start[a_cursbase]
+ if not alreadydone then
+ local startchar = start.char
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local exitanchors = lookuphash[lookupname]
+ if exitanchors then
+ exitanchors = exitanchors[startchar]
+ end
+ if exitanchors then
+ local done = false
+ if marks[startchar] then
+ if trace_cursive then
+ logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
+ end
+ else
+ local nxt = start.next
+ while not done and nxt and nxt.id == glyph_code and nxt.font == currentfont and nxt.subtype<256 do
+ local nextchar = nxt.char
+ if marks[nextchar] then
+ -- should not happen (maybe warning)
+ nxt = nxt.next
+ else
+ local entryanchors = descriptions[nextchar]
+ if entryanchors then
+ entryanchors = entryanchors.anchors
+ if entryanchors then
+ entryanchors = entryanchors['centry']
+ if entryanchors then
+ local al = anchorlookups[lookupname]
+ for anchor, entry in next, entryanchors do
+ if al[anchor] then
+ local exit = exitanchors[anchor]
+ if exit then
+ local dx, dy, bound = setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ if trace_cursive then
+ logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
+ end
+ done = true
+ break
+ end
+ end
+ end
+ end
+ end
+ elseif trace_bugs then
+ -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar))
+ onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
+ end
+ break
+ end
+ end
+ end
+ return head, start, done
+ else
+ if trace_cursive and trace_details then
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
+ end
+ return head, start, false
+ end
+ end
+ return head, start, false
+end
+
+function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
+ -- untested .. needs checking for the new model
+ local startchar = start.char
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local kerns = lookuphash[lookupname]
+ if kerns then
+ kerns = kerns[startchar] -- needed ?
+ if kerns then
+ local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h)
+ end
+ end
+ end
+ return head, start, false
+end
+
+chainmores.gpos_single = chainprocs.gpos_single -- okay?
+
+-- when machines become faster I will make a shared function
+
+function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
+ local snext = start.next
+ if snext then
+ local startchar = start.char
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local kerns = lookuphash[lookupname]
+ if kerns then
+ kerns = kerns[startchar]
+ if kerns then
+ local lookuptype = lookuptypes[lookupname]
+ local prev, done = start, false
+ local factor = tfmdata.parameters.factor
+ while snext and snext.id == glyph_code and snext.font == currentfont and snext.subtype<256 do
+ local nextchar = snext.char
+ local krn = kerns[nextchar]
+ if not krn and marks[nextchar] then
+ prev = snext
+ snext = snext.next
+ else
+ if not krn then
+ -- skip
+ elseif type(krn) == "table" then
+ if lookuptype == "pair" then
+ local a, b = krn[2], krn[3]
+ if a and #a > 0 then
+ local startchar = start.char
+ local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ if b and #b > 0 then
+ local startchar = start.char
+ local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
+ if trace_kerns then
+ logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ else
+ report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname))
+ local a, b = krn[2], krn[6]
+ if a and a ~= 0 then
+ local k = setkern(snext,factor,rlmode,a)
+ if trace_kerns then
+ logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
+ end
+ end
+ if b and b ~= 0 then
+ logwarning("%s: ignoring second kern xoff %s",cref(kind,chainname,chainlookupname),b*factor)
+ end
+ end
+ done = true
+ elseif krn ~= 0 then
+ local k = setkern(snext,factor,rlmode,krn)
+ if trace_kerns then
+ logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
+ end
+ done = true
+ end
+ break
+ end
+ end
+ return head, start, done
+ end
+ end
+ end
+ return head, start, false
+end
+
+chainmores.gpos_pair = chainprocs.gpos_pair -- okay?
+
+-- what pointer to return, spec says stop
+-- to be discussed ... is bidi changer a space?
+-- elseif char == zwnj and sequence[n][32] then -- brrr
+
+-- somehow l or f is global
+-- we don't need to pass the currentcontext, saves a bit
+-- make a slow variant that can be activated, with more tracing
+
+local function show_skip(kind,chainname,char,ck,class)
+ if ck[9] then
+ logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a, %a => %a",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10])
+ else
+ logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a",cref(kind,chainname),gref(char),class,ck[1],ck[2])
+ end
+end
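+
+-- A minimal illustrative sketch, not part of the loader: each entry in
+-- 'contexts' is an array-like rule record; the fields read below are roughly
+-- ck[1] rule, ck[2] lookuptype, ck[3] the sequence of coverage sets, ck[4] and
+-- ck[5] the f..l range of the current part, ck[6] the chain lookup names and
+-- ck[7] the reverse replacements. The concrete values here are made up.
+
+do
+    local ck = {
+        1, "chainsub",           -- rule, lookuptype
+        { { [0x0061] = true } }, -- one coverage set, matching "a"
+        1, 1,                    -- f, l
+        { "somechainlookup" },   -- hypothetical chain lookup name
+    }
+    assert(ck[4] == 1 and ck[5] == 1)
+end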
+
+local function normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash)
+ -- local rule, lookuptype, sequence, f, l, lookups = ck[1], ck[2] ,ck[3], ck[4], ck[5], ck[6]
+ local flags = sequence.flags
+ local done = false
+ local skipmark = flags[1]
+ local skipligature = flags[2]
+ local skipbase = flags[3]
+ local someskip = skipmark or skipligature or skipbase -- could be stored in flags for a fast test (hm, flags could be false !)
+ local markclass = sequence.markclass -- todo, first we need a proper test
+ local skipped = false
+ for k=1,#contexts do
+ local match = true
+ local current = start
+ local last = start
+ local ck = contexts[k]
+ local seq = ck[3]
+ local s = #seq
+ -- f..l = mid string
+ if s == 1 then
+ -- never happens
+ match = current.id == glyph_code and current.font == currentfont and current.subtype<256 and seq[1][current.char]
+ else
+ -- maybe we need a better space check (maybe check for glue or category or combination)
+ -- we cannot optimize for n=2 because there can be disc nodes
+ local f, l = ck[4], ck[5]
+ -- current match
+ if f == 1 and f == l then -- current only
+ -- already a hit
+ -- match = true
+ else -- before/current/after | before/current | current/after
+ -- no need to test first hit (to be optimized)
+ if f == l then -- new, else last out of sync (f is > 1)
+ -- match = true
+ else
+ local n = f + 1
+ last = last.next
+ while n <= l do
+ if last then
+ local id = last.id
+ if id == glyph_code then
+ if last.font == currentfont and last.subtype<256 then
+ local char = last.char
+ local ccd = descriptions[char]
+ if ccd then
+ local class = ccd.class
+ if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
+ skipped = true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ last = last.next
+ elseif seq[n][char] then
+ if n < l then
+ last = last.next
+ end
+ n = n + 1
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ elseif id == disc_code then
+ last = last.next
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ end
+ end
+ end
+ -- before
+ if match and f > 1 then
+ local prev = start.prev
+ if prev then
+ local n = f-1
+ while n >= 1 do
+ if prev then
+ local id = prev.id
+ if id == glyph_code then
+ if prev.font == currentfont and prev.subtype<256 then -- normal char
+ local char = prev.char
+ local ccd = descriptions[char]
+ if ccd then
+ local class = ccd.class
+ if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
+ skipped = true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ elseif seq[n][char] then
+ n = n -1
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ elseif id == disc_code then
+ -- skip it (a disc node)
+ elseif seq[n][32] then
+ n = n -1
+ else
+ match = false
+ break
+ end
+ prev = prev.prev
+ elseif seq[n][32] then -- somewhat special, as zapfino can have many preceding spaces
+ n = n -1
+ else
+ match = false
+ break
+ end
+ end
+ elseif f == 2 then
+ match = seq[1][32]
+ else
+ for n=f-1,1,-1 do
+ if not seq[n][32] then
+ match = false
+ break
+ end
+ end
+ end
+ end
+ -- after
+ if match and s > l then
+ local current = last and last.next
+ if current then
+ -- removed optimization for s-l == 1, we have to deal with marks anyway
+ local n = l + 1
+ while n <= s do
+ if current then
+ local id = current.id
+ if id == glyph_code then
+ if current.font == currentfont and current.subtype<256 then -- normal char
+ local char = current.char
+ local ccd = descriptions[char]
+ if ccd then
+ local class = ccd.class
+ if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
+ skipped = true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ elseif seq[n][char] then
+ n = n + 1
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ elseif id == disc_code then
+ -- skip it (a disc node)
+ elseif seq[n][32] then -- brrr
+ n = n + 1
+ else
+ match = false
+ break
+ end
+ current = current.next
+ elseif seq[n][32] then
+ n = n + 1
+ else
+ match = false
+ break
+ end
+ end
+ elseif s-l == 1 then
+ match = seq[s][32]
+ else
+ for n=l+1,s do
+ if not seq[n][32] then
+ match = false
+ break
+ end
+ end
+ end
+ end
+ end
+ if match then
+ -- ck == currentcontext
+ if trace_contexts then
+ local rule, lookuptype, f, l = ck[1], ck[2], ck[4], ck[5]
+ local char = start.char
+ if ck[9] then
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a, %a => %a",
+ cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
+ else
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a",
+ cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype)
+ end
+ end
+ local chainlookups = ck[6]
+ if chainlookups then
+ local nofchainlookups = #chainlookups
+ -- we can speed this up if needed
+ if nofchainlookups == 1 then
+ local chainlookupname = chainlookups[1]
+ local chainlookup = lookuptable[chainlookupname]
+ if chainlookup then
+ local cp = chainprocs[chainlookup.type]
+ if cp then
+ local ok
+ head, start, ok = cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
+ if ok then
+ done = true
+ end
+ else
+ logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
+ end
+ else -- shouldn't happen
+ logprocess("%s is not yet supported",cref(kind,chainname,chainlookupname))
+ end
+ else
+ local i = 1
+ repeat
+ if skipped then
+ while true do
+ local char = start.char
+ local ccd = descriptions[char]
+ if ccd then
+ local class = ccd.class
+ if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
+ start = start.next
+ else
+ break
+ end
+ else
+ break
+ end
+ end
+ end
+ local chainlookupname = chainlookups[i]
+ local chainlookup = lookuptable[chainlookupname]
+ if not chainlookup then
+ -- okay, n matches, < n replacements
+ i = i + 1
+ else
+ local cp = chainmores[chainlookup.type]
+ if not cp then
+ -- actually an error
+ logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
+ i = i + 1
+ else
+ local ok, n
+ head, start, ok, n = cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence)
+ -- messy since last can be changed !
+ if ok then
+ done = true
+ -- skip next one(s) if ligature
+ i = i + (n or 1)
+ else
+ i = i + 1
+ end
+ end
+ end
+ if start then
+ start = start.next
+ else
+ -- weird
+ end
+ until i > nofchainlookups
+ end
+ else
+ local replacements = ck[7]
+ if replacements then
+ head, start, done = chainprocs.reversesub(head,start,last,kind,chainname,ck,lookuphash,replacements) -- sequence
+ else
+ done = true -- can be meant to be skipped
+ if trace_contexts then
+ logprocess("%s: skipping match",cref(kind,chainname))
+ end
+ end
+ end
+ end
+ end
+ return head, start, done
+end
+
+-- Because we want to keep this elsewhere (and because speed is less of an issue) we
+-- pass the font id so that the verbose variant can access the relevant helper tables.
+
+local verbose_handle_contextchain = function(font,...)
+ logwarning("no verbose handler installed, reverting to 'normal'")
+ otf.setcontextchain()
+ return normal_handle_contextchain(...)
+end
+
+otf.chainhandlers = {
+ normal = normal_handle_contextchain,
+ verbose = verbose_handle_contextchain,
+}
+
+function otf.setcontextchain(method)
+ if not method or method == "normal" or not otf.chainhandlers[method] then
+ if handlers.contextchain then -- no need for a message while making the format
+ logwarning("installing normal contextchain handler")
+ end
+ handlers.contextchain = normal_handle_contextchain
+ else
+ logwarning("installing contextchain handler %a",method)
+ local handler = otf.chainhandlers[method]
+ handlers.contextchain = function(...)
+ return handler(currentfont,...) -- hm, get rid of ...
+ end
+ end
+ handlers.gsub_context = handlers.contextchain
+ handlers.gsub_contextchain = handlers.contextchain
+ handlers.gsub_reversecontextchain = handlers.contextchain
+ handlers.gpos_contextchain = handlers.contextchain
+ handlers.gpos_context = handlers.contextchain
+end
+
+otf.setcontextchain()
+
+local missing = { } -- we only report once
+
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_process(...)
+end
+
+local logwarning = report_process
+
+local function report_missing_cache(typ,lookup)
+ local f = missing[currentfont] if not f then f = { } missing[currentfont] = f end
+ local t = f[typ] if not t then t = { } f[typ] = t end
+ if not t[lookup] then
+ t[lookup] = true
+ logwarning("missing cache for lookup %a, type %a, font %a, name %a",lookup,typ,currentfont,tfmdata.properties.fullname)
+ end
+end
+
+local resolved = { } -- we only resolve a font,script,language pair once
+
+-- todo: pass all these 'locals' in a table
+
+local lookuphashes = { }
+
+setmetatableindex(lookuphashes, function(t,font)
+ local lookuphash = fontdata[font].resources.lookuphash
+ if not lookuphash or not next(lookuphash) then
+ lookuphash = false
+ end
+ t[font] = lookuphash
+ return lookuphash
+end)
+
+-- fonts.hashes.lookups = lookuphashes
+
+local autofeatures = fonts.analyzers.features -- was: constants
+
+local function initialize(sequence,script,language,enabled)
+ local features = sequence.features
+ if features then
+ for kind, scripts in next, features do
+ local valid = enabled[kind]
+ if valid then
+ local languages = scripts[script] or scripts[wildcard]
+ if languages and (languages[language] or languages[wildcard]) then
+ return { valid, autofeatures[kind] or false, sequence.chain or 0, kind, sequence }
+ end
+ end
+ end
+ end
+ return false
+end
+
+function otf.dataset(tfmdata,font) -- generic variant, overloaded in context
+ local shared = tfmdata.shared
+ local properties = tfmdata.properties
+ local language = properties.language or "dflt"
+ local script = properties.script or "dflt"
+ local enabled = shared.features
+ local res = resolved[font]
+ if not res then
+ res = { }
+ resolved[font] = res
+ end
+ local rs = res[script]
+ if not rs then
+ rs = { }
+ res[script] = rs
+ end
+ local rl = rs[language]
+ if not rl then
+ rl = {
+ -- indexed but we can also add specific data by key
+ }
+ rs[language] = rl
+ local sequences = tfmdata.resources.sequences
+-- setmetatableindex(rl, function(t,k)
+-- if type(k) == "number" then
+-- local v = enabled and initialize(sequences[k],script,language,enabled)
+-- t[k] = v
+-- return v
+-- end
+-- end)
+ for s=1,#sequences do
+ local v = enabled and initialize(sequences[s],script,language,enabled)
+ if v then
+ rl[#rl+1] = v
+ end
+ end
+ end
+ return rl
+end
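+
+-- A minimal illustrative sketch, not part of the loader: each entry that
+-- initialize() contributes to the per script/language list has the shape that
+-- featuresprocessor consumes below (the feature kind and subtable name are
+-- made up, and the sequence is trimmed to the fields used here).
+
+do
+    local dataset = {
+        true,   -- [1] feature value (becomes featurevalue)
+        false,  -- [2] analyzer attribute or false
+        0,      -- [3] chain (sequence.chain or 0)
+        "liga", -- [4] feature kind
+        { type = "gsub_ligature", subtables = { "s_l_1" } }, -- [5] the sequence
+    }
+    assert(dataset[3] == 0)
+end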
+
+-- elseif id == glue_code then
+-- if p[5] then -- chain
+-- local pc = pp[32]
+-- if pc then
+-- start, ok = start, false -- p[1](start,kind,p[2],pc,p[3],p[4])
+-- if ok then
+-- done = true
+-- end
+-- if start then start = start.next end
+-- else
+-- start = start.next
+-- end
+-- else
+-- start = start.next
+-- end
+
+-- there will be a new direction parser (pre-parsed etc)
+
+-- less bytecode: 290 -> 254
+--
+-- attr = attr or false
+--
+-- local a = getattr(start,0)
+-- if (a == attr and (not attribute or getattr(start,a_state) == attribute)) or (not attribute or getattr(start,a_state) == attribute) then
+-- -- the action
+-- end
+
+local function featuresprocessor(head,font,attr)
+
+ local lookuphash = lookuphashes[font] -- we can also check sequences here
+
+ if not lookuphash then
+ return head, false
+ end
+
+ if trace_steps then
+ checkstep(head)
+ end
+
+ tfmdata = fontdata[font]
+ descriptions = tfmdata.descriptions
+ characters = tfmdata.characters
+ resources = tfmdata.resources
+
+ marks = resources.marks
+ anchorlookups = resources.lookup_to_anchor
+ lookuptable = resources.lookups
+ lookuptypes = resources.lookuptypes
+
+ currentfont = font
+ rlmode = 0
+
+ local sequences = resources.sequences
+ local done = false
+ local datasets = otf.dataset(tfmdata,font,attr)
+
+ local dirstack = { } -- could move outside function
+
+ -- We could work on sub start-stop ranges instead but I wonder if there is that
+ -- much speed gain (experiments showed that it made not much sense) and we need
+ -- to keep track of directions anyway. Also at some point I want to play with
+ -- font interactions and then we do need the full sweeps.
+
+ -- Keeping track of the headnode is needed for devanagari (I generalized it a bit
+ -- so that multiple cases are also covered.)
+
+ for s=1,#datasets do
+ local dataset = datasets[s]
+ featurevalue = dataset[1] -- todo: pass to function instead of using a global
+
+ local sequence = dataset[5] -- sequences[s] -- also dataset[5]
+ local rlparmode = 0
+ local topstack = 0
+ local success = false
+ local attribute = dataset[2]
+ local chain = dataset[3] -- sequence.chain or 0
+ local typ = sequence.type
+ local subtables = sequence.subtables
+ if chain < 0 then
+ -- this is a limited case, no special treatments like 'init' etc
+ local handler = handlers[typ]
+ -- we need to get rid of this slide! probably no longer needed in latest luatex
+ local start = find_node_tail(head) -- slow (we can store tail because there's always a skip at the end): todo
+ while start do
+ local id = start.id
+ if id == glyph_code then
+ if start.font == font and start.subtype<256 then
+ local a = start[0]
+ if a then
+ a = a == attr
+ else
+ a = true
+ end
+ if a then
+ for i=1,#subtables do
+ local lookupname = subtables[i]
+ local lookupcache = lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch = lookupcache[start.char]
+ if lookupmatch then
+ head, start, success = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if success then
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ if start then start = start.prev end
+ else
+ start = start.prev
+ end
+ else
+ start = start.prev
+ end
+ else
+ start = start.prev
+ end
+ end
+ else
+ local handler = handlers[typ]
+ local ns = #subtables
+ local start = head -- local ?
+ rlmode = 0 -- to be checked ?
+ if ns == 1 then -- happens often
+ local lookupname = subtables[1]
+ local lookupcache = lookuphash[lookupname]
+ if not lookupcache then -- also check for empty cache
+ report_missing_cache(typ,lookupname)
+ else
+
+ local function subrun(start)
+ -- mostly for gsub, gpos would demand a more clever approach
+ local head = start
+ local done = false
+ while start do
+ local id = start.id
+ if id == glyph_code and start.font == font and start.subtype <256 then
+ local a = start[0]
+ if a then
+ a = (a == attr) and (not attribute or start[a_state] == attribute)
+ else
+ a = not attribute or start[a_state] == attribute
+ end
+ if a then
+ local lookupmatch = lookupcache[start.char]
+ if lookupmatch then
+ -- the sequence argument could be dropped here
+ local ok
+ head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ done = true
+ end
+ end
+ if start then start = start.next end
+ else
+ start = start.next
+ end
+ else
+ start = start.next
+ end
+ end
+ if done then
+ success = true
+ return head
+ end
+ end
+
+ local function kerndisc(disc) -- we can assume that prev and next are glyphs
+ local prev = disc.prev
+ local next = disc.next
+ if prev and next then
+ prev.next = next
+ -- next.prev = prev
+ local a = prev[0]
+ if a then
+ a = (a == attr) and (not attribute or prev[a_state] == attribute)
+ else
+ a = not attribute or prev[a_state] == attribute
+ end
+ if a then
+ local lookupmatch = lookupcache[prev.char]
+ if lookupmatch then
+ -- the sequence argument could be dropped here
+ local h, d, ok = handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ done = true
+ success = true
+ end
+ end
+ end
+ prev.next = disc
+ -- next.prev = disc
+ end
+ return next
+ end
+
+ while start do
+ local id = start.id
+ if id == glyph_code then
+ if start.font == font and start.subtype<256 then
+ local a = start[0]
+ if a then
+ a = (a == attr) and (not attribute or start[a_state] == attribute)
+ else
+ a = not attribute or start[a_state] == attribute
+ end
+ if a then
+ local lookupmatch = lookupcache[start.char]
+ if lookupmatch then
+ -- the sequence argument could be dropped here
+ local ok
+ head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ success = true
+ end
+ end
+ if start then start = start.next end
+ else
+ start = start.next
+ end
+ else
+ start = start.next
+ end
+ elseif id == disc_code then
+ -- mostly for gsub
+ if start.subtype == discretionary_code then
+ local pre = start.pre
+ if pre then
+ local new = subrun(pre)
+ if new then start.pre = new end
+ end
+ local post = start.post
+ if post then
+ local new = subrun(post)
+ if new then start.post = new end
+ end
+ local replace = start.replace
+ if replace then
+ local new = subrun(replace)
+ if new then start.replace = new end
+ end
+elseif typ == "gpos_single" or typ == "gpos_pair" then
+ kerndisc(start)
+ end
+ start = start.next
+ elseif id == whatsit_code then -- will be function
+ local subtype = start.subtype
+ if subtype == dir_code then
+ local dir = start.dir
+ if dir == "+TRT" or dir == "+TLT" then
+ topstack = topstack + 1
+ dirstack[topstack] = dir
+ elseif dir == "-TRT" or dir == "-TLT" then
+ topstack = topstack - 1
+ end
+ local newdir = dirstack[topstack]
+ if newdir == "+TRT" then
+ rlmode = -1
+ elseif newdir == "+TLT" then
+ rlmode = 1
+ else
+ rlmode = rlparmode
+ end
+ if trace_directions then
+ report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
+ end
+ elseif subtype == localpar_code then
+ local dir = start.dir
+ if dir == "TRT" then
+ rlparmode = -1
+ elseif dir == "TLT" then
+ rlparmode = 1
+ else
+ rlparmode = 0
+ end
+ -- one might wonder if the par dir should be looked at, so we might as well drop the next line
+ rlmode = rlparmode
+ if trace_directions then
+ report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
+ end
+ end
+ start = start.next
+ elseif id == math_code then
+ start = end_of_math(start).next
+ else
+ start = start.next
+ end
+ end
+ end
+ else
+
+ local function subrun(start)
+ -- mostly for gsub, gpos would demand a more clever approach
+ local head = start
+ local done = false
+ while start do
+ local id = start.id
+ if id == glyph_code and start.font == font and start.subtype <256 then
+ local a = start[0]
+ if a then
+ a = (a == attr) and (not attribute or start[a_state] == attribute)
+ else
+ a = not attribute or start[a_state] == attribute
+ end
+ if a then
+ for i=1,ns do
+ local lookupname = subtables[i]
+ local lookupcache = lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch = lookupcache[start.char]
+ if lookupmatch then
+ -- we could move all code inline but that makes things even more unreadable
+ local ok
+ head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ done = true
+ break
+ elseif not start then
+ -- don't ask why ... shouldn't happen
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ if start then start = start.next end
+ else
+ start = start.next
+ end
+ else
+ start = start.next
+ end
+ end
+ if done then
+ success = true
+ return head
+ end
+ end
+
+ local function kerndisc(disc) -- we can assume that prev and next are glyphs
+ local prev = disc.prev
+ local next = disc.next
+ if prev and next then
+ prev.next = next
+ -- next.prev = prev
+ local a = prev[0]
+ if a then
+ a = (a == attr) and (not attribute or prev[a_state] == attribute)
+ else
+ a = not attribute or prev[a_state] == attribute
+ end
+ if a then
+ for i=1,ns do
+ local lookupname = subtables[i]
+ local lookupcache = lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch = lookupcache[prev.char]
+ if lookupmatch then
+ -- we could move all code inline but that makes things even more unreadable
+ local h, d, ok = handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ done = true
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ end
+ prev.next = disc
+ -- next.prev = disc
+ end
+ return next
+ end
+
+ while start do
+ local id = start.id
+ if id == glyph_code then
+ if start.font == font and start.subtype<256 then
+ local a = start[0]
+ if a then
+ a = (a == attr) and (not attribute or start[a_state] == attribute)
+ else
+ a = not attribute or start[a_state] == attribute
+ end
+ if a then
+ for i=1,ns do
+ local lookupname = subtables[i]
+ local lookupcache = lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch = lookupcache[start.char]
+ if lookupmatch then
+ -- we could move all code inline but that makes things even more unreadable
+ local ok
+ head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ success = true
+ break
+ elseif not start then
+ -- don't ask why ... shouldn't happen
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ if start then start = start.next end
+ else
+ start = start.next
+ end
+ else
+ start = start.next
+ end
+ elseif id == disc_code then
+ -- mostly for gsub
+ if start.subtype == discretionary_code then
+ local pre = start.pre
+ if pre then
+ local new = subrun(pre)
+ if new then start.pre = new end
+ end
+ local post = start.post
+ if post then
+ local new = subrun(post)
+ if new then start.post = new end
+ end
+ local replace = start.replace
+ if replace then
+ local new = subrun(replace)
+ if new then start.replace = new end
+ end
+elseif typ == "gpos_single" or typ == "gpos_pair" then
+ kerndisc(start)
+ end
+ start = start.next
+ elseif id == whatsit_code then
+ local subtype = start.subtype
+ if subtype == dir_code then
+ local dir = start.dir
+ if dir == "+TRT" or dir == "+TLT" then
+ topstack = topstack + 1
+ dirstack[topstack] = dir
+ elseif dir == "-TRT" or dir == "-TLT" then
+ topstack = topstack - 1
+ end
+ local newdir = dirstack[topstack]
+ if newdir == "+TRT" then
+ rlmode = -1
+ elseif newdir == "+TLT" then
+ rlmode = 1
+ else
+ rlmode = rlparmode
+ end
+ if trace_directions then
+ report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
+ end
+ elseif subtype == localpar_code then
+ local dir = start.dir
+ if dir == "TRT" then
+ rlparmode = -1
+ elseif dir == "TLT" then
+ rlparmode = 1
+ else
+ rlparmode = 0
+ end
+ rlmode = rlparmode
+ if trace_directions then
+ report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
+ end
+ end
+ start = start.next
+ elseif id == math_code then
+ start = end_of_math(start).next
+ else
+ start = start.next
+ end
+ end
+ end
+ end
+ if success then
+ done = true
+ end
+ if trace_steps then -- ?
+ registerstep(head)
+ end
+ end
+ return head, done
+end
+
+local function generic(lookupdata,lookupname,unicode,lookuphash)
+ local target = lookuphash[lookupname]
+ if target then
+ target[unicode] = lookupdata
+ else
+ lookuphash[lookupname] = { [unicode] = lookupdata }
+ end
+end
+
+local action = {
+
+ substitution = generic,
+ multiple = generic,
+ alternate = generic,
+ position = generic,
+
+ ligature = function(lookupdata,lookupname,unicode,lookuphash)
+ local target = lookuphash[lookupname]
+ if not target then
+ target = { }
+ lookuphash[lookupname] = target
+ end
+ for i=1,#lookupdata do
+ local li = lookupdata[i]
+ local tu = target[li]
+ if not tu then
+ tu = { }
+ target[li] = tu
+ end
+ target = tu
+ end
+ target.ligature = unicode
+ end,
+
+ pair = function(lookupdata,lookupname,unicode,lookuphash)
+ local target = lookuphash[lookupname]
+ if not target then
+ target = { }
+ lookuphash[lookupname] = target
+ end
+ local others = target[unicode]
+ local paired = lookupdata[1]
+ if others then
+ others[paired] = lookupdata
+ else
+ others = { [paired] = lookupdata }
+ target[unicode] = others
+ end
+ end,
+
+}
+
+local function prepare_lookups(tfmdata)
+
+ local rawdata = tfmdata.shared.rawdata
+ local resources = rawdata.resources
+ local lookuphash = resources.lookuphash
+ local anchor_to_lookup = resources.anchor_to_lookup
+ local lookup_to_anchor = resources.lookup_to_anchor
+ local lookuptypes = resources.lookuptypes
+ local characters = tfmdata.characters
+ local descriptions = tfmdata.descriptions
+
+ -- we cannot free the entries in the descriptions as sometimes we access
+ -- them directly (for instance anchors) ... selectively freeing doesn't save
+ -- much memory as it's only a reference to a table and the slot in the
+ -- description hash is not freed anyway
+
+ for unicode, character in next, characters do -- we cannot loop over descriptions !
+
+ local description = descriptions[unicode]
+
+ if description then
+
+ local lookups = description.slookups
+ if lookups then
+ for lookupname, lookupdata in next, lookups do
+ action[lookuptypes[lookupname]](lookupdata,lookupname,unicode,lookuphash)
+ end
+ end
+
+ local lookups = description.mlookups
+ if lookups then
+ for lookupname, lookuplist in next, lookups do
+ local lookuptype = lookuptypes[lookupname]
+ for l=1,#lookuplist do
+ local lookupdata = lookuplist[l]
+ action[lookuptype](lookupdata,lookupname,unicode,lookuphash)
+ end
+ end
+ end
+
+ local list = description.kerns
+ if list then
+ for lookup, krn in next, list do -- ref to glyph, saves lookup
+ local target = lookuphash[lookup]
+ if target then
+ target[unicode] = krn
+ else
+ lookuphash[lookup] = { [unicode] = krn }
+ end
+ end
+ end
+
+ local list = description.anchors
+ if list then
+ for typ, anchors in next, list do -- types
+ if typ == "mark" or typ == "cexit" then -- or entry?
+ for name, anchor in next, anchors do
+ local lookups = anchor_to_lookup[name]
+ if lookups then
+ for lookup, _ in next, lookups do
+ local target = lookuphash[lookup]
+ if target then
+ target[unicode] = anchors
+ else
+ lookuphash[lookup] = { [unicode] = anchors }
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+
+ end
+
+ end
+
+end
+
+local function split(replacement,original)
+ local result = { }
+ for i=1,#replacement do
+ result[original[i]] = replacement[i]
+ end
+ return result
+end
+
+local valid = {
+ coverage = { chainsub = true, chainpos = true, contextsub = true },
+ reversecoverage = { reversesub = true },
+ glyphs = { chainsub = true, chainpos = true },
+}
+
+local function prepare_contextchains(tfmdata)
+ local rawdata = tfmdata.shared.rawdata
+ local resources = rawdata.resources
+ local lookuphash = resources.lookuphash
+ local lookups = rawdata.lookups
+ if lookups then
+ for lookupname, lookupdata in next, rawdata.lookups do
+ local lookuptype = lookupdata.type
+ if lookuptype then
+ local rules = lookupdata.rules
+ if rules then
+ local format = lookupdata.format
+ local validformat = valid[format]
+ if not validformat then
+ report_prepare("unsupported format %a",format)
+ elseif not validformat[lookuptype] then
+ -- todo: dejavu-serif has one (but i need to see what use it has)
+ report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookupname)
+ else
+ local contexts = lookuphash[lookupname]
+ if not contexts then
+ contexts = { }
+ lookuphash[lookupname] = contexts
+ end
+ local t, nt = { }, 0
+ for nofrules=1,#rules do
+ local rule = rules[nofrules]
+ local current = rule.current
+ local before = rule.before
+ local after = rule.after
+ local replacements = rule.replacements
+ local sequence = { }
+ local nofsequences = 0
+ -- Eventually we can store start, stop and sequence in the cached file,
+ -- but then less sharing takes place, so best not do that without a lot
+ -- of profiling; for now let's forget about it.
+ if before then
+ for n=1,#before do
+ nofsequences = nofsequences + 1
+ sequence[nofsequences] = before[n]
+ end
+ end
+ local start = nofsequences + 1
+ for n=1,#current do
+ nofsequences = nofsequences + 1
+ sequence[nofsequences] = current[n]
+ end
+ local stop = nofsequences
+ if after then
+ for n=1,#after do
+ nofsequences = nofsequences + 1
+ sequence[nofsequences] = after[n]
+ end
+ end
+ if sequence[1] then
+ -- Replacements only happen with reverse lookups as they are single only. We
+ -- could pack them into current (replacement value instead of true) and then
+ -- use sequence[start] instead but it's somewhat ugly.
+ nt = nt + 1
+ t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups, replacements }
+ for unic, _ in next, sequence[start] do
+ local cu = contexts[unic]
+ if not cu then
+ contexts[unic] = t
+ end
+ end
+ end
+ end
+ end
+ else
+ -- no rules
+ end
+ else
+ report_prepare("missing lookuptype for lookupname %a",lookupname)
+ end
+ end
+ end
+end
+
+-- we can consider lookuphash == false (initialized but empty) vs lookuphash == table
+
+local function featuresinitializer(tfmdata,value)
+ if true then -- value then
+ -- beware we need to use the topmost properties table
+ local rawdata = tfmdata.shared.rawdata
+ local properties = rawdata.properties
+ if not properties.initialized then
+ local starttime = trace_preparing and os.clock()
+ local resources = rawdata.resources
+ resources.lookuphash = resources.lookuphash or { }
+ prepare_contextchains(tfmdata)
+ prepare_lookups(tfmdata)
+ properties.initialized = true
+ if trace_preparing then
+ report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,tfmdata.properties.fullname)
+ end
+ end
+ end
+end
+
+registerotffeature {
+ name = "features",
+ description = "features",
+ default = true,
+ initializers = {
+ position = 1,
+ node = featuresinitializer,
+ },
+ processors = {
+ node = featuresprocessor,
+ }
+}
+
+-- This can be used for extra handlers, but should be used with care!
+
+otf.handlers = handlers
diff --git a/src/luaotfload-fonts-tfm.lua b/src/luaotfload-fonts-tfm.lua
new file mode 100644
index 0000000..b9bb1bd
--- /dev/null
+++ b/src/luaotfload-fonts-tfm.lua
@@ -0,0 +1,38 @@
+if not modules then modules = { } end modules ['luatex-fonts-tfm'] = {
+ version = 1.001,
+ comment = "companion to luatex-*.tex",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+
+local fonts = fonts
+local tfm = { }
+fonts.handlers.tfm = tfm
+fonts.formats.tfm = "type1" -- we need to have at least a value here
+
+function fonts.readers.tfm(specification)
+ local fullname = specification.filename or ""
+ if fullname == "" then
+ local forced = specification.forced or ""
+ if forced ~= "" then
+ fullname = specification.name .. "." .. forced
+ else
+ fullname = specification.name
+ end
+ end
+ local foundname = resolvers.findbinfile(fullname, 'tfm') or ""
+ if foundname == "" then
+ foundname = resolvers.findbinfile(fullname, 'ofm') or ""
+ end
+ if foundname ~= "" then
+ specification.filename = foundname
+ specification.format = "ofm"
+ return font.read_tfm(specification.filename,specification.size)
+ end
+end
diff --git a/src/luaotfload-letterspace.lua b/src/luaotfload-letterspace.lua
new file mode 100644
index 0000000..20f29f5
--- /dev/null
+++ b/src/luaotfload-letterspace.lua
@@ -0,0 +1,544 @@
+if not modules then modules = { } end modules ['letterspace'] = {
+ version = "2.5",
+ comment = "companion to luaotfload-main.lua",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL; adapted by Philipp Gesang",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local log = luaotfload.log
+local report = log.report
+
+local getmetatable = getmetatable
+local require = require
+local setmetatable = setmetatable
+local tonumber = tonumber
+
+local next = next
+local nodes, node, fonts = nodes, node, fonts
+
+local find_node_tail = node.tail or node.slide
+local free_node = node.free
+local copy_node = node.copy
+local new_node = node.new
+local insert_node_before = node.insert_before
+
+local nodepool = nodes.pool
+
+local new_kern = nodepool.kern
+local new_glue = nodepool.glue
+
+local nodecodes = nodes.nodecodes
+
+local glyph_code = nodecodes.glyph
+local kern_code = nodecodes.kern
+local disc_code = nodecodes.disc
+local math_code = nodecodes.math
+
+local fonthashes = fonts.hashes
+local chardata = fonthashes.characters
+local quaddata = fonthashes.quads
+local otffeatures = fonts.constructors.newfeatures "otf"
+
+--[[doc--
+
+ Since the letterspacing method was derived initially from Context’s
+ typo-krn.lua we keep the sub-namespace “letterspace” inside the
+ “luaotfload” table.
+
+--doc]]--
+
+luaotfload.letterspace = luaotfload.letterspace or { }
+local letterspace = luaotfload.letterspace
+
+letterspace.keepligature = false
+letterspace.keeptogether = false
+
+---=================================================================---
+--- preliminary definitions
+---=================================================================---
+-- We set up a layer emulating some Context internals that are needed
+-- for the letterspacing callback.
+-----------------------------------------------------------------------
+--- node-ini
+-----------------------------------------------------------------------
+
+local bothways = function (t) return table.swapped (t, t) end
+local kerncodes = bothways { [0] = "fontkern"
+ , [1] = "userkern"
+ , [2] = "accentkern"
+ }
+
+kerncodes.kerning = kerncodes.fontkern --- idiosyncrasy
+local kerning_code = kerncodes.kerning
+local userkern_code = kerncodes.userkern
+
+
+-----------------------------------------------------------------------
+--- node-res
+-----------------------------------------------------------------------
+
+nodes.pool = nodes.pool or { }
+local pool = nodes.pool
+
+local kern = new_node ("kern", kerncodes.userkern)
+local glue_spec = new_node "glue_spec"
+
+pool.kern = function (k)
+ local n = copy_node (kern)
+ n.kern = k
+ return n
+end
+
+pool.glue = function (width, stretch, shrink,
+ stretch_order, shrink_order)
+ local n = new_node"glue"
+ if not width then
+ -- no spec
+ elseif width == false or tonumber(width) then
+ local s = copy_node(glue_spec)
+ if width then s.width = width end
+ if stretch then s.stretch = stretch end
+ if shrink then s.shrink = shrink end
+ if stretch_order then s.stretch_order = stretch_order end
+ if shrink_order then s.shrink_order = shrink_order end
+ n.spec = s
+ else
+ -- shared
+ n.spec = copy_node(width)
+ end
+ return n
+end
+
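+--- A quick illustration of the two pool constructors (the values are
+--- invented; dimensions are in scaled points, so 65536 equals 1pt):
+---
+---   local k = pool.kern (65536)        --- rigid kern of 1pt
+---   local g = pool.glue (65536, 32768) --- 1pt glue with 0.5pt stretch
+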
+-----------------------------------------------------------------------
+--- font-hsh
+-----------------------------------------------------------------------
+--- some initialization resembling font-hsh
+local fonthashes = fonts.hashes
+local identifiers = fonthashes.identifiers --- was: fontdata
+local chardata = fonthashes.characters
+local quaddata = fonthashes.quads
+local parameters = fonthashes.parameters
+
+--- ('a, 'a) hash -> (('a, 'a) hash -> 'a -> 'a) -> ('a, 'a) hash
+local setmetatableindex = function (t, f)
+ local mt = getmetatable(t)
+ if mt then
+ mt.__index = f
+ else
+ setmetatable(t, { __index = f })
+ end
+ return t
+end
+
+if not parameters then
+ parameters = { }
+ setmetatableindex(parameters, function(t, k)
+ if k == true then
+ return parameters[currentfont()]
+ else
+ local parameters = identifiers[k].parameters
+ t[k] = parameters
+ return parameters
+ end
+ end)
+ --fonthashes.parameters = parameters
+end
+
+if not chardata then
+ chardata = { }
+ setmetatableindex(chardata, function(t, k)
+ if k == true then
+ return chardata[currentfont()]
+ else
+ local tfmdata = identifiers[k]
+ if not tfmdata then --- unsafe
+ tfmdata = font.fonts[k]
+ end
+ if tfmdata then
+ local characters = tfmdata.characters
+ t[k] = characters
+ return characters
+ end
+ end
+ end)
+ fonthashes.characters = chardata
+end
+
+if not quaddata then
+ quaddata = { }
+ setmetatableindex(quaddata, function(t, k)
+ if k == true then
+ return quaddata[currentfont()]
+ else
+ local parameters = parameters[k]
+ local quad = parameters and parameters.quad or 0
+ t[k] = quad
+ return quad
+ end
+ end)
+ --fonthashes.quads = quaddata
+end
+
+---=================================================================---
+--- character kerning functionality
+---=================================================================---
+
+local kern_injector = function (fillup, kern)
+ if fillup then
+ local g = new_glue(kern)
+ local s = g.spec
+ s.stretch = kern
+ s.stretch_order = 1
+ return g
+ else
+ return new_kern(kern)
+ end
+end
+
+--[[doc--
+
+ Caveat lector.
+ This is an adaptation of the Context character kerning mechanism
+ that emulates XeTeX-style fontwise letterspacing. Note that in its
+ present state it is far inferior to the original, which is
+ attribute-based and ignores font-boundaries. Nevertheless, due to
+ popular demand the following callback has been added.
+
+--doc]]--
+
+local kernfactors = { } --- fontid -> factor
+
+local kerncharacters
+kerncharacters = function (head)
+ local start, done = head, false
+ local lastfont = nil
+ local keepligature = letterspace.keepligature --- function
+ local keeptogether = letterspace.keeptogether --- function
+ local fillup = false
+
+ local identifiers = fonthashes.identifiers
+ local kernfactors = kernfactors
+
+ local firstkern = true
+
+ while start do
+ local id = start.id
+ if id == glyph_code then
+
+ --- 1) look up kern factor (slow, but cached rudimentarily)
+ local krn
+ local fontid = start.font
+ do
+ krn = kernfactors[fontid]
+ if not krn then
+ local tfmdata = identifiers[fontid]
+ if not tfmdata then -- unsafe
+ tfmdata = font.fonts[fontid]
+ end
+ if tfmdata then
+ local fontproperties = tfmdata.properties
+ if fontproperties then
+ krn = fontproperties.kerncharacters
+ end
+ end
+ kernfactors[fontid] = krn
+ end
+ if not krn or krn == 0 then
+ firstkern = true
+ goto nextnode
+ elseif firstkern then
+ firstkern = false
+ if (id ~= disc_code) and (not start.components) then
+ --- not a ligature, skip node
+ goto nextnode
+ end
+ end
+ end
+
+ if krn == "max" then
+ krn = .25
+ fillup = true
+ else
+ fillup = false
+ end
+
+ lastfont = fontid
+
+ --- 2) resolve ligatures
+ local c = start.components
+
+ if c then
+ if keepligature and keepligature(start) then
+ -- keep 'm
+ else
+ --- c = kerncharacters (c) --> taken care of after replacing
+ local s = start
+ local p, n = s.prev, s.next
+ local tail = find_node_tail(c)
+ if p then
+ p.next = c
+ c.prev = p
+ else
+ head = c
+ end
+ if n then
+ n.prev = tail
+ end
+ tail.next = n
+ start = c
+ s.components = nil
+ -- we now leak nodes !
+ -- free_node(s)
+ done = true
+ end
+ end -- kern ligature
+
+ --- 3) apply the extra kerning
+ local prev = start.prev
+ if prev then
+ local pid = prev.id
+
+ if not pid then
+ -- nothing
+
+ elseif pid == kern_code then
+ if prev.subtype == kerning_code --- context does this by means of an
+ or prev.subtype == userkern_code --- attribute; we may need a test
+ then
+ if keeptogether and prev.prev.id == glyph_code and keeptogether(prev.prev,start) then
+ -- keep
+ else
+ prev.subtype = userkern_code
+ prev.kern = prev.kern + quaddata[lastfont]*krn -- here
+ done = true
+ end
+ end
+
+ elseif pid == glyph_code then
+ if prev.font == lastfont then
+ local prevchar, lastchar = prev.char, start.char
+ if keeptogether and keeptogether(prev,start) then
+ -- keep 'm
+ elseif identifiers[lastfont] then
+ local kerns = chardata[lastfont][prevchar].kerns
+ local kern = kerns and kerns[lastchar] or 0
+ krn = kern + quaddata[lastfont]*krn -- here
+ insert_node_before(head,start,kern_injector(fillup,krn))
+ done = true
+ end
+ else
+ krn = quaddata[lastfont]*krn -- here
+ insert_node_before(head,start,kern_injector(fillup,krn))
+ done = true
+ end
+
+ elseif pid == disc_code then
+ -- a bit too complicated, we can best not copy and just calculate
+ -- but we could have multiple glyphs involved so ...
+ local disc = prev -- disc
+ local pre, post, replace = disc.pre, disc.post, disc.replace
+ local prv, nxt = disc.prev, disc.next
+
+ if pre and prv then -- must pair with start.prev
+ -- this one happens in most cases
+ local before = copy_node(prv)
+ pre.prev = before
+ before.next = pre
+ before.prev = nil
+ pre = kerncharacters (before)
+ pre = pre.next
+ pre.prev = nil
+ disc.pre = pre
+ free_node(before)
+ end
+
+ if post and nxt then -- must pair with start
+ local after = copy_node(nxt)
+ local tail = find_node_tail(post)
+ tail.next = after
+ after.prev = tail
+ after.next = nil
+ post = kerncharacters (post)
+ tail.next = nil
+ disc.post = post
+ free_node(after)
+ end
+
+ if replace and prv and nxt then -- must pair with start and start.prev
+ local before = copy_node(prv)
+ local after = copy_node(nxt)
+ local tail = find_node_tail(replace)
+ replace.prev = before
+ before.next = replace
+ before.prev = nil
+ tail.next = after
+ after.prev = tail
+ after.next = nil
+ replace = kerncharacters (before)
+ replace = replace.next
+ replace.prev = nil
+ after.prev.next = nil
+ disc.replace = replace
+ free_node(after)
+ free_node(before)
+ elseif identifiers[lastfont] then
+ if prv and prv.id == glyph_code and prv.font == lastfont then
+ local prevchar, lastchar = prv.char, start.char
+ local kerns = chardata[lastfont][prevchar].kerns
+ local kern = kerns and kerns[lastchar] or 0
+ krn = kern + quaddata[lastfont]*krn -- here
+ else
+ krn = quaddata[lastfont]*krn -- here
+ end
+ disc.replace = kern_injector(false,krn) -- only kerns permitted, no glue
+ end
+
+ end
+ end
+ end
+
+ ::nextnode::
+ if start then
+ start = start.next
+ end
+ end
+ return head, done
+end
+
+---=================================================================---
+--- integration
+---=================================================================---
+
+--- · callback: kerncharacters
+--- · enabler: enablefontkerning
+--- · disabler: disablefontkerning
+
+--- callback wrappers
+
+--- (node_t -> node_t) -> string -> string list -> bool
+local registered_as = { } --- procname -> callbacks
+local add_processor = function (processor, name, ...)
+ local callbacks = { ... }
+ for i=1, #callbacks do
+ luatexbase.add_to_callback(callbacks[i], processor, name)
+ end
+ registered_as[name] = callbacks --- for removal
+ return true
+end
+
+--- string -> bool
+local remove_processor = function (name)
+ local callbacks = registered_as[name]
+ if callbacks then
+ for i=1, #callbacks do
+ luatexbase.remove_from_callback(callbacks[i], name)
+ end
+ return true
+ end
+ return false --> unregistered
+end
+
+--- now for the simplistic variant
+--- unit -> bool
+local enablefontkerning = function ( )
+ return add_processor( kerncharacters
+ , "luaotfload.letterspace"
+ , "pre_linebreak_filter"
+ , "hpack_filter")
+end
+
+--- unit -> bool
+local disablefontkerning = function ( )
+ return remove_processor "luaotfload.letterspace"
+end
+
+--[[doc--
+
+ Fontwise kerning is enabled via the “kernfactor” option at font
+ definition time. Unlike the Context implementation, which relies
+ on Luatex attributes, it uses a font property to pass along the
+ letterspacing factor.
+
+ The callback is activated the first time a letterspaced font is
+ requested and stays active until the end of the run. Since the font
+ is a property of individual glyphs, every glyph in the entire
+ document must be checked for the kern property. This is quite
+ inefficient compared to Context’s attribute based approach, but Xetex
+ compatibility reduces our options significantly.
+
+--doc]]--
+
+
+local fontkerning_enabled = false --- callback state
+
+--- fontobj -> float -> unit
+local initializefontkerning = function (tfmdata, factor)
+ if factor ~= "max" then
+ factor = tonumber (factor) or 0
+ end
+ if factor == "max" or factor ~= 0 then
+ local fontproperties = tfmdata.properties
+ if fontproperties then
+ --- hopefully this field stays unused otherwise
+ fontproperties.kerncharacters = factor
+ end
+ if not fontkerning_enabled then
+ fontkerning_enabled = enablefontkerning ()
+ end
+ end
+end
+
+--- like the font colorization, fontwise kerning is hooked into the
+--- feature mechanism
+
+otffeatures.register {
+ name = "kernfactor",
+ description = "kernfactor",
+ initializers = {
+ base = initializefontkerning,
+ node = initializefontkerning,
+ }
+}
+
+--[[doc--
+
+ The “letterspace” feature is essentially identical with the above
+ “kernfactor” method, but takes its argument as a percentage (i.e.
+ the value is scaled by 1/100) to match Xetex’s behavior. (See the
+ Xetex reference, page 5, section 1.2.2.)
+
+ Since Xetex doesn’t appear to have a (documented) “max” keyword, we
+ assume all input values are numeric.
+
+--doc]]--
+
+local initializecompatfontkerning = function (tfmdata, percentage)
+ local factor = tonumber (percentage)
+ if not factor then
+ report ("both", 0, "letterspace",
+ "Invalid argument to letterspace: %s (type %q), " ..
+ "was expecting percentage as Lua number instead.",
+ percentage, type (percentage))
+ return
+ end
+ return initializefontkerning (tfmdata, factor * 0.01)
+end
+
+otffeatures.register {
+ name = "letterspace",
+ description = "letterspace",
+ initializers = {
+ base = initializecompatfontkerning,
+ node = initializecompatfontkerning,
+ }
+}
+
+--[[example--
+
+See https://bitbucket.org/phg/lua-la-tex-tests/src/tip/pln-letterspace-8-compare.tex
+for an example.
+
+--example]]--
+
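+--[[doc--
+
+ A short usage sketch (the font file name below is made up): both
+ features are requested at font definition time like any other font
+ feature, e.g.
+
+   \font\lsp = "file:Iwona-Regular.otf:letterspace=12.5" at 10pt
+   \font\krn = "file:Iwona-Regular.otf:kernfactor=0.125" at 10pt
+
+ Because of the percentage scaling described above, both requests
+ yield the same amount of extra kerning.
+
+--doc]]--
+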
+--- vim:sw=2:ts=2:expandtab:tw=71
+
diff --git a/src/luaotfload-loaders.lua b/src/luaotfload-loaders.lua
new file mode 100644
index 0000000..2aa8c7c
--- /dev/null
+++ b/src/luaotfload-loaders.lua
@@ -0,0 +1,30 @@
+if not modules then modules = { } end modules ["loaders"] = {
+ version = "2.5",
+ comment = "companion to luaotfload-main.lua",
+ author = "Hans Hagen, Khaled Hosny, Elie Roux, Philipp Gesang",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local fonts = fonts
+local readers = fonts.readers
+local handlers = fonts.handlers
+local formats = fonts.formats
+
+local pfb_reader = function (specification)
+ return readers.opentype (specification, "pfb", "type1")
+end
+
+local pfa_reader = function (specification)
+ return readers.opentype (specification, "pfa", "type1")
+end
+
+formats.pfa = "type1"
+readers.pfa = pfa_reader
+handlers.pfa = { }
+
+formats.pfb = "type1"
+readers.pfb = pfb_reader
+handlers.pfb = { }
+
+-- vim:tw=71:sw=2:ts=2:expandtab
diff --git a/src/luaotfload-log.lua b/src/luaotfload-log.lua
new file mode 100644
index 0000000..5698c84
--- /dev/null
+++ b/src/luaotfload-log.lua
@@ -0,0 +1,404 @@
+if not modules then modules = { } end modules ["luaotfload-log"] = {
+ version = "2.5",
+ comment = "companion to Luaotfload",
+ author = "Khaled Hosny, Elie Roux, Philipp Gesang",
+ copyright = "Luaotfload Development Team",
+ license = "GNU GPL v2.0"
+}
+
+--[[doc--
+The logging system is slow in general, as we always have the function
+call overhead even if we aren’t going to output anything. On the other
+hand, the more efficient approach followed by Context isn’t an option
+because we lack a user interface to toggle per-subsystem tracing.
+--doc]]--
+
+local module_name = "luaotfload" --- prefix for messages
+
+luaotfload = luaotfload or { }
+luaotfload.log = luaotfload.log or { }
+local log = luaotfload.log
+
+local ioopen = io.open
+local iowrite = io.write
+local lfsisdir = lfs.isdir
+local lfsisfile = lfs.isfile
+local osdate = os.date
+local ostime = os.time
+local osuuid = os.uuid
+local select = select
+local stringformat = string.format
+local stringsub = string.sub
+local tableconcat = table.concat
+local texiowrite_nl = texio.write_nl
+local texiowrite = texio.write
+local type = type
+
+local dummyfunction = function () end
+
+local texjob = false
+if tex and (tex.jobname or tex.formatname) then
+ --- TeX
+ texjob = true
+end
+
+local loglevel = 0 --- default
+local logout = "log"
+
+--- int -> bool
+local set_loglevel = function (n)
+ if type(n) == "number" then
+ loglevel = n
+ end
+ return true
+end
+log.set_loglevel = set_loglevel
+
+--- unit -> int
+local get_loglevel = function ( )
+ return loglevel
+end
+log.get_loglevel = get_loglevel
+
+local writeln --- pointer to terminal/log writer
+local statusln --- terminal writer that reuses the current line
+local first_status = true --- indicate the begin of a status region
+
+local log_msg = [[
+logging output redirected to %s
+to monitor the progress run "tail -f %s" in another terminal
+]]
+
+local tmppath = os.getenv "TMPDIR" or "/tmp"
+
+local choose_logfile = function ( )
+ if lfsisdir (tmppath) then
+ local fname
+ repeat --- ensure that file of that name doesn’t exist
+ fname = tmppath .. "/luaotfload-log-" .. osuuid()
+ until not lfsisfile (fname)
+ iowrite (stringformat (log_msg, fname, fname))
+ return ioopen (fname, "w")
+ end
+ --- missing /tmp
+ return false
+end
+
+local set_logout = function (s, finalizers)
+ if s == "stdout" then
+ logout = "redirect"
+ elseif s == "file" then --- inject custom logger
+ logout = "redirect"
+ local chan = choose_logfile ()
+ chan:write (stringformat ("logging initiated at %s",
+ osdate ("%F %T", ostime ())))
+ local writefile = function (...)
+ if select ("#", ...) == 2 then
+ chan:write (select (2, ...))
+ else
+ chan:write (select (1, ...))
+ end
+ end
+ local writefile_nl= function (...)
+ chan:write "\n"
+ if select ("#", ...) == 2 then
+ chan:write (select (2, ...))
+ else
+ chan:write (select (1, ...))
+ end
+ end
+
+ local writeln_orig = writeln
+
+ texiowrite = writefile
+ texiowrite_nl = writefile_nl
+ writeln = writefile_nl
+ statusln = dummyfunction
+
+ finalizers[#finalizers+1] = function ()
+ chan:write (stringformat ("\nlogging finished at %s\n",
+ osdate ("%F %T", ostime ())))
+ chan:close ()
+ texiowrite = texio.write
+ texiowrite_nl = texio.write_nl
+ writeln = writeln_orig
+ end
+ --else --- remains “log”
+ end
+ return finalizers
+end
+
+log.set_logout = set_logout
+
+local basic_logger = function (category, fmt, ...)
+ local res = { module_name, "|", category, ":" }
+ if fmt then
+ res [#res + 1] = stringformat (fmt, ...)
+ end
+ texiowrite_nl (logout, tableconcat(res, " "))
+end
+
+--- with faux db update with maximum verbosity:
+---
+--- --------- --------
+--- buffering time (s)
+--- --------- --------
+--- full 4.12
+--- line 4.20
+--- none 4.39
+--- --------- --------
+---
+
+io.stdout:setvbuf "no"
+io.stderr:setvbuf "no"
+
+local kill_line = "\r\x1b[K"
+
+if texjob == true then
+ --- We imitate the texio.* functions so the output is consistent.
+ writeln = function (str)
+ iowrite "\n"
+ iowrite(str)
+ end
+ statusln = function (str)
+ if first_status == false then
+ iowrite (kill_line)
+ else
+ iowrite "\n"
+ end
+ iowrite (str)
+ end
+else
+ writeln = function (str)
+ iowrite(str)
+ iowrite "\n"
+ end
+ statusln = function (str)
+ if first_status == false then
+ iowrite (kill_line)
+ end
+ iowrite (str)
+ end
+end
+
+local stdout = function (writer, category, ...)
+ local res = { module_name, "|", category, ":" }
+ local nargs = select("#", ...)
+ if nargs == 0 then
+ --writeln tableconcat(res, " ")
+ --return
+ elseif nargs == 1 then
+ res[#res+1] = select(1, ...) -- around 30% faster than unpack()
+ else
+ res[#res+1] = stringformat(...)
+ end
+ writer (tableconcat(res, " "))
+end
+
+--- at default (zero), we aim to be quiet
+local level_ids = { common = 1, loading = 2, search = 3 }
+
+--[[doc--
+
+ The report() logger is used more or less all over luaotfload.
+ Its requirements are twofold:
+
+ 1) Provide two logging channels, the terminal and the log file;
+ 2) Allow for control over verbosity levels.
+
+ The first part is addressed by specifying the log *mode* as the
+ first argument that can be either “log”, meaning the log file, or
+ “both”: log file and stdout. Anything else is taken as referring to
+ stdout only.
+
+ Verbosity levels, though not as fine-grained as e.g. Context’s
+ system of tracers, allow keeping the logging spam caused by
+ different subsystems manageable. By default, luaotfload will not
+ emit anything if things are running smoothly on level zero. Only
+ warning messages are relayed, while the other messages are skipped
+ over. (This is a little sub-optimal performance-wise since the
+ function calls to the logger are executed regardless.) The log
+ level during a Luatex run can be adjusted by setting the “loglevel”
+ field in config.luaotfload, or by calling log.set_loglevel() as
+ defined above.
+
+--doc]]--
+
+local report = function (mode, lvl, ...)
+ if type(lvl) == "string" then
+ lvl = level_ids[lvl]
+ end
+ if not lvl then lvl = 0 end
+
+ if loglevel >= lvl then
+ if mode == "log" then
+ basic_logger (...)
+ elseif mode == "both" and logout ~= "redirect" then
+ basic_logger (...)
+ stdout (writeln, ...)
+ else
+ stdout (writeln, ...)
+ end
+ end
+end
+
+log.report = report
+
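+--[[doc--
+
+ Usage sketch (the categories and messages are invented for
+ illustration):
+
+   report ("both", 0, "db", "font %q not found", "Iwona")
+   report ("log", 2, "cache", "wrote %d entries", 312)
+
+ With the default log output the first call reaches both the
+ terminal and the log file even at loglevel zero; the second is
+ written to the log file only, and only if the loglevel is at
+ least two.
+
+--doc]]--
+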
+--[[doc--
+
+ status_logger -- Overwrites the most recently printed line of the
+ terminal. Its purpose is to provide feedback without spamming
+ stdout with irrelevant messages, i.e. when building the database.
+
+ Status logging must be initialized by calling status_start() and
+ properly reset via status_stop().
+
+ The arguments low and high, passed to status_start(), indicate the
+ loglevel thresholds at which linewise and full logging are
+ triggered, respectively. E.g. after
+
+   names_status_start (1, 4)
+
+ a subsequent names_status() call will print nothing if the loglevel
+ is less than one, reuse the current line if the loglevel ranges
+ from one to three inclusively, and output its message on a separate
+ line otherwise.
+
+--doc]]--
+
+local status_logger = function (mode, ...)
+ if mode == "log" then
+ basic_logger (...)
+ else
+ if mode == "both" and logout ~= "redirect" then
+ basic_logger (...)
+ stdout (statusln, ...)
+ else
+ stdout (statusln, ...)
+ end
+ first_status = false
+ end
+end
+
+--[[doc--
+
+ status_start -- Initialize status logging. This installs the status
+ logger if the loglevel is in the specified range, and the normal
+ logger otherwise. It also resets the first-line state so that the
+ next line printed via the status logger does not overwrite the
+ current line.
+
+--doc]]--
+
+local status_writer
+local status_low = 99
+local status_high = 99
+
+local status_start = function (low, high)
+ first_status = true
+ status_low = low
+ status_high = high
+
+ if os.type == "windows" --- Assume broken terminal.
+ or os.getenv "TERM" == "dumb"
+ then
+ status_writer = function (mode, ...)
+ report (mode, high, ...)
+ end
+ return
+ end
+
+ if low <= loglevel and loglevel < high then
+ status_writer = status_logger
+ else
+ status_writer = function (mode, ...)
+ report (mode, high, ...)
+ end
+ end
+end
+
+--[[doc--
+
+ status_stop -- Finalize a status region by outputting a newline and
+ printing a message.
+
+--doc]]--
+
+local status_stop = function (...)
+ if first_status == false then
+ status_writer(...)
+ if texjob == false then
+ writeln ""
+ end
+ end
+end
+
+log.names_status = function (...) status_writer (...) end
+log.names_status_start = status_start
+log.names_status_stop = status_stop
+
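+--[[doc--
+
+ Minimal sketch of a status region (thresholds, category and
+ messages are illustrative only):
+
+   log.names_status_start (1, 4)
+   log.names_status ("term", "db", "scanning %q", "foo.otf")
+   log.names_status_stop ("term", "db", "done")
+
+ With a loglevel between one and three the middle call reuses the
+ current terminal line; from loglevel four onwards every message is
+ printed on a line of its own.
+
+--doc]]--
+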
+--[[doc--
+
+ The fontloader ships with the Context logging mechanisms
+ inaccessible. Instead, it provides plain fallbacks based on the
+ texio.write*() functions, which can be overridden by supplying a
+ function texio.reporter().
+
+ The fontloader output can be quite verbose, so we disable
+ it entirely by default.
+
+--doc]]--
+
+local texioreporter = function (message)
+ report ("log", 2, message)
+end
+
+texio.reporter = texioreporter
+
+--[[doc--
+
+ Adobe Glyph List.
+ -------------------------------------------------------------------
+
+ Context provides a somewhat different font-age.lua from an unclear
+ origin. Unfortunately, the file name it reads from is hard-coded
+ in font-enc.lua, so we have to replace the entire table.
+
+ This shouldn’t cause any complications. Due to its implementation
+ the glyph list will be loaded upon loading an OTF or TTF for the
+ first time during a TeX run. (If one sticks to TFM/OFM then it is
+ never read at all.) For this reason we can install a metatable that
+ looks up the file of our choosing and only falls back to the
+ Context one in case it cannot be found.
+
+--doc]]--
+
+if fonts then --- need to be running TeX
+ if next(fonts.encodings.agl) then
+ --- unnecessary because the file shouldn’t be loaded at this time
+ --- but we’re just making sure
+ fonts.encodings.agl = nil
+ collectgarbage"collect"
+ end
+
+
+ fonts.encodings.agl = { }
+
+ setmetatable(fonts.encodings.agl, { __index = function (t, k)
+ if k == "unicodes" then
+ local glyphlist = resolvers.findfile"luaotfload-glyphlist.lua"
+ if glyphlist then
+ report ("log", 1, "load", "loading the Adobe glyph list")
+ else
+ glyphlist = resolvers.findfile"font-age.lua"
+ report ("both", 0, "load",
+ "loading the extended glyph list from ConTeXt")
+ end
+ local unicodes = dofile(glyphlist)
+ fonts.encodings.agl = { unicodes = unicodes }
+ return unicodes
+ else
+ return nil
+ end
+ end })
+end
+
+-- vim:tw=71:sw=4:ts=4:expandtab
diff --git a/src/luaotfload-main.lua b/src/luaotfload-main.lua
new file mode 100644
index 0000000..6616468
--- /dev/null
+++ b/src/luaotfload-main.lua
@@ -0,0 +1,708 @@
+-----------------------------------------------------------------------
+-- FILE: luaotfload-main.lua
+-- DESCRIPTION: Luatex fontloader initialization
+-- REQUIREMENTS: luatex v.0.78 or later; the lualibs package
+-- AUTHOR: Élie Roux, Khaled Hosny, Philipp Gesang
+-- VERSION: same as Luaotfload
+-- MODIFIED: 2014-02-14 22:51:09+0100
+-----------------------------------------------------------------------
+--
+--- Note:
+--- This file was part of the original luaotfload.dtx and has been
+--- converted to a pure Lua file during the transition from Luaotfload
+--- version 2.4 to 2.5. Thus, the comments are still in TeX (Latex)
+--- markup.
+
+if not modules then modules = { } end modules ["luaotfload-main"] = {
+ version = "2.5",
+ comment = "fontloader initialization",
+ author = "Hans Hagen, Khaled Hosny, Elie Roux, Philipp Gesang",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "GNU General Public License v. 2.0"
+}
+
+
+--[[doc--
+
+ This file initializes the system and loads the font loader. To
+ minimize potential conflicts between other packages and the code
+ imported from \CONTEXT, several precautions are in order. Some of
+ the functionality that the font loader expects to be present, like
+ raw access to callbacks, are assumed to have been disabled by
+ \identifier{luatexbase} when this file is processed. In some cases
+ it is possible to trick it by putting dummies into place and
+ restoring the behavior from \identifier{luatexbase} after
+ initialization. Other cases such as attribute allocation require
+ that we hook the functionality from \identifier{luatexbase} into
+ locations where they normally wouldn’t be.
+
+ Anyway, we can import the code base without modifications, which is
+ due mostly to the extra effort by Hans Hagen to make \LUATEX-Fonts
+ self-contained and encapsulate it, and especially due to his
+ willingness to incorporate our suggestions.
+
+--doc]]--
+
+local initial_log_level = 0
+
+luaotfload = luaotfload or { }
+local luaotfload = luaotfload
+luaotfload.log = luaotfload.log or { }
+
+luaotfload.module = {
+ name = "luaotfload-main",
+ version = 2.50000,
+ date = "2014/**/**",
+ description = "OpenType layout system.",
+ author = "Elie Roux & Hans Hagen",
+ copyright = "Elie Roux",
+ license = "GPL v2.0"
+}
+
+local luatexbase = luatexbase
+
+local setmetatable = setmetatable
+local type, next = type, next
+local stringlower = string.lower
+local stringformat = string.format
+
+local kpsefind_file = kpse.find_file
+local lfsisfile = lfs.isfile
+
+local add_to_callback = luatexbase.add_to_callback
+local create_callback = luatexbase.create_callback
+local reset_callback = luatexbase.reset_callback
+local call_callback = luatexbase.call_callback
+
+local dummy_function = function () end --- XXX this will be moved to the luaotfload namespace when we have the init module
+
+local error, warning, info, log =
+ luatexbase.provides_module(luaotfload.module)
+
+luaotfload.log.tex = {
+ error = error,
+ warning = warning,
+ info = info,
+ log = log,
+}
+
+--[[doc--
+
+ We set the minimum version requirement for \LUATEX to v0.76,
+ because the font loader requires recent features like direct
+ attribute indexing and \luafunction{node.end_of_math()} that aren’t
+ available in earlier versions.\footnote{%
+ See Taco’s announcement of v0.76:
+ \url{http://comments.gmane.org/gmane.comp.tex.luatex.user/4042}
+ and this commit by Hans that introduced those features.
+ \url{http://repo.or.cz/w/context.git/commitdiff/a51f6cf6ee087046a2ae5927ed4edff0a1acec1b}.
+ }
+
+--doc]]--
+
+local min_luatex_version = 76
+
+if tex.luatexversion < min_luatex_version then
+ warning ("LuaTeX v%.2f is old, v%.2f or later is recommended.",
+ tex.luatexversion / 100,
+ min_luatex_version / 100)
+ --- we install a fallback for older versions as a safety
+ if not node.end_of_math then
+ local math_t = node.id "math"
+ local traverse_nodes = node.traverse_id
+ node.end_of_math = function (n)
+ for n in traverse_nodes (math_t, n.next) do
+ return n
+ end
+ end
+ end
+end
+
+--[[doc--
+
+ \subsection{Module loading}
+ We load the files imported from \CONTEXT with this function. It
+ automatically prepends the prefix \fileent{luaotfload-} to its
+ argument, so we can refer to the files with their actual \CONTEXT
+ name.
+
+--doc]]--
+
+local fl_prefix = "luaotfload" -- “luatex” for luatex-plain
+local loadmodule = function (name)
+ require (fl_prefix .."-"..name)
+end
+
+loadmodule "log.lua" --- log messages
+--loadmodule "parsers.lua" --- new in 2.5; fonts.conf and syntax
+--loadmodule "configuration.lua" --- configuration options
+
+local log = luaotfload.log
+local logreport = log.report
+
+log.set_loglevel (initial_log_level)
+
+--[[doc--
+
+ Before \TeX Live 2013, \LUATEX had a bug that made ofm fonts fail
+ when requested with their extension; as a side effect, ofm fonts
+ became entirely unloadable when luaotfload was present. The
+ following lines are a patch for this bug. Their utility is
+ questionable, as they have not been necessary since \TeX Live 2013;
+ they should be removed in the next version.
+
+--doc]]--
+
+local Cs, P, lpegmatch = lpeg.Cs, lpeg.P, lpeg.match
+
+local p_dot, p_slash = P".", P"/"
+local p_suffix = (p_dot * (1 - p_dot - p_slash)^1 * P(-1)) / ""
+local p_removesuffix = Cs((p_suffix + 1)^1)
+
+local find_vf_file = function (name)
+ local fullname = kpsefind_file(name, "ovf")
+ if not fullname then
+ --fullname = kpsefind_file(file.removesuffix(name), "ovf")
+ fullname = kpsefind_file(lpegmatch(p_removesuffix, name), "ovf")
+ end
+ if fullname then
+ logreport ("log", 0, "main",
+ "loading virtual font file %s.", fullname)
+ end
+ return fullname
+end
+
+--[[doc--
+
+ \subsection{Preparing the Font Loader}
+ We treat the fontloader as a black box so behavior is consistent
+ between formats.
+ We load the fontloader code directly in the same fashion as the
+ Plain format \identifier{luatex-fonts} that is part of Context.
+ How this is executed depends on the presence on the
+ \emphasis{merged font loader code}.
+ In \identifier{luaotfload} this is contained in the file
+ \fileent{luaotfload-fontloader.lua}.
+ If this file cannot be found, the original libraries from \CONTEXT
+ of which the merged code was composed are loaded instead.
+ Since these files are not shipped with Luaotfload, an installation
+ of Context is required.
+ (Since we pull the fontloader directly from the Context minimals,
+ the necessary Context version is likely to be more recent than that
+ of other TeX distributions like Texlive.)
+ The imported font loader will call \luafunction{callback.register}
+ once while reading \fileent{font-def.lua}.
+ This is unavoidable unless we modify the imported files, but
+ harmless if we make it call a dummy instead.
+ However, this problem might vanish if we decide to do the merging
+ ourselves, like the \identifier{lualibs} package does.
+ With this step we would obtain the freedom to load our own
+ overrides in the process right where they are needed, at the cost
+ of losing encapsulation.
+ The decision on how to progress is currently on indefinite hold.
+
+--doc]]--
+
+local starttime = os.gettimeofday ()
+local trapped_register = callback.register
+callback.register = dummy_function
+
+--[[doc--
+
+ By default, the fontloader requires a number of \emphasis{private
+ attributes} for internal use.
+ These must be kept consistent with the attribute handling methods
+ as provided by \identifier{luatexbase}.
+ Our strategy is to override the function that allocates new
+ attributes before we initialize the font loader, making it a
+ wrapper around \luafunction{luatexbase.new_attribute}.\footnote{%
+ Many thanks, again, to Hans Hagen for making this part
+ configurable!
+ }
+ The attribute identifiers are prefixed “\fileent{luaotfload@}” to
+ avoid name clashes.
+
+--doc]]--
+
+do
+ local new_attribute = luatexbase.new_attribute
+ local the_attributes = luatexbase.attributes
+
+ attributes = attributes or { }
+
+ attributes.private = function (name)
+ local attr = "luaotfload@" .. name --- used to be: “otfl@”
+ local number = the_attributes[attr]
+ if not number then
+ number = new_attribute(attr)
+ end
+ return number
+ end
+end
+
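+--[[doc--
+
+ For instance (the attribute name is chosen purely for
+ illustration), a call such as
+
+   local a_state = attributes.private "state"
+
+ allocates, or retrieves if it has been allocated before, the
+ luatexbase attribute registered under the name “luaotfload@state”.
+
+--doc]]--
+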
+--[[doc--
+
+ These next lines replicate the behavior of
+ \fileent{luatex-fonts.lua}.
+
+--doc]]--
+
+local context_environment = { }
+
+local push_namespaces = function ()
+ logreport ("log", 1, "main", "push namespace for font loader")
+ local normalglobal = { }
+ for k, v in next, _G do
+ normalglobal[k] = v
+ end
+ return normalglobal
+end
+
+local pop_namespaces = function (normalglobal, isolate)
+ if normalglobal then
+ local _G = _G
+ local mode = "non-destructive"
+ if isolate then mode = "destructive" end
+ logreport ("log", 1, "main", "pop namespace from font loader -- " .. mode)
+ for k, v in next, _G do
+ if not normalglobal[k] then
+ context_environment[k] = v
+ if isolate then
+ _G[k] = nil
+ end
+ end
+ end
+ for k, v in next, normalglobal do
+ _G[k] = v
+ end
+ -- just to be sure:
+ setmetatable(context_environment,_G)
+ else
+ logreport ("both", 0, "main",
+ "irrecoverable error during pop_namespace: no globals to restore")
+ os.exit()
+ end
+end
+
+luaotfload.context_environment = context_environment
+luaotfload.push_namespaces = push_namespaces
+luaotfload.pop_namespaces = pop_namespaces
+
+local our_environment = push_namespaces()
+
+--[[doc--
+
+ The font loader requires that the attribute with index zero be
+ zero. We happily oblige.
+ (Cf. \fileent{luatex-fonts-nod.lua}.)
+
+--doc]]--
+
+tex.attribute[0] = 0
+
+--[[doc--
+
+ Now that things are sorted out we can finally load the fontloader.
+
+--doc]]--
+
+loadmodule "fontloader.lua"
+---loadmodule"font-odv.lua" --- <= Devanagari support from Context
+
+if fonts then
+
+ if not fonts._merge_loaded_message_done_ then
+ logreport ("log", 5, "main", [["I am using the merged fontloader here.]])
+ logreport ("log", 5, "main", [[ If you run into problems or experience unexpected]])
+ logreport ("log", 5, "main", [[ behaviour, and if you have ConTeXt installed you can try]])
+ logreport ("log", 5, "main", [[ to delete the file 'luaotfload-fontloader.lua' as I might]])
+ logreport ("log", 5, "main", [[ then use the possibly updated libraries. The merged]])
+ logreport ("log", 5, "main", [[ version is not supported as it is a frozen instance.]])
+ logreport ("log", 5, "main", [[ Problems can be reported to the ConTeXt mailing list."]])
+ end
+ fonts._merge_loaded_message_done_ = true
+
+else--- the loading sequence is known to change, so this might have to
+ --- be updated with future updates!
+ --- do not modify it though unless there is a change to the merged
+ --- package!
+ loadmodule("l-lua.lua")
+ loadmodule("l-lpeg.lua")
+ loadmodule("l-function.lua")
+ loadmodule("l-string.lua")
+ loadmodule("l-table.lua")
+ loadmodule("l-io.lua")
+ loadmodule("l-file.lua")
+ loadmodule("l-boolean.lua")
+ loadmodule("l-math.lua")
+ loadmodule("util-str.lua")
+ loadmodule('luatex-basics-gen.lua')
+ loadmodule('data-con.lua')
+ loadmodule('luatex-basics-nod.lua')
+ loadmodule('font-ini.lua')
+ loadmodule('font-con.lua')
+ loadmodule('luatex-fonts-enc.lua')
+ loadmodule('font-cid.lua')
+ loadmodule('font-map.lua')
+ loadmodule('luatex-fonts-syn.lua')
+ loadmodule('luatex-fonts-tfm.lua')
+ loadmodule('font-oti.lua')
+ loadmodule('font-otf.lua')
+ loadmodule('font-otb.lua')
+ loadmodule('luatex-fonts-inj.lua') --> since 2014-01-07, replaces node-inj.lua
+ loadmodule('font-ota.lua')
+ loadmodule('luatex-fonts-otn.lua') --> since 2014-01-07, replaces font-otn.lua
+ loadmodule('font-otp.lua') --> since 2013-04-23
+ loadmodule('luatex-fonts-lua.lua')
+ loadmodule('font-def.lua')
+ loadmodule('luatex-fonts-def.lua')
+ loadmodule('luatex-fonts-ext.lua')
+ loadmodule('luatex-fonts-cbk.lua')
+end --- non-merge fallback scope
+
+--[[doc--
+
+ Here we adjust the globals created during font loader
+ initialization. If the second argument to
+ \luafunction{pop_namespaces()} is \verb|true| this will restore the
+ state of \luafunction{_G}, eliminating every global generated since
+ the last call to \luafunction{push_namespaces()}. At the moment we
+ see no reason to do this, and since the font loader is considered
+ an essential part of \identifier{luatex} as well as a very well
+ organized piece of code, we happily concede it the right to add to
+ \luafunction{_G} if needed.
+
+--doc]]--
+
+pop_namespaces(our_environment, false)-- true)
+
+logreport ("both", 0, "main",
+ "fontloader loaded in %0.3f seconds", os.gettimeofday()-starttime)
+
+--[[doc--
+
+ \subsection{Callbacks}
+ After the fontloader is ready we can restore the callback trap from
+ \identifier{luatexbase}.
+
+--doc]]--
+
+callback.register = trapped_register
+
+--[[doc--
+
+ We do our own callback handling with the means provided by
+ luatexbase.
+ Note: \luafunction{pre_linebreak_filter} and
+ \luafunction{hpack_filter} are coupled in \CONTEXT in the concept
+ of \emphasis{node processor}.
+
+--doc]]--
+
+add_to_callback("pre_linebreak_filter",
+ nodes.simple_font_handler,
+ "luaotfload.node_processor",
+ 1)
+add_to_callback("hpack_filter",
+ nodes.simple_font_handler,
+ "luaotfload.node_processor",
+ 1)
+add_to_callback("find_vf_file",
+ find_vf_file, "luaotfload.find_vf_file")
+
+loadmodule "override.lua" --- load glyphlist on demand
+
+--[[doc--
+
+ Now we load the modules written for \identifier{luaotfload}.
+
+--doc]]--
+
+loadmodule "parsers.lua" --- fonts.conf and syntax
+loadmodule "configuration.lua" --- configuration options
+
+if not config.actions.apply_defaults () then
+ logreport ("log", 0, "load", "Configuration unsuccessful.")
+end
+
+loadmodule "loaders.lua" --- Type1 font wrappers
+loadmodule "database.lua" --- Font management.
+loadmodule "colors.lua" --- Per-font colors.
+
+if not config.actions.reconfigure () then
+ logreport ("log", 0, "load", "Post-configuration hooks failed.")
+end
+
+--[[doc--
+
+ Relying on the \verb|name:| resolver for everything has been the
+ source of permanent trouble with the database.
+ With the introduction of the new syntax parser we now have enough
+ granularity to distinguish between the \XETEX emulation layer and
+ the genuine \verb|name:| and \verb|file:| lookups of \LUATEX-Fonts.
+ Another benefit is that we can now easily plug in or replace new
+ lookup behaviors if necessary.
+ The name resolver remains untouched, but it calls
+ \luafunction{fonts.names.resolve()} internally anyway (see
+ \fileent{luaotfload-database.lua}).
+
+--doc]]--
+
+local filesuffix = file.suffix
+local fileremovesuffix = file.removesuffix
+local request_resolvers = fonts.definers.resolvers
+local formats = fonts.formats
+local names = fonts.names
+formats.ofm = "type1"
+
+fonts.encodings.known = fonts.encodings.known or { }
+
+--[[doc--
+
+ \identifier{luaotfload} promises easy access to system fonts.
+ Without additional precautions, this cannot be achieved by
+ \identifier{kpathsea} alone, because it searches only the
+ \fileent{texmf} directories by default.
+ Although it is possible for \identifier{kpathsea} to include extra
+ paths by adding them to the \verb|OSFONTDIR| environment variable,
+ this is still short of the goal »\emphasis{it just works!}«.
+ When building the font database \identifier{luaotfload} scans
+ system font directories anyway, so we already have all the
+ information needed for looking up system fonts.
+ With the release version 2.2 the file names are indexed in the
+ database as well and we are ready to resolve \verb|file:| lookups
+ this way.
+ Thus we no longer need to call the \identifier{kpathsea} library in
+ most cases when looking up font files, only when generating the
+ database, and when verifying the existence of a file in the
+ \fileent{texmf} tree.
+
+--doc]]--
+
+local resolve_file = names.font_file_lookup
+
+local file_resolver = function (specification)
+ local name = resolve_file (specification.name)
+ local suffix = filesuffix(name)
+ if formats[suffix] then
+ specification.forced = stringlower (suffix)
+ specification.forcedname = file.removesuffix(name)
+ else
+ specification.name = name
+ end
+end
+
+request_resolvers.file = file_resolver
+
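+--[[doc--
+
+ For example (the font file serves only as an illustration), a
+ request like
+
+   \font\pagella = "file:texgyrepagella-regular.otf" at 10pt
+
+ is answered from the file name index of the font database; the
+ extension alone decides which format is forced.
+
+--doc]]--
+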
+--[[doc--
+
+ We classify as \verb|anon:| those requests that have neither a
+ prefix nor brackets. According to Khaled\footnote{%
+ \url{https://github.com/phi-gamma/luaotfload/issues/4#issuecomment-17090553}.
+ }
+ they are the \XETEX equivalent of a \verb|name:| request, so we
+ will be treating them as such.
+
+--doc]]--
+
+--request_resolvers.anon = request_resolvers.name
+
+--[[doc--
+
+ There is one drawback, though.
+ This syntax is also used for requesting fonts in \identifier{Type1}
+ (\abbrev{tfm}, \abbrev{ofm}) format.
+ These are essentially \verb|file:| lookups and must be caught
+ before the \verb|name:| resolver kicks in, lest they cause the
+ database to update.
+ Even if we were to require the \verb|file:| prefix for all
+ \identifier{Type1} requests, tests have shown that certain fonts
+ still include further fonts (e.~g. \fileent{omlgcb.ofm} will ask
+ for \fileent{omsecob.tfm}) \emphasis{using the old syntax}.
+ For this reason, we introduce an extra check with an early return.
+
+--doc]]--
+
+local type1_formats = { "tfm", "ofm", "TFM", "OFM", }
+
+request_resolvers.anon = function (specification)
+ local name = specification.name
+ for i=1, #type1_formats do
+ local format = type1_formats[i]
+ if resolvers.findfile(name, format) then
+ specification.forcedname = file.addsuffix(name, format)
+ specification.forced = format
+ return
+ end
+ end
+ --- under some weird circumstances absolute paths get
+ --- passed to the definer; we have to catch them
+ --- before the name: resolver misinterprets them.
+ name = specification.specification
+ local exists, _ = lfsisfile(name)
+ if exists then --- garbage; we do this because we are nice,
+ --- not because it is correct
+ logreport ("log", 1, "load", "file %q exists", name)
+ logreport ("log", 1, "load",
+ "... overriding borked anon: lookup with path: lookup")
+ specification.name = name
+ request_resolvers.path(specification)
+ return
+ end
+ request_resolvers.name(specification)
+end
+
+--[[doc--
+
+ Prior to version 2.2, \identifier{luaotfload} did not distinguish
+ \verb|file:| and \verb|path:| lookups, causing complications with
+ the resolver.
+ Now we test if the requested name is an absolute path in the file
+ system, otherwise we fall back to the \verb|file:| lookup.
+
+--doc]]--
+
+request_resolvers.path = function (specification)
+ local name = specification.name
+ local exists, _ = lfsisfile(name)
+ if not exists then -- resort to file: lookup
+ logreport ("log", 0, "load",
+ "path lookup of %q unsuccessful, falling back to file:",
+ name)
+ file_resolver (specification)
+ else
+ local suffix = filesuffix (name)
+ if formats[suffix] then
+ specification.forced = stringlower (suffix)
+ specification.name = file.removesuffix(name)
+ specification.forcedname = name
+ else
+ specification.name = name
+ end
+ end
+end
+
+--[[doc--
+
+ {\bfseries EXPERIMENTAL}:
+ \identifier{kpse}-only resolver, for those who can do without
+ system fonts.
+
+--doc]]--
+
+request_resolvers.kpse = function (specification)
+ local name = specification.name
+ local suffix = filesuffix(name)
+ if suffix and formats[suffix] then
+ name = file.removesuffix(name)
+ if resolvers.findfile(name, suffix) then
+ specification.forced = stringlower (suffix)
+ specification.forcedname = name
+ return
+ end
+ end
+ for t, format in next, formats do --- brute force
+ if kpse.find_file (name, format) then
+ specification.forced = t
+ specification.name = name
+ return
+ end
+ end
+end
+
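+--[[doc--
+
+ For example (the file name is illustrative only), a request like
+
+   \font\lmten = "kpse:lmroman10-regular.otf" at 10pt
+
+ restricts the lookup to files reachable via kpathsea and bypasses
+ the font database entirely.
+
+--doc]]--
+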
+--[[doc--
+
+ The \verb|name:| resolver.
+
+--doc]]--
+
+--- fonts.names.resolvers.name -- Customized version of the
+--- generic name resolver.
+
+request_resolvers.name = function (specification)
+ local resolver = names.resolve_cached
+ if config.luaotfload.run.resolver == "normal" then
+ resolver = names.resolve_name
+ end
+ local resolved, subfont = resolver (specification)
+ if resolved then
+ logreport ("log", 0, "load", "Lookup/name: %q -> \"%s%s\"",
+ specification.name,
+ resolved,
+ subfont and stringformat ("(%d)", subfont) or "")
+ specification.resolved = resolved
+ specification.sub = subfont
+ specification.forced = stringlower (filesuffix (resolved) or "")
+ specification.forcedname = resolved
+ specification.name = fileremovesuffix (resolved)
+ else
+ file_resolver (specification)
+ end
+end
+
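+--[[doc--
+
+ For example (the font name is illustrative only), a request like
+
+   \font\iwona = "name:Iwona Light" at 10pt
+
+ is resolved through the database lookup above; whether the cached
+ or the uncached resolver is used depends on the “resolver” field of
+ the runtime configuration.
+
+--doc]]--
+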
+--[[doc--
+
+ Also {\bfseries EXPERIMENTAL}: custom file resolvers via callback.
+
+--doc]]--
+create_callback("luaotfload.resolve_font", "simple", dummy_function)
+
+request_resolvers.my = function (specification)
+ call_callback("luaotfload.resolve_font", specification)
+end
+
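+--[[doc--
+
+ A consumer package (all names below are invented) could plug in its
+ own resolver along these lines:
+
+   luatexbase.add_to_callback ("luaotfload.resolve_font",
+     function (specification)
+       --- fill in the fields the definer expects; cf. the resolvers
+       --- above (forced, forcedname, name, ...)
+     end,
+     "mypackage.resolve_font")
+
+ after which requests carrying the “my:” prefix are handed to that
+ function.
+
+--doc]]--
+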
+--[[doc--
+
+ We create a callback for patching fonts on the fly, to be used by
+ other packages.
+ It initially contains the empty function that we are going to
+ override below.
+
+--doc]]--
+
+create_callback("luaotfload.patch_font", "simple", dummy_function)
+
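+--[[doc--
+
+ For illustration (the description string is invented), a package
+ can register a handler like this:
+
+   luatexbase.add_to_callback ("luaotfload.patch_font",
+     function (tfmdata, specification)
+       --- inspect or adjust the font table before it is used
+     end,
+     "mypackage.patch_font")
+
+ The handler receives the font table and the original request, as
+ passed by the patched definer below.
+
+--doc]]--
+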
+--[[doc--
+
+ \subsection{\CONTEXT override}
+ \label{define-font}
+ We provide a simplified version of the original font definition
+ callback.
+
+--doc]]--
+
+local read_font_file = fonts.definers.read
+
+local definers = {
+ generic = read_font_file,
+ --- spec -> size -> id -> tfmdata
+ patch = function (specification, size, id)
+ local tfmdata = read_font_file (specification, size, id)
+ if type (tfmdata) == "table" and tfmdata.shared then
+ --- We need to test for the “shared” field here
+ --- or else the fontspec capheight callback will
+ --- operate on tfm fonts.
+ call_callback ("luaotfload.patch_font", tfmdata, specification)
+ end
+ return tfmdata
+ end,
+}
+
+reset_callback "define_font"
+
+--[[doc--
+
+ Finally we register the callbacks.
+
+--doc]]--
+
+local definer = config.luaotfload.run.definer
+add_to_callback ("define_font", definers[definer], "luaotfload.define_font", 1)
+
+loadmodule "features.lua" --- font request and feature handling
+loadmodule "letterspace.lua" --- extra character kerning
+loadmodule "auxiliary.lua" --- additional high-level functionality
+
+luaotfload.aux.start_rewrite_fontname () --- to be migrated to fontspec
+
+-- vim:tw=79:sw=4:ts=4:et
diff --git a/src/luaotfload-override.lua b/src/luaotfload-override.lua
new file mode 100644
index 0000000..b75530b
--- /dev/null
+++ b/src/luaotfload-override.lua
@@ -0,0 +1,52 @@
+if not modules then modules = { } end modules ["luaotfload-override"] = {
+ version = "2.5",
+ comment = "companion to Luaotfload",
+ author = "Khaled Hosny, Elie Roux, Philipp Gesang",
+ copyright = "Luaotfload Development Team",
+ license = "GNU GPL v2.0"
+}
+
+local findfile = resolvers.findfile
+local encodings = fonts.encodings
+
+local log = luaotfload.log
+local report = log.report
+
+--[[doc--
+
+ Adobe Glyph List.
+ -------------------------------------------------------------------
+
+ Context provides a somewhat different font-age.lua from an unclear
+ origin. Unfortunately, the file name it reads from is hard-coded
+ in font-enc.lua, so we have to replace the entire table.
+
+ This shouldn’t cause any complications. Due to its implementation
+ the glyph list will be loaded upon loading an OTF or TTF for the
+ first time during a TeX run. (If one sticks to TFM/OFM then it is
+ never read at all.) For this reason we can install a metatable that
+ looks up the file of our choosing and only falls back to the
+ Context one in case it cannot be found.
+
+--doc]]--
+
+encodings.agl = { }
+
+setmetatable(fonts.encodings.agl, { __index = function (t, k)
+ if k ~= "unicodes" then
+ return nil
+ end
+ local glyphlist = findfile "luaotfload-glyphlist.lua"
+ if glyphlist then
+ report ("log", 1, "load", "loading the Adobe glyph list")
+ else
+ glyphlist = findfile "font-age.lua"
+ report ("both", 0, "load",
+ "loading the extended glyph list from ConTeXt")
+ end
+ local unicodes = dofile(glyphlist)
+ encodings.agl = { unicodes = unicodes }
+ return unicodes
+end })
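+
+--[[doc--
+
+  Usage sketch: the first access to the “unicodes” field triggers the
+  loader; afterwards the agl table has been replaced wholesale, so
+  subsequent accesses bypass the metatable. (The glyph name below is
+  merely an example.)
+
+      local agl = fonts.encodings.agl.unicodes --- loads the glyph list
+      local cp  = agl ["Aacute"]               --- plain table lookup
+
+--doc]]--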
+
+-- vim:tw=71:sw=4:ts=4:expandtab
diff --git a/src/luaotfload-parsers.lua b/src/luaotfload-parsers.lua
new file mode 100644
index 0000000..3eeb614
--- /dev/null
+++ b/src/luaotfload-parsers.lua
@@ -0,0 +1,701 @@
+#!/usr/bin/env texlua
+-------------------------------------------------------------------------------
+-- FILE: luaotfload-parsers.lua
+-- DESCRIPTION: various lpeg-based parsers used in Luaotfload
+-- REQUIREMENTS: Luaotfload > 2.4
+-- AUTHOR: Philipp Gesang (Phg), <phg42.2a@gmail.com>
+-- VERSION: same as Luaotfload
+-- CREATED: 2014-01-14 10:15:20+0100
+-------------------------------------------------------------------------------
+--
+
+if not modules then modules = { } end modules ['luaotfload-parsers'] = {
+ version = "2.5",
+ comment = "companion to luaotfload-main.lua",
+ author = "Philipp Gesang",
+ copyright = "Luaotfload Development Team",
+ license = "GNU GPL v2.0"
+}
+
+luaotfload = luaotfload or { }
+luaotfload.parsers = luaotfload.parsers or { }
+local parsers = luaotfload.parsers
+
+local rawset = rawset
+
+local lpeg = require "lpeg"
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+local lpegmatch = lpeg.match
+local C, Cc, Cf = lpeg.C, lpeg.Cc, lpeg.Cf
+local Cg, Cmt, Cs, Ct = lpeg.Cg, lpeg.Cmt, lpeg.Cs, lpeg.Ct
+
+local kpse = kpse
+local kpseexpand_path = kpse.expand_path
+local kpsereadable_file = kpse.readable_file
+
+local file = file
+local filejoin = file.join
+local filedirname = file.dirname
+
+local io = io
+local ioopen = io.open
+
+local log = luaotfload.log
+local logreport = log.report
+
+local string = string
+local stringsub = string.sub
+local stringfind = string.find
+local stringlower = string.lower
+
+local mathceil = math.ceil
+
+local lfs = lfs
+local lfsisfile = lfs.isfile
+local lfsisdir = lfs.isdir
+
+-------------------------------------------------------------------------------
+--- COMMON PATTERNS
+-------------------------------------------------------------------------------
+
+local dot = P"."
+local colon = P":"
+local semicolon = P";"
+local comma = P","
+local noncomma = 1 - comma
+local slash = P"/"
+local backslash = P"\\"
+local equals = P"="
+local dash = P"-"
+local gartenzaun = P"#"
+local lbrk, rbrk = P"[", P"]"
+local squote = P"'"
+local dquote = P"\""
+
+local newline = P"\n"
+local returnchar = P"\r"
+local spacing = S" \t\v"
+local linebreak = S"\n\r"
+local whitespace = spacing + linebreak
+local ws = spacing^0
+local xmlws = whitespace^1
+local eol = P"\n\r" + P"\r\n" + linebreak
+
+local digit = R"09"
+local alpha = R("az", "AZ")
+local anum = alpha + digit
+local decimal = digit^1 * (dot * digit^0)^-1
+
+-------------------------------------------------------------------------------
+--- FONTCONFIG
+-------------------------------------------------------------------------------
+
+--[[doc--
+
+ For fonts installed on the operating system, there are several
+ options to make Luaotfload index them:
+
+  - If OSFONTDIR is set (which is the case under Windows by default
+    but not on other operating systems), Luaotfload scans it at the
+    same time as the texmf tree, in the function scan_texmf_fonts().
+
+  - Otherwise
+    - under Windows and Mac OS X, we take a look at some hardcoded
+      directories,
+    - under Unix, it reads /etc/fonts/fonts.conf and processes the
+      directories specified there.
+
+  This means that if you keep fonts in unusual directories, you need
+  to add those directories to OSFONTDIR.
+
+ Beware: OSFONTDIR is a kpathsea variable, so fonts found in these
+ paths, though technically system fonts, are registered in the
+ category “texmf”, not “system”. This may have consequences for the
+ lookup order when a font file (or a font with the same name
+ information) is located in both the system and the texmf tree.
+
+--doc]]--
+
+local tag_name = C(alpha^1)
+local comment = P"<!--" * (1 - P"--")^0 * P"-->"
+
+---> header specifica
+local xml_declaration = P"<?xml" * (1 - P"?>")^0 * P"?>"
+local xml_doctype = P"<!DOCTYPE" * xmlws
+ * "fontconfig" * (1 - P">")^0 * P">"
+local header = xml_declaration^-1
+ * (xml_doctype + comment + xmlws)^0
+
+---> enforce root node
+local root_start = P"<" * xmlws^-1 * P"fontconfig" * xmlws^-1 * P">"
+local root_stop = P"</" * xmlws^-1 * P"fontconfig" * xmlws^-1 * P">"
+
+local dquote, squote = P[["]], P"'"
+local xml_namestartchar = S":_" + alpha --- ascii only, funk the rest
+local xml_namechar = S":._" + alpha + digit
+local xml_name = xmlws^-1
+ * C(xml_namestartchar * xml_namechar^0)
+local xml_attvalue = dquote * C((1 - S[[%&"]])^1) * dquote * xmlws^-1
+ + squote * C((1 - S[[%&']])^1) * squote * xmlws^-1
+local xml_attr = Cg(xml_name * P"=" * xml_attvalue)
+local xml_attr_list = Cf(Ct"" * xml_attr^1, rawset)
+
+--[[doc--
+ scan_node creates a parser for a given xml tag.
+--doc]]--
+--- string -> lpeg_t
+local scan_node = function (tag)
+ --- Node attributes go into a table with the index “attributes”
+ --- (relevant for “prefix="xdg"” and the likes).
+ local p_tag = P(tag)
+ local with_attributes = P"<" * p_tag
+ * Cg(xml_attr_list, "attributes")^-1
+ * xmlws^-1
+ * P">"
+ local plain = P"<" * p_tag * xmlws^-1 * P">"
+ local node_start = plain + with_attributes
+ local node_stop = P"</" * p_tag * xmlws^-1 * P">"
+ --- there is no nesting, the earth is flat ...
+ local node = node_start
+ * Cc(tag) * C(comment + (1 - node_stop)^1)
+ * node_stop
+ return Ct(node) -- returns {string, string [, attributes = { key = val }] }
+end
+
+--[[doc--
+ At the moment, the interesting tags are “dir” for
+ directory declarations, and “include” for including
+ further configuration files.
+
+ spec: http://freedesktop.org/software/fontconfig/fontconfig-user.html
+--doc]]--
+local include_node = scan_node"include"
+local dir_node = scan_node"dir"
+
+local element = dir_node
+ + include_node
+ + comment --> ignore
+ + P(1-root_stop) --> skip byte
+
+local root = root_start * Ct(element^0) * root_stop
+local p_cheapxml = header * root
+
+--lpeg.print(p_cheapxml) ---> 757 rules with v0.10
+
+--[[doc--
+ fonts_conf_scanner() handles configuration files.
+ It is called on an absolute path to a config file (e.g.
+ /home/luser/.config/fontconfig/fonts.conf) and returns a list
+ of the nodes it managed to extract from the file.
+--doc]]--
+--- string -> path list
+local fonts_conf_scanner = function (path)
+ local fh = ioopen(path, "r")
+ if not fh then
+ logreport("both", 3, "db", "Cannot open fontconfig file %s.", path)
+ return
+ end
+ local raw = fh:read"*all"
+ fh:close()
+
+ local confdata = lpegmatch(p_cheapxml, raw)
+ if not confdata then
+ logreport("both", 3, "db", "Cannot scan fontconfig file %s.", path)
+ return
+ end
+ return confdata
+end
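+
+--[[doc--
+
+  For instance, given a fonts.conf whose <fontconfig> root contains
+
+      <dir prefix="xdg">fonts</dir>
+      <include ignore_missing="yes">conf.d</include>
+
+  the scanner returns a node list along the lines of
+
+      { { "dir",     "fonts",  attributes = { prefix = "xdg" } },
+        { "include", "conf.d", attributes = { ignore_missing = "yes" } } }
+
+--doc]]--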
+
+local p_conf = P".conf" * P(-1)
+local p_filter = (1 - p_conf)^1 * p_conf
+
+local conf_filter = function (path)
+ if lpegmatch (p_filter, path) then
+ return true
+ end
+ return false
+end
+
+--[[doc--
+  read_fonts_conf_indeed() is called with eight arguments.
+  The first four are
+      · the path to the file
+      · the expanded $HOME
+      · the expanded $XDG_CONFIG_HOME
+      · the expanded $XDG_DATA_HOME
+  Arguments five through seven are the tables that carry the state
+  (directories collected, files already read, directories already
+  registered); they are always returned. The last argument is the
+  directory scanner used to enumerate further configuration files.
+--doc]]--
+--- string -> string -> string -> string -> tab -> tab -> tab
+---        -> (string -> (string -> bool) -> string list)
+---        -> (tab * tab * tab)
+local read_fonts_conf_indeed
+read_fonts_conf_indeed = function (start, home, xdg_config_home,
+ xdg_data_home,
+ acc, done, dirs_done,
+ find_files)
+
+ local paths = fonts_conf_scanner(start)
+ if not paths then --- nothing to do
+ return acc, done, dirs_done
+ end
+
+ for i=1, #paths do
+ local pathobj = paths[i]
+ local kind, path = pathobj[1], pathobj[2]
+ local attributes = pathobj.attributes
+
+ if kind == "dir" then
+ if attributes and attributes.prefix == "xdg" then
+ path = filejoin(xdg_data_home, path)
+ end
+ if stringsub(path, 1, 1) == "~" then
+ path = filejoin(home, stringsub(path, 2))
+ end
+ --- We exclude paths with texmf in them, as they should be
+ --- found anyway; also duplicates are ignored by checking
+ --- if they are elements of dirs_done.
+ ---
+ --- FIXME does this mean we cannot access paths from
+ --- distributions (e.g. Context minimals) installed
+ --- separately?
+ if not (stringfind(path, "texmf") or dirs_done[path]) then
+ acc[#acc+1] = path
+ dirs_done[path] = true
+ end
+
+ elseif kind == "include" then
+ if attributes and attributes.prefix == "xdg" then
+ path = filejoin(xdg_config_home, path)
+ end
+ --- here the path can be one of four things: a directory or a
+ --- file, given as an absolute or a relative path.
+ if stringsub(path, 1, 1) == "~" then
+ path = filejoin(home, stringsub(path, 2))
+ elseif --- if the path is relative, we make it absolute
+ not ( lfsisfile(path) or lfsisdir(path) )
+ then
+ path = filejoin(filedirname(start), path)
+ end
+ if lfsisfile(path)
+ and kpsereadable_file(path)
+ and not done[path]
+ then
+ --- we exclude paths with texmf in them, as they should
+ --- be found otherwise
+ acc = read_fonts_conf_indeed(
+ path, home, xdg_config_home, xdg_data_home,
+ acc, done, dirs_done)
+ elseif lfsisdir(path) then --- arrow code ahead
+ local config_files = find_files (path, conf_filter)
+ for _, filename in next, config_files do
+ if not done[filename] then
+ acc = read_fonts_conf_indeed(
+ filename, home, xdg_config_home, xdg_data_home,
+ acc, done, dirs_done)
+ end
+ end
+ end --- match “kind”
+ end --- iterate paths
+ end
+
+ --inspect(acc)
+ --inspect(done)
+ return acc, done, dirs_done
+ end --- read_fonts_conf_indeed()
+
+--[[doc--
+ read_fonts_conf() sets up an accumulator and two sets
+ for tracking what’s been done.
+
+ Also, the environment variables HOME, XDG_DATA_HOME and
+ XDG_CONFIG_HOME -- which are constants anyways -- are expanded
+ so we don’t have to repeat that over and over again as with the
+ old parser. Now they’re just passed on to every call of
+ read_fonts_conf_indeed().
+--doc]]--
+
+--- string list -> (string -> (string -> bool) -> string list) -> string list
+
+local read_fonts_conf = function (path_list, find_files)
+ local home = kpseexpand_path"~" --- could be os.getenv"HOME"
+ local xdg_config_home = kpseexpand_path"$XDG_CONFIG_HOME"
+ if xdg_config_home == "" then xdg_config_home = filejoin(home, ".config") end
+ local xdg_data_home = kpseexpand_path"$XDG_DATA_HOME"
+ if xdg_data_home == "" then xdg_data_home = filejoin(home, ".local/share") end
+ local acc = { } ---> list: paths collected
+ local done = { } ---> set: files inspected
+ local dirs_done = { } ---> set: dirs in list
+ for i=1, #path_list do --- we keep the state between files
+ acc, done, dirs_done = read_fonts_conf_indeed(
+ path_list[i], home, xdg_config_home,
+ xdg_data_home,
+ acc, done, dirs_done,
+ find_files)
+ end
+ return acc
+end
+
+luaotfload.parsers.read_fonts_conf = read_fonts_conf
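+
+--[[doc--
+
+  Call sketch: the caller supplies both the initial list of
+  configuration files and the directory walker; the database module
+  passes its own implementation of the latter.
+
+      local font_dirs = luaotfload.parsers.read_fonts_conf (
+          { "/etc/fonts/fonts.conf" },
+          find_files) --- (dir, filter) -> list of matching files
+
+  The return value is the accumulated list of font directories.
+
+--doc]]--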
+
+
+
+-------------------------------------------------------------------------------
+--- MISC PARSERS
+-------------------------------------------------------------------------------
+
+
+local trailingslashes = slash^1 * P(-1)
+local stripslashes = C((1 - trailingslashes)^0)
+parsers.stripslashes = stripslashes
+
+local splitcomma = Ct((C(noncomma^1) + comma)^1)
+parsers.splitcomma = splitcomma
+
+
+
+-------------------------------------------------------------------------------
+--- FONT REQUEST
+-------------------------------------------------------------------------------
+
+
+--[[doc------------------------------------------------------------------------
+
+ The luaotfload font request syntax (see manual)
+ has a canonical form:
+
+ \font<csname>=<prefix>:<identifier>:<features>
+
+ where
+ <csname> is the control sequence that activates the font
+ <prefix> is either “file” or “name”, determining the lookup
+ <identifier> is either a file name (no path) or a font
+ name, depending on the lookup
+ <features> is a list of switches or options, separated by
+ semicolons or commas; a switch is of the form “+” foo
+ or “-” foo, options are of the form lhs “=” rhs
+
+ however, to ensure backward compatibility we also have
+ support for Xetex-style requests.
+
+ for the Xetex emulation see:
+ · The XeTeX Reference Guide by Will Robertson, 2011
+ · The XeTeX Companion by Michel Goosens, 2010
+ · About XeTeX by Jonathan Kew, 2005
+
+
+ caveat emptor.
+
+ the request is parsed into one of **four** different lookup
+ categories: the regular ones, file and name, as well as the
+ Xetex compatibility ones, path and anon. (maybe a better choice
+ of identifier would be “ambig”.)
+
+ according to my reconstruction, the correct chaining of the
+ lookups for each category is as follows:
+
+ | File -> ( db/filename lookup )
+
+ | Name -> ( db/name lookup,
+ db/filename lookup )
+
+ | Path -> ( db/filename lookup,
+ fullpath lookup )
+
+ | Anon -> ( kpse.find_file(), // <- for tfm, ofm
+ db/name lookup,
+ db/filename lookup,
+ fullpath lookup )
+
+ caching of successful lookups is essential. we now as of v2.2
+ have a lookup cache that is stored in a separate file. it
+ pertains only to name: lookups, and is described in more detail
+ in luaotfload-database.lua.
+
+-------------------------------------------------------------------------------
+
+ One further incompatibility between Xetex and Luatex-Fonts consists
+ in their option list syntax: apparently, Xetex requires key-value
+ options to be prefixed by a "+" (ascii “plus”) character. We
+ silently accept this as well, dropping the first byte if it is a
+ plus or minus character.
+
+ Reference: https://github.com/lualatex/luaotfload/issues/79#issuecomment-18104483
+
+--doc]]------------------------------------------------------------------------
+
+
+local handle_normal_option = function (key, val)
+ val = stringlower(val)
+ --- the former “toboolean()” handler
+ if val == "true" then
+ val = true
+ elseif val == "false" then
+ val = false
+ end
+ return key, val
+end
+
+--[[doc--
+
+ Xetex style indexing begins at zero which we just increment before
+ passing it along to the font loader. Ymmv.
+
+--doc]]--
+
+local handle_xetex_option = function (key, val)
+ val = stringlower(val)
+ local numeric = tonumber(val) --- decimal only; keeps colors intact
+ if numeric then --- ugh
+ if mathceil(numeric) == numeric then -- integer, possible index
+ val = tostring(numeric + 1)
+ end
+ elseif val == "true" then
+ val = true
+ elseif val == "false" then
+ val = false
+ end
+ return key, val
+end
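+
+--[[doc--
+
+  E.g. a Xetex-style fragment “+salt=0” arrives here as the pair
+  ("salt", "0") and leaves as ("salt", "1"), whereas something like
+  ("color", "ff0000") passes through unchanged because it is not a
+  decimal number. (Feature names chosen for illustration only.)
+
+--doc]]--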
+
+--[[doc--
+
+ Instead of silently ignoring invalid options we emit a warning to
+ the log.
+
+ Note that we have to return a pair to please rawset(). This creates
+ an entry on the resulting features hash which will later be removed
+ during set_default_features().
+
+--doc]]--
+
+local handle_invalid_option = function (opt)
+ logreport("log", 0, "load", "font option %q unknown.", opt)
+ return "", false
+end
+
+--[[doc--
+
+ Dirty test if a file: request is actually a path: lookup; don’t
+ ask! Note this fails on Windows-style absolute paths. These will
+ *really* have to use the correct request.
+
+--doc]]--
+
+local check_garbage = function (_,i, garbage)
+ if stringfind(garbage, "/") then
+ logreport("log", 0, "load", --- ffs use path!
+ "warning: path in file: lookups is deprecated; ")
+ logreport("log", 0, "load", "use bracket syntax instead!")
+ logreport("log", 0, "load",
+ "position: %d; full match: %q",
+ i, garbage)
+ return true
+ end
+ return false
+end
+
+local featuresep = comma + semicolon
+
+--- modifiers ---------------------------------------------------------
+--[[doc--
+ The slash notation: called “modifiers” (Kew) or “font options”
+ (Robertson, Goosens).
+ We only support the shorthands for the italic / bold / bold italic
+ shapes, as well as setting the optical size; the rest is ignored.
+--doc]]--
+local style_modifier = (P"BI" + P"IB" + P"bi" + P"ib" + S"biBI")
+ / stringlower
+local size_modifier = S"Ss" * P"=" --- optical size
+ * Cc"optsize" * C(decimal)
+local other_modifier = P"AAT" + P"aat" --- apple stuff; unsupported
+ + P"ICU" + P"icu" --- not applicable
+ + P"GR" + P"gr" --- sil stuff; unsupported
+local garbage_modifier = ((1 - colon - slash)^0 * Cc(false))
+local modifier = slash * (other_modifier --> ignore
+ + Cs(style_modifier) --> collect
+ + Ct(size_modifier) --> collect
+ + garbage_modifier) --> warn
+local modifier_list = Cg(Ct(modifier^0), "modifiers")
+
+--- lookups -----------------------------------------------------------
+local fontname = C((1-S":(/")^1) --- like luatex-fonts
+local unsupported = Cmt((1-S":(")^1, check_garbage)
+local prefixed = P"name:" * ws * Cg(fontname, "name")
+--- initially we intended file: to emulate the behavior of
+--- luatex-fonts, i.e. no paths allowed. after all, we do have XeTeX
+--- emulation with the path lookup and it interferes with db lookups.
+--- turns out fontspec and other widely used packages rely on file:
+--- with paths already, so we’ll add a less strict rule here. anyways,
+--- we’ll emit a warning.
+ + P"file:" * ws * Cg(unsupported, "path")
+ + P"file:" * ws * Cg(fontname, "file")
+--- EXPERIMENTAL: kpse lookup
+ + P"kpse:" * ws * Cg(fontname, "kpse")
+--- EXPERIMENTAL: custom lookup
+ + P"my:" * ws * Cg(fontname, "my")
+local unprefixed = Cg(fontname, "anon")
+local path_lookup = lbrk * Cg(C((1-rbrk)^1), "path") * rbrk
+
+--- features ----------------------------------------------------------
+local field_char = anum + S"+-." --- sic!
+local field = field_char^1
+--- assignments are “lhs=rhs”
+--- or “+lhs=rhs” (Xetex-style)
+--- switches are “+key” | “-key”
+local normal_option = C(field) * ws * equals * ws * C(field) * ws
+local xetex_option = P"+" * ws * normal_option
+local ignore_option = (1 - equals - featuresep)^1
+ * equals
+ * (1 - featuresep)^1
+local assignment = xetex_option / handle_xetex_option
+ + normal_option / handle_normal_option
+ + ignore_option / handle_invalid_option
+local switch = P"+" * ws * C(field) * Cc(true)
+ + P"-" * ws * C(field) * Cc(false)
+ + C(field) * Cc(true) --- default
+local feature_expr = ws * Cg(assignment + switch) * ws
+local option = feature_expr
+local feature_list = Cf(Ct""
+ * option
+ * (featuresep * option^-1)^0
+ , rawset)
+ * featuresep^-1
+
+--- other -------------------------------------------------------------
+--- This rule is present in the original parser. It sets the “sub”
+--- field of the specification which allows addressing a specific
+--- font inside a TTC container. Neither in Luatex-Fonts nor in
+--- Luaotfload is this documented, so we might as well silently drop
+--- it. However, as backward compatibility is one of our prime goals we
+--- just insert it here and leave it undocumented until someone cares
+--- to ask. (Note: afair subfonts are numbered, but this rule matches a
+--- string; I won’t mess with it though until someone reports a
+--- problem.)
+--- local subvalue = P("(") * (C(P(1-S("()"))^1)/issub) * P(")") -- for Kim
+--- Note to self: subfonts apparently start at index 0. Tested with
+--- Cambria.ttc that includes “Cambria Math” at 0 and “Cambria” at 1.
+--- Other values cause luatex to segfault.
+local subfont = P"(" * Cg((1 - S"()")^1, "sub") * P")"
+--- top-level rules ---------------------------------------------------
+--- \font\foo=<specification>:<features>
+local features = Cg(feature_list, "features")
+local specification = (prefixed + unprefixed)
+ * subfont^-1
+ * modifier_list^-1
+local font_request = Ct(path_lookup * (colon^-1 * features)^-1
+ + specification * (colon * features)^-1)
+
+-- lpeg.print(font_request)
+--- v2.5 parser: 1065 rules
+--- v1.2 parser: 230 rules
+
+luaotfload.parsers.font_request = font_request
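+
+--[[doc--
+
+  Example: a request such as
+
+      name:Latin Modern Roman:+smcp;-liga;mode=node
+
+  is parsed into a table along the lines of
+
+      { name      = "Latin Modern Roman",
+        modifiers = { },
+        features  = { smcp = true, liga = false, mode = "node" } }
+
+  (The font name is only an example; “modifiers” stays empty unless
+  the slash notation is used.)
+
+--doc]]--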
+
+-------------------------------------------------------------------------------
+--- INI FILES
+-------------------------------------------------------------------------------
+
+--[[doc--
+
+ Luaotfload uses the pervasive flavor of INI file that allows '#' in
+ addition to ';' to introduce comment lines (see git-config(1) for a
+ description of the syntax we’re targeting).
+
+--doc]]--
+
+local truth_ids = {
+ ["true"] = true,
+ ["1"] = true,
+ yes = true,
+ on = true,
+ ["false"] = false,
+ ["2"] = false,
+ no = false,
+ off = false,
+}
+
+local maybe_cast = function (var)
+ local bool = truth_ids[var]
+ if bool ~= nil then
+ return bool
+ end
+ return tonumber (var) or var
+end
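+
+--[[doc--
+
+  E.g. maybe_cast "on" and maybe_cast "1" yield true, maybe_cast "42"
+  yields the number 42, and anything unrecognized (say, "node") is
+  passed through as a string.
+
+--doc]]--
+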
+local escape = function (chr, repl)
+ return (backslash * P(chr) / (repl or chr))
+end
+local valid_escapes = escape "\""
+ + escape "\\"
+ + escape ("n", "\n")
+ + escape ("t", "\t")
+ + escape ("b", "\b")
+local comment_char = semicolon + gartenzaun
+local comment_line = ws * comment_char * (1 - eol)^0 * eol
+local blank_line = ws * eol
+local skip_line = comment_line + blank_line
+local ini_id_char = alpha + (dash / "_")
+local ini_id = Cs(alpha * ini_id_char^0) / stringlower
+local ini_value_char = (valid_escapes + (1 - newline - backslash - comment_char))
+local ini_value = (Cs (ini_value_char^0) / string.strip)
+ * (comment_char * (1 - eol)^0)^-1
+local ini_string_char = (valid_escapes + (1 - newline - dquote - backslash))
+local ini_string = dquote
+ * Cs (ini_string_char^0)
+ * dquote
+
+local ini_heading_title = Ct (Cg (ini_id, "title")
+ * (ws * Cg (ini_string / stringlower, "subtitle"))^-1)
+local ini_heading = lbrk * ws
+ * Cg (ini_heading_title, "section")
+ * ws * rbrk * ws * eol
+
+local ini_variable_full = Cg (ws
+ * ini_id
+ * ws
+ * equals
+ * ws
+ * (ini_string + (ini_value / maybe_cast))
+ * ws
+ * eol)
+local ini_variable_true = Cg (ws * ini_id * ws * eol * Cc (true))
+local ini_variable = ini_variable_full
+ + ini_variable_true
+ + skip_line
+local ini_variables = Cg (Cf (Ct "" * ini_variable^0, rawset), "variables")
+
+local ini_section = Ct (ini_heading * ini_variables)
+local ini_sections = skip_line^0 * ini_section^0
+local config = Ct (ini_sections)
+
+--[=[doc--
+
+ The INI parser converts an input of the form
+
+ [==[
+ [foo]
+ bar = baz
+ xyzzy = no
+ buzz
+
+ [lavernica "brutalitops"]
+ # It’s a locomotive that runs on us.
+ laan-ev = zip zop zooey ; jib-jab
+ Crouton = "Fibrosis \"\\ # "
+
+ ]==]
+
+ to a Lua table of the form
+
+ { { section = { title = "foo" },
+ variables = { bar = "baz",
+ xyzzy = false,
+ buzz = true } },
+ { section = { title = "boing",
+ subtitle = "brutalitops" },
+ variables = { ["laan-ev"] = "zip zop zooey",
+ crouton = "Fibrosis \"\\ # " } } }
+
+--doc]=]--
+
+luaotfload.parsers.config = config
+
+-- vim:ft=lua:tw=71:et:sts=4:ts=8
diff --git a/src/luaotfload-tool.lua b/src/luaotfload-tool.lua
new file mode 100755
index 0000000..8cfcac0
--- /dev/null
+++ b/src/luaotfload-tool.lua
@@ -0,0 +1,1617 @@
+#!/usr/bin/env texlua
+-----------------------------------------------------------------------
+-- FILE: luaotfload-tool.lua
+-- DESCRIPTION: database functionality
+-- REQUIREMENTS: luaotfload 2.5
+-- AUTHOR: Khaled Hosny, Élie Roux, Philipp Gesang
+-- VERSION: 2.5
+-- LICENSE: GPL v2.0
+-- MODIFIED: 2014-05-15 21:47:39+0200
+-----------------------------------------------------------------------
+
+luaotfload = luaotfload or { }
+local version = "2.5" --- <int: major>.<int: minor>-<int: fixes>
+luaotfload.version = version
+luaotfload.self = "luaotfload-tool"
+
+--[[doc--
+
+luaotfload-tool(1)
+
+This file was originally written (as \fileent{mkluatexfontdb.lua}) by
+Elie Roux and Khaled Hosny and, as a derived work of ConTeXt, is
+provided under the terms of the GPL v2.0 license as printed in full
+text in the manual (luaotfload.pdf).
+
+ \url{http://www.gnu.org/licenses/old-licenses/gpl-2.0.html}.
+
+This file is a wrapper for the luaotfload font names module
+(luaotfload-database.lua). It is part of the luaotfload bundle, please
+see the luaotfload documentation for more info. Report bugs to
+
+ \url{https://github.com/lualatex/luaotfload/issues}.
+
+--doc]]--
+
+kpse.set_program_name "luatex"
+
+--[[doc--
+
+ We test for Lua 5.1 by means of capability detection to see if
+ we’re running an outdated Luatex. If so, we bail.
+
+ \url{http://lua-users.org/wiki/LuaVersionCompatibility}
+
+--doc]]--
+
+
+local iowrite = io.write
+local kpsefind_file = kpse.find_file
+local mathfloor = math.floor
+local next = next
+local osdate = os.date
+local ostype = os.type
+local stringexplode = string.explode
+local stringformat = string.format
+local stringlower = string.lower
+local stringrep = string.rep
+local tableconcat = table.concat
+local texiowrite_nl = texio.write_nl
+local texiowrite = texio.write
+local tonumber = tonumber
+local type = type
+
+local runtime
+if _G.getfenv ~= nil then -- 5.1 or LJ
+ if _G.jit ~= nil then
+ runtime = { "jit", jit.version }
+ else
+ runtime = { "stock", _VERSION }
+ print "FATAL ERROR"
+ print "Luaotfload requires a Luatex version >=0.76."
+ print "Please update your TeX distribution!"
+ os.exit (-1)
+ end
+else -- 5.2
+ runtime = { "stock", _VERSION }
+end
+
+
+local C, Ct, P, S = lpeg.C, lpeg.Ct, lpeg.P, lpeg.S
+local lpegmatch = lpeg.match
+
+local loader_file = "luatexbase.loader.lua"
+local loader_path = assert(kpsefind_file(loader_file, "lua"),
+ "File '"..loader_file.."' not found")
+
+
+string.quoted = string.quoted or function (str)
+ return string.format("%q",str)
+end
+
+require (loader_path)
+
+--[[doc--
+
+ XXX:
+ Creating the config table will be moved to the common
+ initialization when the times comes.
+
+--doc]]--
+
+config = config or { }
+local config = config
+config.luaotfload = config.luaotfload or { }
+
+config.lualibs = config.lualibs or { }
+config.lualibs.verbose = false
+config.lualibs.prefer_merged = true
+config.lualibs.load_extended = true
+
+require "lualibs"
+local iosavedata = io.savedata
+local lfsisdir = lfs.isdir
+local lfsisfile = lfs.isfile
+local stringsplit = string.split
+local tableserialize = table.serialize
+local tablesortedkeys = table.sortedkeys
+local tabletohash = table.tohash
+
+--[[doc--
+\fileent{luatex-basics-gen.lua} calls functions from the
+\luafunction{texio.*} library; too much for our taste.
+We intercept them with dummies.
+
+Also, it sets up dummies in place of the tables created by the Context
+libraries. Since we have loaded the lualibs already this would cause
+collateral damage for some libraries whose namespace would be
+overridden. We employ our usual backup-restore strategy to work around
+this. (Postponing the loading of the lualibs code is not an option
+because the functionality is needed by basics-gen itself.)
+--doc]]--
+
+local dummy_function = function ( ) end
+local backup = {
+ write = texio.write,
+ write_nl = texio.write_nl,
+ utilities = utilities,
+}
+
+texio.write, texio.write_nl = dummy_function, dummy_function
+require"luaotfload-basics-gen.lua"
+
+texio.write, texio.write_nl = backup.write, backup.write_nl
+utilities = backup.utilities
+
+require "luaotfload-log.lua" --- this populates the luaotfload.log.* namespace
+require "luaotfload-parsers" --- fonts.conf, configuration, and request syntax
+require "luaotfload-configuration" --- configuration file handling
+require "luaotfload-database"
+require "alt_getopt"
+
+local names = fonts.names
+local sanitize_fontname = names.sanitize_fontname
+
+local log = luaotfload.log
+local report = log.report
+
+local help_messages = {
+ ["luaotfload-tool"] = [[
+
+Usage: %s [OPTIONS...]
+
+ Luaotfload font management and diagnostic utility.
+ This program is part of the Luaotfload package.
+
+ Valid options are:
+
+-------------------------------------------------------------------------------
+ VERBOSITY AND DIAGNOSTICS
+
+ -q --quiet don't output anything
+ -v --verbose=LEVEL be more verbose (print the searched directories)
+ -vv print the loaded fonts
+ -vvv print all steps of directory searching
+ --log=stdout redirect log output to stdout
+
+ -V --version print version and exit
+ -h --help print this message
+ --diagnose=CHECK run a self test procedure; one of "files",
+ "environment", "index", "permissions", or
+ "repository"
+
+-------------------------------------------------------------------------------
+ DATABASE
+
+ -u --update update the database
+ -n --no-reload suppress db update
+ --no-strip keep redundant information in db
+ -f --force force re-indexing all fonts
+ -L --local scan font files in $PWD
+ -c --no-compress do not gzip index file (text version only)
+ -l --flush-lookups empty lookup cache of font requests
+ -D --dry-run skip loading of fonts, just scan
+ --formats=[+|-]EXTENSIONS set, add, or subtract formats to index
+ -p --prefer-texmf prefer fonts in the TEXMF over system fonts
+ --max-fonts=N process at most N font files
+
+ --find="font name" query the database for a font name
+ -F --fuzzy look for approximate matches if --find fails
+ --limit=n limit display of fuzzy matches to <n>
+ (default: n = 1)
+
+ -i --info display basic font metadata
+ -I --inspect display detailed font metadata
+ -w --warnings display warnings generated by the
+ fontloader library
+
+ --list=<criterion> output list of entries by field <criterion>
+ --list=<criterion>:<value> restrict to entries with <criterion>=<value>
+ --fields=<f1>,<f2>,…,<fn> which fields <f> to print with --list
+ -b --show-blacklist show blacklisted files
+
+ --bisect=<directive> control database bisection: valid
+ directives are "start", "stop", "run", "status",
+ "good", "bad"
+
+The font database will be saved to
+ %s
+ %s
+
+-------------------------------------------------------------------------------
+ FONT CACHE
+
+ --cache=<directive> operate on font cache, where <directive> is
+ "show", "purge", or "erase"
+
+The font cache will be written to
+ %s
+
+]],
+ mkluatexfontdb = [[
+FATAL ERROR
+As of Luaotfload v2.5, legacy behavior is not supported anymore. Please
+update your scripts and/or habits! Kthxbye.
+]],
+ short = [[
+Usage: luaotfload-tool [--help] [--version] [--verbose=<lvl>]
+ [--update] [--force] [--prefer-texmf] [--local]
+ [--dry-run] [--formats=<extension list>]
+ [--find=<font name>] [--fuzzy] [--info] [--inspect]
+ [--list=<criterion>] [--fields=<field list>]
+ [--cache=<directive>] [--flush-lookups]
+ [--show-blacklist] [--diagnose=<procedure>]
+                       [--no-compress] [--no-strip]
+ [--max-fonts=<n>] [--bisect=<directive>]
+
+Enter 'luaotfload-tool --help' for a larger list of options.
+]]
+}
+
+local help_msg = function (version)
+ local template = help_messages[version]
+ local paths = config.luaotfload.paths
+ local names_plain = paths.index_path_lua
+ local names_gzip = names_plain .. ".gz"
+ local names_bin = paths.index_path_luc
+
+ iowrite(stringformat(template,
+ luaotfload.self,
+ names_gzip,
+ names_bin,
+ caches.getwritablepath (config.luaotfload.cache_dir)))
+end
+
+local about = [[
+%s:
+ Luaotfload font management and diagnostic utility.
+ License: GNU GPL v2.0.
+ Report problems to <https://github.com/lualatex/luaotfload/issues>
+]]
+
+local version_msg = function ( )
+ local out = function (...) texiowrite_nl (stringformat (...)) end
+ local uname = os.uname ()
+ local meta = names.getmetadata ()
+ out (about, luaotfload.self)
+ out ("%s version: %q", luaotfload.self, version)
+ out ("Revision: %q", config.luaotfload.status.notes.revision)
+ out ("Lua interpreter: %s; version %q", runtime[1], runtime[2])
+ out ("Luatex SVN revision: %d", status.luatex_svn)
+ out ("Luatex version: %.2f.%d",
+ status.luatex_version / 100,
+ status.luatex_revision)
+ out ("Platform: type=%s name=%s", os.type, os.name)
+
+ local uname_vars = tablesortedkeys (uname)
+ for i = 1, #uname_vars do
+ local var = uname_vars[i]
+ out (" + %8s: %s", var, uname[var])
+ end
+ out ("Index: version=%q created=%q modified=%q",
+ config.luaotfload.status.notes.revision,
+ meta.created or "ages ago",
+ meta.modified or "ages ago")
+ out ""
+end
+
+
+--- makeshift formatting
+
+local head_adornchars = {
+ [1] = "*", [2] = "=", [3] = "~", [4] = "-", [5] = "·",
+}
+
+local textwidth = 80
+local wd_leftcolumn = mathfloor(textwidth * .25)
+local key_fmt = stringformat([[%%%ds]], wd_leftcolumn)
+local val_fmt = [[%s]]
+local fieldseparator = ":"
+local info_fmt = key_fmt .. fieldseparator .. " " .. val_fmt
+
+local currentdepth = 0
+local counterstack = { } -- counters per level
+local counterformat = "%d"
+
+local format_counter = function (stack)
+ local acc = { }
+ for lvl=1, #stack do
+ acc[#acc+1] = stringformat(counterformat, stack[lvl])
+ end
+ return tableconcat(acc, ".")
+end
+
+local print_heading = function (title, level)
+ local structuredata
+ if currentdepth == level then -- top is current
+ counterstack[#counterstack] = counterstack[#counterstack] + 1
+ elseif currentdepth < level then -- push new
+ counterstack[#counterstack+1] = 1
+ else -- pop
+ local diff = currentdepth - level
+ while diff > 0 do
+ counterstack[#counterstack] = nil
+ diff = diff - 1
+ end
+ counterstack[#counterstack] = counterstack[#counterstack] + 1
+ end
+ currentdepth = level
+
+ texiowrite_nl ""
+ if not level or level > #head_adornchars then
+ level = #head_adornchars
+ end
+ local adornchar = head_adornchars[level]
+
+ local counter = format_counter(counterstack)
+
+ local s = adornchar .. adornchar .. " "
+ .. counter .. " "
+ .. title .. " "
+ texiowrite_nl (s .. stringrep(adornchar, textwidth-utf.len(s)))
+end
+
+local baseindent = " "
+
+--[[doc--
+
+ show_info_items -- Together with show_info_table prints the table returned by
+ fontloader.info(), recursing into nested tables if appropriate (as necessitated
+ by Luatex versions 0.78+, which include the pfminfo table in the result).
+
+--doc]]--
+
+local show_info_table show_info_table = function (t, depth)
+ depth = depth or 0
+ local indent = stringrep (baseindent, depth)
+ local keys = tablesortedkeys (t)
+ for n = 1, #keys do
+ local key = keys [n]
+ local val = t [key]
+ if type (val) == "table" then
+ texiowrite_nl (indent .. stringformat (info_fmt, key, "<table>"))
+ show_info_table (val, depth + 1)
+ else
+ texiowrite_nl (indent .. stringformat (info_fmt, key, val))
+ end
+ end
+end
+
+local show_info_items = function (fontinfo)
+ print_heading (fontinfo.fullname, 1)
+ texiowrite_nl ""
+ show_info_table (fontinfo)
+ texiowrite_nl ""
+end
+
+local p_eol = S"\n\r"^1
+local p_space = S" \t\v"^0
+local p_line = p_space * C((1 - p_eol)^1)^-1
+local p_lines = Ct(p_line * (p_eol^1 * p_line^-1)^0)
+
+local show_fontloader_warnings = function (ws)
+ local nws = #ws
+ print_heading(stringformat(
+ [[the fontloader emitted %d warnings]],
+ nws), 2)
+ texiowrite_nl ""
+ for i=1, nws do
+ local w = ws[i]
+ texiowrite_nl (stringformat("%d:", i))
+ local lines = lpegmatch(p_lines, w)
+ for i=1, #lines do
+ local line = lines[i]
+ texiowrite_nl(" · " .. line)
+ end
+ texiowrite_nl ""
+ end
+end
+
+local p_spacechar = S" \n\r\t\v"
+local p_wordchar = (1 - p_spacechar)
+local p_whitespace = p_spacechar^1
+local p_word = C(p_wordchar^1)
+local p_words = Ct(p_word * (p_whitespace * p_word)^0)
+
+--- string -> int -> string list
+local reflow = function (text, width)
+ local words
+ if type(text) == "string" then
+ words = lpegmatch(p_words, text)
+ if #words < 2 then
+ return { text }
+ end
+ else
+ words = text
+ if #words < 2 then
+ return words
+ end
+ end
+
+ local space = " "
+ local utflen = utf.len
+ local reflowed = { }
+
+ local first = words[1]
+ local linelen = #first
+ local line = { first }
+
+ for i=2, #words do
+ local word = words[i]
+ local lword = utflen(word)
+ linelen = linelen + lword + 1
+ if linelen > width then
+ reflowed[#reflowed+1] = tableconcat(line)
+ linelen = #word
+ line = { word }
+ else
+ line[#line+1] = space
+ line[#line+1] = word
+ end
+ end
+ reflowed[#reflowed+1] = tableconcat(line)
+ return reflowed
+end
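+
+--[[doc--
+
+  E.g. reflow ("lorem ipsum dolor", 12) yields the two lines
+
+      { "lorem ipsum", "dolor" }
+
+  since appending “dolor” to the first line would exceed the requested
+  width of 12 characters.
+
+--doc]]--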
+
+--- string -> 'a -> string list
+local print_field = function (key, val)
+ val = tostring(val)
+ local lhs = stringformat(key_fmt, key) .. fieldseparator .. " "
+ local wd_lhs = #lhs
+ local lines = reflow(val, textwidth - wd_lhs)
+
+ texiowrite_nl(lhs)
+ texiowrite(lines[1])
+ if #lines > 1 then
+ local indent = stringrep(" ", wd_lhs)
+ for i=2, #lines do
+ texiowrite_nl(indent)
+ texiowrite (lines[i])
+ end
+ end
+end
+
+local display_names = function (names)
+ print_heading("Font Metadata", 2)
+ for i=1, #names do
+ local lang, namedata = names[i].lang, names[i].names
+ print_heading(stringformat("Language: %s ", i, lang), 3)
+ texiowrite_nl ""
+ if namedata then
+ for field, value in next, namedata do
+ print_field(field, value)
+ end
+ end
+ end
+end
+
+--- see luafflib.c
+local general_fields = {
+ --- second: l -> literal | n -> length | d -> date
+ { "fullname", "l", "font name" },
+ { "version", "l", "font version" },
+ { "creationtime", "d", "creation time" },
+ { "modificationtime", "d", "modification time" },
+ { "subfonts", "n", "number of subfonts" },
+ { "glyphcnt", "l", "number of glyphs" },
+ { "weight", "l", "weight indicator" },
+ { "design_size", "l", "design size" },
+ { "design_range_bottom", "l", "design size min" },
+ { "design_range_top", "l", "design size max" },
+ { "fontstyle_id", "l", "font style id" },
+ { "fontstyle_name", "S", "font style name" },
+ { "strokewidth", "l", "stroke width" },
+ { "units_per_em", "l", "units per em" },
+ { "ascent", "l", "ascender height" },
+ { "descent", "l", "descender height" },
+ { "comments", "l", "comments" },
+ { "os2_version", "l", "os2 version" },
+ { "sfd_version", "l", "sfd version" },
+}
+
+local display_general = function (fullinfo)
+ texiowrite_nl ""
+ print_heading("General Information", 2)
+ texiowrite_nl ""
+ for i=1, #general_fields do
+ local field = general_fields[i]
+ local key, mode, desc = unpack(field)
+ local val
+ if mode == "l" then
+ val = fullinfo[key]
+ elseif mode == "S" then --- style names table
+ local data = fullinfo[key]
+ if type (data) == "table" then
+ if #data > 0 then
+ for n = 1, #data do
+ local nth = data[n]
+ if nth.lang == 1033 then
+ val = nth.name
+ goto found
+ end
+ end
+ val = data[1].name --- fall back to the first entry
+ else
+ val = ""
+ end
+ ::found::
+ else
+ val = data
+ end
+ elseif mode == "n" then
+ local v = fullinfo[key]
+ if v then
+ val = #fullinfo[key]
+ end
+ elseif mode == "d" then
+ if ostype == "unix" then
+ val = osdate("%F %T", fullinfo[key])
+ else
+ --- the MS compiler doesn’t support C99, so
+ --- strftime is missing some functionality;
+ --- see loslib.c for details.
+ val = osdate("%Y-%m-d %H:%M:%S", fullinfo[key])
+ end
+ end
+ if not val then
+ val = "<none>"
+ end
+ print_field(desc, val)
+ end
+end
+
+local print_features = function (features)
+ for tag, data in next, features do
+ print_heading(tag, 4)
+ for script, languages in next, data do
+ local field = stringformat(key_fmt, script).. fieldseparator .. " "
+ local wd_field = #field
+ local lines = reflow(languages.list, textwidth - wd_field)
+ local indent = stringrep(" ", wd_field)
+ texiowrite_nl(field)
+ texiowrite(lines[1])
+ if #lines > 1 then
+ for i=2, #lines do
+ texiowrite_nl(indent .. lines[i])
+ end
+ end
+ end
+ end
+end
+
+local extract_feature_info = function (set)
+ local collected = { }
+ for i=1, #set do
+ local features = set[i].features
+ if features then
+ for j=1, #features do
+ local feature = features[j]
+ local scripts = feature.scripts
+ local tagname = stringlower(feature.tag)
+ local entry = collected[tagname] or { }
+
+ for k=1, #scripts do
+ local script = scripts[k]
+ local scriptname = stringlower(script.script)
+ local c_script = entry[scriptname] or {
+ list = { },
+ set = { },
+ }
+ local list, set = c_script.list, c_script.set
+
+ for l=1, #script.langs do
+ local langname = stringlower(script.langs[l])
+ if not set[langname] then
+ list[#list+1] = langname
+ set[langname] = true
+ end
+ end
+ entry[scriptname] = c_script
+ end
+ collected[tagname] = entry
+ end
+ end
+ end
+ return collected
+end
+
+local display_feature_set = function (set)
+ local collected = extract_feature_info(set)
+ print_features(collected)
+end
+
+local display_features = function (gsub, gpos)
+ texiowrite_nl ""
+
+ if gsub or gpos then
+ print_heading("Features", 2)
+
+ if gsub then
+ print_heading("GSUB Features", 3)
+ display_feature_set(gsub)
+ end
+
+ if gpos then
+ print_heading("GPOS Features", 3)
+ display_feature_set(gpos)
+ end
+ end
+end
+
+local show_full_info = function (path, subfont, warnings)
+ local rawinfo, warn = fontloader.open(path, subfont)
+ if warnings then
+ show_fontloader_warnings(warn)
+ end
+ if not rawinfo then
+ texiowrite_nl(stringformat([[cannot open font %s]], path))
+ return
+ end
+ local fontdata = { }
+ local fullinfo = fontloader.to_table(rawinfo)
+ local fields = fontloader.fields(rawinfo)
+ fontloader.close(rawinfo)
+ display_names(fullinfo.names)
+ display_general(fullinfo)
+ display_features(fullinfo.gsub, fullinfo.gpos)
+end
+
+--- Subfonts returned by fontloader.info() do not correspond
+--- to the actual indices required by fontloader.open(), so
+--- we try and locate the correct one by matching the request
+--- against the full name.
+
+local subfont_by_name
+subfont_by_name = function (lst, askedname, n)
+ if not n then
+ return subfont_by_name (lst, askedname, 1)
+ end
+
+ local font = lst[n]
+ if font then
+ if sanitize_fontname (font.fullname) == askedname then
+ return font
+ end
+ return subfont_by_name (lst, askedname, n+1)
+ end
+ return false
+end
+
+--[[doc--
+The font info knows two levels of detail:
+
+ a) basic information returned by fontloader.info(); and
+ b) detailed information that is a subset of the font table
+ returned by fontloader.open().
+--doc]]--
+
+local show_font_info = function (basename, askedname, detail, warnings)
+ local filenames = names.data().files
+ local index = filenames.base[basename]
+ local fullname = filenames.full[index]
+ askedname = sanitize_fontname (askedname)
+ if not fullname then -- texmf
+ fullname = resolvers.findfile(basename)
+ end
+ if fullname then
+ local shortinfo = fontloader.info(fullname)
+ local nfonts = #shortinfo
+ if nfonts > 0 then -- true type collection
+ local subfont
+ if askedname then
+ report (true, 1, "resolve",
+ [[%s is part of the font collection %s]],
+ askedname, basename)
+ subfont = subfont_by_name(shortinfo, askedname)
+ end
+ if subfont then
+ show_info_items(subfont)
+ if detail == true then
+ show_full_info(fullname, subfont, warnings)
+ end
+ else -- list all subfonts
+ report (true, 1, "resolve",
+ [[%s is a font collection]], basename)
+ for subfont = 1, nfonts do
+ report (true, 1, "resolve",
+ [[Showing info for font no. %d]], subfont)
+ show_info_items(shortinfo[subfont])
+ if detail == true then
+ show_full_info(fullname, subfont, warnings)
+ end
+ end
+ end
+ else
+ show_info_items(shortinfo)
+ if detail == true then
+ show_full_info(fullname, subfont, warnings)
+ end
+ end
+ else
+ report (true, 1, "resolve", "Font %s not found", filename)
+ end
+end
+
+--[[--
+Running the scripts triggers one or more actions that have to be
+executed in the correct order. To avoid duplication we track them in a
+set.
+--]]--
+
+local action_sequence = {
+ "config", "loglevel", "help", "version",
+ "diagnose", "blacklist", "cache", "flush",
+ "bisect", "generate", "list", "query",
+}
+
+local action_pending = tabletohash(action_sequence, false)
+
+action_pending.config = true --- always read the configuration
+action_pending.loglevel = true --- always set the loglevel
+action_pending.generate = false --- this is the default action
+
+local actions = { } --- (jobspec -> (bool * bool)) list
+
+actions.loglevel = function (job)
+ local lvl = job.log_level
+ if lvl then
+ log.set_loglevel(lvl)
+ report ("info", 3, "util", "Setting the log level to %d.", lvl)
+ report ("log", 2, "util", "Lua=%q", _VERSION)
+ end
+ return true, true
+end
+
+actions.config = function (job)
+ local defaults = luaotfload.default_config
+ local vars = config.actions.read (job.extra_config)
+ config.luaotfload = config.actions.apply (defaults, vars)
+ config.luaotfload = config.actions.apply (config.luaotfload, job.config)
+
+ --inspect(config.luaotfload)
+ --os.exit()
+ if not config.actions.reconfigure () then
+ return false, false
+ end
+ return true, true
+end
+
+actions.version = function (job)
+ version_msg()
+ return true, false
+end
+
+actions.help = function (job)
+ help_msg (job.help_version or "luaotfload-tool")
+ return true, false
+end
+
+actions.blacklist = function (job)
+ names.read_blacklist()
+ local n = 0
+ for n, entry in next, tablesortedkeys(names.blacklist) do
+ iowrite (stringformat("(%d %s)\n", n, entry))
+ end
+ return true, false
+end
+
+actions.generate = function (job)
+ local fontnames = names.update(fontnames, job.force_reload, job.dry_run)
+ report ("info", 2, "db", "Fonts in the database: %i", #fontnames.mappings)
+ if names.data() then
+ return true, true
+ end
+ return false, false
+end
+
+-------------------------------------------------------------------------------
+--- bisect mode
+-------------------------------------------------------------------------------
+
+local bisect_status_path = caches.getwritablepath "bisect"
+local bisect_status_file = bisect_status_path .."/" .. "luaotfload-bisect-status.lua"
+local bisect_status_fmt = [[
+--[==[-------------------------------------------------------------------------
+ This file is generated by Luaotfload. It can be safely deleted.
+ Creation date: %s.
+-------------------------------------------------------------------------]==]--
+
+%s
+
+--- vim:ft=lua:ts=8:et:sw=2
+]]
+
+--[[doc--
+
+ write_bisect_status -- Write the history of the current bisection to disk.
+
+--doc]]--
+
+--- state list -> bool
+local write_bisect_status = function (data)
+ local payload = tableserialize (data, true)
+ local status = stringformat (bisect_status_fmt,
+ osdate ("%Y-%m-d %H:%M:%S", os.time ()),
+ payload)
+ if status and iosavedata (bisect_status_file, status) then
+ report ("info", 4, "bisect",
+ "Bisection state written to %s.", bisect_status_file)
+ return true
+ end
+ report ("info", 0, "bisect",
+ "Failed to write bisection state to %s.", bisect_status_file)
+ return false
+end
+
+--[[doc--
+
+ read_bisect_status -- Read the bisect log from disk.
+
+--doc]]--
+
+--- unit -> state list
+local read_bisect_status = function ()
+ report ("info", 4, "bisect", "Testing for status file: %q.", bisect_status_file)
+ if not lfsisfile (bisect_status_file) then
+ report ("info", 2, "bisect", "No such file: %q.", bisect_status_file)
+ report ("info", 0, "bisect", "Not in bisect mode.")
+ return false
+ end
+ report ("info", 4, "bisect", "Reading status file: %q.", bisect_status_file)
+ local success, status = pcall (dofile, bisect_status_file)
+ if not success then
+ report ("info", 0, "bisect", "Could not read status file.")
+ return false
+ end
+ return status
+end
+
+--[[doc--
+
+ bisect_start -- Begin a bisect session. Determines the number of
+ fonts and sets the initial high, low, and pivot values.
+
+--doc]]--
+
+local bisect_start = function ()
+ if lfsisfile (bisect_status_file) then
+ report ("info", 0, "bisect",
+ "Bisect session in progress.",
+ bisect_status_file)
+ report ("info", 0, "bisect",
+ "Use --bisect=stop to erase it before starting over.")
+ return false, false
+ end
+ report ("info", 2, "bisect",
+ "Starting bisection of font database %q.", bisect_status_file)
+ local n = names.count_font_files ()
+ local pivot = mathfloor (n / 2)
+ local data = { { 1, n, pivot } }
+ report ("info", 0, "bisect", "Initializing pivot to %d.", pivot)
+ if write_bisect_status (data) then
+ return true, false
+ end
+ return false, false
+end
+
+--[[doc--
+
+ bisect_stop -- Terminate bisection session by removing all state info.
+
+--doc]]--
+
+local bisect_stop = function ()
+ report ("info", 3, "bisect", "Erasing bisection state at %s.", bisect_status_file)
+ if lfsisfile (bisect_status_file) then
+ local success, msg = os.remove (bisect_status_file)
+ if not success then
+ report ("info", 2, "bisect",
+ "Failed to erase file %s (%s).",
+ bisect_status_file, msg)
+ end
+ end
+ if lfsisdir (bisect_status_path) then
+ local success, msg = os.remove (bisect_status_path)
+ if not success then
+ report ("info", 2, "bisect",
+ "Failed to erase directory %s (%s).",
+ bisect_status_path, msg)
+ end
+ end
+ if lfsisfile (bisect_status_file) then
+ return false, false
+ end
+ return true, false
+end
+
+--[[doc--
+
+ bisect_terminate -- Wrap up a bisect session by printing the
+ offending font and removing the state file.
+
+--doc]]--
+
+local bisect_terminate = function (nsteps, culprit)
+ report ("info", 1, "bisect",
+ "Bisection completed after %d steps.", nsteps)
+ report ("info", 0, "bisect",
+ "Bad file: %s.", names.nth_font_filename (culprit))
+ report ("info", 0, "bisect",
+ "Run with --bisect=stop to finish bisection.")
+ return true, false
+end
+
+--[[doc--
+
+ list_remainder -- Show remaining fonts in bisect slice.
+
+--doc]]--
+
+local list_remainder = function (lo, hi)
+ local fonts = names.font_slice (lo, hi)
+ report ("info", 0, "bisect", "%d fonts left.", hi - lo + 1)
+ for i = 1, #fonts do
+ report ("info", 1, "bisect", " · %2d: %s", lo, fonts[i])
+ lo = lo + 1
+ end
+end
+
+--[[doc--
+
+ bisect_set -- Prepare the next bisection step by setting high, low,
+ and pivot to new values.
+
+ The “run” directive always picks the segment below the pivot so we
+ can rely on the “outcome parameter” to be referring to that.
+
+--doc]]--
+
+local bisect_set = function (outcome)
+ local status = read_bisect_status ()
+ if not status then
+ return false, false
+ end
+
+ local nsteps = #status
+ local previous = status[nsteps]
+ if previous == true then
+ --- Bisection already completed; we exit early through
+ --- bisect_terminate() to avoid further writes to the
+ --- state files that mess up step counting.
+ nsteps = nsteps - 1
+ return bisect_terminate (nsteps, status[nsteps][1])
+ end
+
+ local lo, hi, pivot = unpack (previous)
+
+ report ("info", 3, "bisect", "Previous step %d: lo=%d, hi=%d, pivot=%d.",
+ nsteps, lo, hi, pivot)
+
+ if outcome == "bad" then
+ hi = pivot
+ if lo >= hi then --- complete
+ status[nsteps + 1] = { lo, lo, lo }
+ status[nsteps + 2] = true
+ write_bisect_status (status)
+ return bisect_terminate (nsteps, lo)
+ end
+ pivot = mathfloor ((lo + hi) / 2)
+ report ("info", 0, "bisect",
+ "Continuing with the lower segment: lo=%d, hi=%d, pivot=%d.",
+ lo, hi, pivot)
+ elseif outcome == "good" then
+ lo = pivot + 1
+ if lo >= hi then --- complete
+ status[nsteps + 1] = { lo, lo, lo }
+ status[nsteps + 2] = true
+ write_bisect_status (status)
+ return bisect_terminate (nsteps, lo)
+ end
+ pivot = mathfloor ((lo + hi) / 2)
+ report ("info", 0, "bisect",
+ "Continuing with the upper segment: lo=%d, hi=%d, pivot=%d.",
+ lo, hi, pivot)
+ else -- can’t happen
+ report ("info", 0, "bisect", "What the hell?", lo, hi, pivot)
+ return false, false
+ end
+
+ status[nsteps + 1] = { lo, hi, pivot }
+ write_bisect_status (status)
+ if hi - lo <= 10 then
+ list_remainder (lo, hi)
+ end
+ return true, false
+end
+
+--[[doc--
+
+ bisect_status -- Output information about the current bisect session.
+
+--doc]]--
+
+local bisect_status = function ()
+ local status = read_bisect_status ()
+ if not status then
+ return false, false
+ end
+ local nsteps = #status
+ if nsteps > 1 then
+ for i = nsteps - 1, 1, -1 do
+ local step = status[i]
+ report ("info", 2, "bisect", "Step %d: lo=%d, hi=%d, pivot=%d.",
+ i, unpack (step))
+ end
+ end
+ local current = status[nsteps]
+ report ("info", 0, "bisect", "Step %d: lo=%d, hi=%d, pivot=%d.",
+ nsteps, unpack (current))
+ return true, false
+end
+
+--[[doc--
+
+ bisect_run -- Run Luaotfload utilizing the current bisection state.
+ This should be combined with the --update mode, possibly with the
+ --force option.
+
+ Luaotfload always tests the segment below the pivot first.
+
+--doc]]--
+
+local bisect_run = function ()
+ local status = read_bisect_status ()
+ if not status then
+ return false, false
+ end
+ local nsteps = #status
+ local currentstep = nsteps + 1
+ local current = status[nsteps]
+ if current == true then -- final step
+ current = status[nsteps - 1]
+ end
+ local lo, hi, pivot = unpack (current)
+ report ("info", 3, "bisect", "Previous step %d: lo=%d, hi=%d, pivot=%d.",
+ nsteps, lo, hi, pivot)
+ report ("info", 1, "bisect", "Step %d: Testing fonts from %d to %d.",
+ currentstep, lo, pivot)
+ config.luaotfload.misc.bisect = { lo, pivot }
+ return true, true
+end
+
+local bisect_modes = {
+ start = bisect_start,
+ good = function () return bisect_set "good" end,
+ bad = function () return bisect_set "bad" end,
+ stop = bisect_stop,
+ status = bisect_status,
+ run = bisect_run,
+}
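+
+--[[doc--
+
+  A typical bisection session, as a sequence of shell commands
+  (combining “run” with --update and, if needed, --force as noted
+  above):
+
+      luaotfload-tool --bisect=start
+      luaotfload-tool --bisect=run --update --force
+      luaotfload-tool --bisect=bad      (or --bisect=good)
+
+  Repeat the run/good/bad steps until the offending file is reported,
+  then clean up with
+
+      luaotfload-tool --bisect=stop
+
+--doc]]--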
+
+actions.bisect = function (job)
+ local mode = job.bisect
+ local runner = bisect_modes[mode]
+ if not runner then
+ report ("info", 0, "bisect", "Unknown directive %q.", mode)
+ return false, false
+ end
+ return runner (job)
+end
+
+actions.flush = function (job)
+ local success = names.flush_lookup_cache()
+ if success then
+ local success = names.save_lookups()
+ if success then
+ report ("info", 2, "cache", "Lookup cache emptied")
+ return true, true
+ end
+ end
+ return false, false
+end
+
+local cache_directives = {
+ ["purge"] = names.purge_cache,
+ ["erase"] = names.erase_cache,
+ ["show"] = names.show_cache,
+}
+
+actions.cache = function (job)
+ local directive = cache_directives[job.cache]
+ if not directive or type(directive) ~= "function" then
+ report ("info", 2, "cache",
+ "Invalid font cache directive %s.", job.cache)
+ return false, false
+ end
+ if directive() then
+ return true, true
+ end
+ return false, false
+end
+
+actions.query = function (job)
+
+ require "luaotfload-features"
+
+ local query = job.query
+
+ local tmpspec = {
+ name = query,
+ lookup = "name",
+ specification = query,
+ optsize = 0,
+ features = { },
+ }
+
+ tmpspec = names.handle_request (tmpspec)
+
+ if not tmpspec.size then
+ tmpspec.size = 655360 --- assume 10pt
+ end
+
+ local foundname, subfont, success
+
+ if tmpspec.lookup == "name"
+ or tmpspec.lookup == "anon" --- not *exactly* as resolvers.anon
+ then
+ foundname, subfont = names.resolve_name (tmpspec)
+ if foundname then
+ foundname, _, success = names.font_file_lookup (foundname)
+ end
+ elseif tmpspec.lookup == "file" then
+ foundname, _, success =
+ names.font_file_lookup (tmpspec.name)
+ end
+
+ if success then
+ report (false, 0, "resolve", "Font %q found!", query)
+ if subfont then
+ report (false, 0, "resolve",
+ "Resolved file name %q, subfont nr. %q",
+ foundname, subfont)
+ else
+ report (false, 0, "resolve",
+ "Resolved file name %q", foundname)
+ end
+ if job.show_info then
+ show_font_info (foundname, query, job.full_info, job.warnings)
+ iowrite "\n"
+ end
+ else
+ report (false, 0, "resolve", "Cannot find %q in index.", query)
+ report (false, 0, "resolve",
+ "Hint: use the --fuzzy option to display suggestions.",
+ query)
+ if job.fuzzy == true then
+ report (false, 0, "resolve",
+ "Looking for close matches, this may take a while ...")
+ local _success = names.find_closest(query, job.fuzzy_limit)
+ end
+ end
+ return true, true
+end
+
+--- --list=<criterion>
+--- --list=<criterion>:<value>
+---
+--- --list=<criterion> --fields=<f1>,<f2>,<f3>,...<fn>
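+---
+--- Example invocations (fields as in the defaults below):
+---
+---     luaotfload-tool --list=* --fields=plainname,version
+---     luaotfload-tool --list=version:6.2 --fields=plainname
+---
+--- The first form dumps the requested fields for every entry; the
+--- second restricts output to entries whose “version” field equals
+--- “6.2” (the value is only an example).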
+
+local get_fields get_fields = function (entry, fields, acc, n)
+ if not acc then
+ return get_fields (entry, fields, { }, 1)
+ end
+
+ local field = fields [n]
+ if field then
+ local chain = stringsplit (field, "->")
+ local tmp = entry
+ for i = 1, #chain - 1 do
+ tmp = tmp [chain [i]]
+ if not tmp then
+ --- invalid field
+ break
+ end
+ end
+ if tmp then
+ local value = tmp [chain [#chain]]
+ acc[#acc+1] = value or false
+ else
+ acc[#acc+1] = false
+ end
+ return get_fields (entry, fields, acc, n+1)
+ end
+ return acc
+end
+
+local separator = "\t" --- could be “,” for csv
+
+local format_fields
+format_fields = function (fields, acc, n)
+ if not acc then
+ return format_fields(fields, { }, 1)
+ end
+
+ local field = fields[n]
+ if field ~= nil then
+ if field == false then
+ acc[#acc+1] = "<none>"
+ else
+ acc[#acc+1] = tostring(field)
+ end
+ return format_fields(fields, acc, n+1)
+ end
+ return tableconcat(acc, separator)
+end
+
+local set_primary_field
+set_primary_field = function (fields, addme, acc, n)
+ if not acc then
+ return set_primary_field(fields, addme, { addme }, 1)
+ end
+
+ local field = fields[n]
+ if field then
+ if field ~= addme then
+ acc[#acc+1] = field
+ end
+ return set_primary_field(fields, addme, acc, n+1)
+ end
+ return acc
+end
+
+local splitcomma = luaotfload.parsers.splitcomma
+
+actions.list = function (job)
+ local criterion = job.criterion
+ local asked_fields = job.asked_fields
+ local name_index = names.data ()
+
+ if asked_fields then
+ asked_fields = lpegmatch(splitcomma, asked_fields)
+ end
+
+ if not asked_fields then
+ --- some defaults
+ asked_fields = { "plainname", "version", }
+ end
+
+ if not name_index then
+ name_index = names.load()
+ end
+
+ local mappings = name_index.mappings
+ local nmappings = #mappings
+
+ if criterion == "*" then
+ report (false, 1, "list", "All %d entries", nmappings)
+ for i=1, nmappings do
+ local entry = mappings[i]
+ local fields = get_fields(entry, asked_fields)
+ --- we could collect these instead ...
+ local formatted = format_fields(fields)
+ texiowrite_nl(formatted)
+ end
+
+ else
+ criterion = stringexplode(criterion, ":") --> { field, value }
+ local asked_value = criterion[2]
+ criterion = criterion[1]
+ asked_fields = set_primary_field(asked_fields, criterion)
+
+ report (false, 1, "list", "By %s", criterion)
+
+ --- firstly, build a list of fonts to operate on
+ local targets = { }
+ if asked_value then --- only those whose value matches
+ report (false, 2, "list", "Restricting to value %s", asked_value)
+ for i=1, nmappings do
+ local entry = mappings[i]
+ if entry[criterion]
+ and tostring(entry[criterion]) == asked_value
+ then
+ targets[#targets+1] = entry
+ end
+ end
+
+ else --- whichever have the field, sorted
+ local categories, by_category = { }, { }
+ for i=1, nmappings do
+ local entry = mappings[i]
+ local tmp = entry
+ local chain = stringsplit (criterion, "->")
+        for j = 1, #chain - 1 do
+          tmp = tmp [chain [j]]
+ if not tmp then
+ break
+ end
+ end
+ local value = tmp and tmp [chain [#chain]] or "<none>"
+ if value then
+ --value = tostring(value)
+ local entries = by_category[value]
+ if not entries then
+ entries = { entry }
+ categories[#categories+1] = value
+ else
+ entries[#entries+1] = entry
+ end
+ by_category[value] = entries
+ end
+ end
+ table.sort(categories)
+
+ for i=1, #categories do
+ local entries = by_category[categories[i]]
+ for j=1, #entries do
+ targets[#targets+1] = entries[j]
+ end
+ end
+ end
+ local ntargets = #targets
+ report (false, 2, "list", "%d entries", ntargets)
+
+ --- now, output the collection
+ for i=1, ntargets do
+ local entry = targets[i]
+ local fields = get_fields(entry, asked_fields)
+ local formatted = format_fields(fields)
+ texiowrite_nl(formatted)
+ end
+ end
+
+ texiowrite_nl ""
+
+ return true, true
+end
+
+actions.diagnose = function (job)
+ --- diagnostics are loaded on demand
+ local diagnose = require "luaotfload-diagnostics.lua"
+ return diagnose (job)
+end
+
+--- stuff to be carried out prior to exit
+
+local finalizers = { }
+
+--- returns false if at least one of the actions failed, mainly
+--- for closing io channels
+local finalize = function ()
+ local success = true
+ for _, fun in next, finalizers do
+ if type (fun) == "function" then
+ if fun () == false then success = false end
+ end
+ end
+ return success
+end
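+
+--- Finalizers are registered by appending to the table above; the
+--- call to log.set_logout further down is handed the table for that
+--- purpose. A hypothetical example that closes an auxiliary file
+--- handle would look like this:
+---
+---     finalizers [#finalizers + 1] = function ()
+---         if auxfile then return auxfile:close () end
+---     end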
+
+--[[--
+Command-line processing.
+luaotfload-tool relies on the script alt_getopt to process argv and
+analyzes its output.
+
+TODO with extended lualibs we have the functionality from the
+environment.* namespace that could eliminate the dependency on
+alt_getopt.
+--]]--
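+
+--- A minimal sketch of the alt_getopt call as it is used below; the
+--- option names in this example are made up:
+---
+---     local opts, _, optarg = alt_getopt.get_ordered_opts (
+---         arg,                            --- argv as supplied by texlua
+---         "vq",                           --- short options
+---         { verbose = 1, quiet = "q" })   --- long options
+---
+--- In the long option table a letter aliases the corresponding short
+--- option, 1 marks an option that takes an argument, and 0 a long-only
+--- flag; “opts” lists the parsed options in order of appearance and
+--- “optarg” maps their positions to argument values, if any.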
+
+local process_cmdline = function ( ) -- unit -> jobspec
+ local result = { -- jobspec
+ force_reload = nil,
+ full_info = false,
+ warnings = false,
+ criterion = "",
+ query = "",
+ log_level = nil,
+ bisect = nil,
+ config = { db = { }, misc = { }, run = { }, paths = { } },
+ }
+
+ local long_options = {
+ ["bisect"] = 1,
+ cache = 1,
+ conf = 1,
+ diagnose = 1,
+ ["dry-run"] = "D",
+ ["flush-lookups"] = "l",
+ fields = 1,
+ find = 1,
+ force = "f",
+ formats = 1,
+ fuzzy = "F",
+ help = "h",
+ info = "i",
+ inspect = "I",
+ limit = 1,
+ list = 1,
+ ["local"] = "L",
+ log = 1,
+ ["max-fonts"] = 1,
+ ["no-compress"] = "c",
+ ["no-reload"] = "n",
+ ["no-strip"] = 0,
+ ["skip-read"] = "R",
+ ["prefer-texmf"] = "p",
+ ["print-conf"] = 0,
+ quiet = "q",
+ ["show-blacklist"] = "b",
+ stats = "S",
+ update = "u",
+ verbose = 1,
+ version = "V",
+ warnings = "w",
+ }
+
+ local short_options = "bcDfFiIlLnpqRSuvVhw"
+
+ local options, _, optarg =
+ alt_getopt.get_ordered_opts (arg, short_options, long_options)
+
+ local nopts = #options
+ for n=1, nopts do
+ local v = options[n]
+ if v == "q" then
+ result.log_level = 0
+ elseif v == "u" then
+ action_pending["generate"] = true
+ elseif v == "v" then
+ local lvl = result.log_level
+ if not lvl or lvl < 1 then
+ lvl = 1
+ else
+ lvl = lvl + 1
+ end
+ result.log_level = lvl
+ elseif v == "V" then
+ action_pending["version"] = true
+ elseif v == "h" then
+ action_pending["help"] = true
+ elseif v == "f" then
+ result.update = true
+ result.force_reload = 1
+ elseif v == "verbose" then
+ local lvl = optarg[n]
+ if lvl then
+ lvl = tonumber(lvl)
+ result.log_level = lvl
+ if lvl > 2 then
+ result.warnings = true
+ end
+ end
+ elseif v == "w" then
+ result.warnings = true
+ elseif v == "log" then
+ local str = optarg[n]
+ if str then
+ finalizers = log.set_logout(str, finalizers)
+ end
+ elseif v == "find" then
+ action_pending["query"] = true
+ result.query = optarg[n]
+ elseif v == "F" then
+ result.fuzzy = true
+ elseif v == "limit" then
+ local lim = optarg[n]
+ if lim then
+ result.fuzzy_limit = tonumber(lim)
+ end
+ elseif v == "i" then
+ result.show_info = true
+ elseif v == "I" then
+ result.show_info = true
+ result.full_info = true
+ elseif v == "l" then
+ action_pending["flush"] = true
+ elseif v == "L" then
+ action_pending["generate"] = true
+ config.luaotfload.db.scan_local = true
+ elseif v == "list" then
+ action_pending["list"] = true
+ result.criterion = optarg[n]
+ elseif v == "fields" then
+ result.asked_fields = optarg[n]
+ elseif v == "cache" then
+ action_pending["cache"] = true
+ result.cache = optarg[n]
+ elseif v == "D" then
+ result.dry_run = true
+ elseif v == "p" then
+ names.set_location_precedence {
+ "local", "texmf", "system"
+ }
+ elseif v == "b" then
+ action_pending["blacklist"] = true
+ elseif v == "diagnose" then
+ action_pending["diagnose"] = true
+ result.asked_diagnostics = optarg[n]
+ elseif v == "formats" then
+ result.config.db.formats = optarg[n]
+ --names.set_font_filter (optarg[n])
+ elseif v == "n" then
+ config.luaotfload.db.update_live = false
+ elseif v == "S" then
+ config.luaotfload.misc.statistics = true
+ elseif v == "R" then
+ --- dev only, undocumented
+ config.luaotfload.db.skip_read = true
+ elseif v == "c" then
+ config.luaotfload.db.compress = false
+ elseif v == "no-strip" then
+ config.luaotfload.db.strip = false
+ elseif v == "max-fonts" then
+            local lim = optarg[n]
+            if lim then
+                lim = tonumber(lim)
+                if lim and lim > 0 then
+                    config.luaotfload.db.max_fonts = lim
+                end
+            end
+ elseif v == "bisect" then
+ result.bisect = optarg[n]
+ action_pending.bisect = true
+ elseif v == "conf" then
+ local extra = stringexplode (optarg[n], ",+")
+ if extra then
+ local extra_config = result.extra_config
+ if extra_config then
+ table.append (extra_config, extra)
+ else
+ result.extra_config = extra
+ end
+ end
+ elseif v == "print-conf" then
+ result.print_config = true
+ end
+ end
+
+ if nopts == 0 then
+ action_pending["help"] = true
+ result.help_version = "short"
+ end
+ return result
+end
+
+local main = function ( ) -- unit -> int
+ local retval = 0
+ local job = process_cmdline()
+
+-- inspect(action_pending)
+-- inspect(job)
+
+ for i=1, #action_sequence do
+ local actionname = action_sequence[i]
+ local exit = false
+ if action_pending[actionname] then
+ report ("log", 3, "util", "Preparing for task", "%s", actionname)
+
+ local action = actions[actionname]
+ local success, continue = action(job)
+
+ if not success then
+ report (false, 0, "util",
+ "Failed to execute task.", "%s", actionname)
+ retval = -1
+ exit = true
+ elseif not continue then
+ report (false, 3, "util",
+ "Task completed, exiting.", "%s", actionname)
+ exit = true
+ else
+ report (false, 3, "util",
+ "Task completed successfully.", "%s", actionname)
+ end
+ end
+ if exit then break end
+ end
+
+ if finalize () == false then
+ retval = -1
+ end
+
+ --texiowrite_nl""
+ return retval
+end
+
+return main()
+
+-- vim:tw=71:sw=4:ts=4:expandtab
diff --git a/src/luaotfload.sty b/src/luaotfload.sty
new file mode 100644
index 0000000..a235d6b
--- /dev/null
+++ b/src/luaotfload.sty
@@ -0,0 +1,45 @@
+%% Copyright (C) 2009-2014
+%%
+%% by Elie Roux <elie.roux@telecom-bretagne.eu>
+%% and Khaled Hosny <khaledhosny@eglug.org>
+%% and Philipp Gesang <philipp.gesang@alumni.uni-heidelberg.de>
+%%
+%% This file is part of Luaotfload.
+%%
+%% Home: https://github.com/lualatex/luaotfload
+%% Support: <lualatex-dev@tug.org>.
+%%
+%% Luaotfload is under the GPL v2.0 (exactly) license.
+%%
+%% ----------------------------------------------------------------------------
+%%
+%% Luaotfload is free software; you can redistribute it and/or
+%% modify it under the terms of the GNU General Public License
+%% as published by the Free Software Foundation; version 2
+%% of the License.
+%%
+%% Luaotfload is distributed in the hope that it will be useful,
+%% but WITHOUT ANY WARRANTY; without even the implied warranty of
+%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+%% GNU General Public License for more details.
+%%
+%% You should have received a copy of the GNU General Public License
+%% along with Luaotfload; if not, see <http://www.gnu.org/licenses/>.
+%%
+%% ----------------------------------------------------------------------------
+%%
+%% Classical Plain+\LaTeX package initialization.
+%%
+\csname ifluaotfloadloaded\endcsname
+\let\ifluaotfloadloaded\endinput
+\bgroup\expandafter\expandafter\expandafter\egroup
+\expandafter\ifx\csname ProvidesPackage\endcsname\relax
+ \input luatexbase.sty
+\else
+ \NeedsTeXFormat{LaTeX2e}
+ \ProvidesPackage{luaotfload}%
+ [2014/42/42 v2.5 OpenType layout system]
+ \RequirePackage{luatexbase}
+\fi
+\RequireLuaModule{luaotfload-main}
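+%%
+%% Usage sketch (assuming a LuaTeX-based format): with LaTeX the
+%% package is loaded as usual, with Plain the file is read directly.
+%%
+%%     \usepackage{luaotfload}   % LaTeX
+%%     \input luaotfload.sty     % Plain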
+