-rw-r--r--  luaotfload.dtx        54
-rw-r--r--  otfl-data-con.lua     13
-rw-r--r--  otfl-font-agl.lua   3699
-rw-r--r--  otfl-font-cid.lua     48
-rw-r--r--  otfl-font-def.lua    293
-rw-r--r--  otfl-font-dum.lua    156
-rw-r--r--  otfl-font-ini.lua     47
-rw-r--r--  otfl-font-map.lua    180
-rw-r--r--  otfl-font-ota.lua     62
-rw-r--r--  otfl-font-otb.lua     59
-rw-r--r--  otfl-font-otc.lua    173
-rw-r--r--  otfl-font-otd.lua     42
-rw-r--r--  otfl-font-otf.lua   1867
-rw-r--r--  otfl-font-oti.lua     37
-rw-r--r--  otfl-font-otn.lua    338
-rw-r--r--  otfl-font-ott.lua    160
-rw-r--r--  otfl-font-tfm.lua    241
-rw-r--r--  otfl-font-xtx.lua     81
-rw-r--r--  otfl-luat-dum.lua     28
-rw-r--r--  otfl-node-dum.lua     85
-rw-r--r--  otfl-node-inj.lua     90
21 files changed, 5792 insertions, 1961 deletions
diff --git a/luaotfload.dtx b/luaotfload.dtx
index 586b8be..68750df 100644
--- a/luaotfload.dtx
+++ b/luaotfload.dtx
@@ -485,6 +485,23 @@ if tex.luatexversion < luatex_version then
end
% \end{macrocode}
%
+%
+% \begin{macrocode}
+function table.reversed(t)
+ if t then
+ local tt, tn = { }, #t
+ if tn > 0 then
+ local ttn = 0
+ for i=tn,1,-1 do
+ ttn = ttn + 1
+ tt[ttn] = t[i]
+ end
+ end
+ return tt
+ end
+end
+% \end{macrocode}
+%
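For reference, the table.reversed helper added above only builds a reversed copy of a table's array part. A minimal usage sketch in plain Lua (not part of the dtx; the values are made up):

    local t = { "a", "b", "c" }
    local r = table.reversed(t)     -- r is { "c", "b", "a" }
    local n = table.reversed(nil)   -- nil input yields nil, via the guard at the top
    print(r[1], r[2], r[3])         --> c  b  a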
% \subsection{Module loading}
%
% We load the \context files with this function. It automatically adds the
@@ -524,9 +541,8 @@ tex.attribute[0] = 0
% Node support modules.
%
% \begin{macrocode}
-luaotfload.loadmodule("font-ini.lua")
-luaotfload.loadmodule("node-dum.lua")
-luaotfload.loadmodule("node-inj.lua")
+luaotfload.loadmodule('node-dum.lua')
+luaotfload.loadmodule('node-inj.lua')
% \end{macrocode}
%
% By default \context takes some private attributes for internal use. To
@@ -549,20 +565,22 @@ end
% Font handling modules.
%
% \begin{macrocode}
-luaotfload.loadmodule("font-tfm.lua")
-luaotfload.loadmodule("font-cid.lua")
-luaotfload.loadmodule("font-ott.lua")
-luaotfload.loadmodule("font-map.lua")
-luaotfload.loadmodule("font-otf.lua")
-luaotfload.loadmodule("font-otd.lua")
-luaotfload.loadmodule("font-oti.lua")
-luaotfload.loadmodule("font-otb.lua")
-luaotfload.loadmodule("font-otn.lua")
-luaotfload.loadmodule("font-ota.lua")
-luaotfload.loadmodule("font-otc.lua")
-luaotfload.loadmodule("font-def.lua")
-luaotfload.loadmodule("font-xtx.lua")
-luaotfload.loadmodule("font-dum.lua")
+luaotfload.loadmodule('font-ini.lua')
+luaotfload.loadmodule('font-tfm.lua')
+luaotfload.loadmodule('font-cid.lua')
+luaotfload.loadmodule('font-ott.lua')
+luaotfload.loadmodule('font-map.lua')
+luaotfload.loadmodule('font-otf.lua')
+luaotfload.loadmodule('font-otd.lua')
+luaotfload.loadmodule('font-oti.lua')
+luaotfload.loadmodule('font-otb.lua')
+luaotfload.loadmodule('font-otn.lua')
+luaotfload.loadmodule('font-ota.lua')
+luaotfload.loadmodule('font-otc.lua')
+luaotfload.loadmodule('font-agl.lua')
+luaotfload.loadmodule('font-def.lua')
+luaotfload.loadmodule('font-xtx.lua')
+luaotfload.loadmodule('font-dum.lua')
% \end{macrocode}
%
% This is a patch for |otfl-font-def.lua|, that defines a reader for ofm
@@ -645,7 +663,7 @@ fonts.mode = "node"
% but \textsf{luaotfload} does not recognize them in |base| mode.
%
% \begin{macrocode}
-local register_base_sub = fonts.otf.features.register_base_substitution
+local register_base_sub = fonts.otf.features.registerbasesubstitution
local gsubs = {
"ss01", "ss02", "ss03", "ss04", "ss05",
"ss06", "ss07", "ss08", "ss09", "ss10",
diff --git a/otfl-data-con.lua b/otfl-data-con.lua
index e7bb8af..5d9650f 100644
--- a/otfl-data-con.lua
+++ b/otfl-data-con.lua
@@ -25,13 +25,15 @@ table structures without bothering about the disk cache.</p>
<p>Examples of usage can be found in the font related code.</p>
--ldx]]--
-containers = containers or { }
-
+containers = containers or { }
+local containers = containers
containers.usecache = true
+local report_cache = logs.new("cache")
+
local function report(container,tag,name)
if trace_cache or trace_containers then
- logs.report(format("%s cache",container.subcategory),"%s: %s",tag,name or 'invalid')
+ report_cache("container: %s, tag: %s, name: %s",container.subcategory,tag,name or 'invalid')
end
end
@@ -48,7 +50,8 @@ local mt = {
t.readables = readables
return readables
end
- end
+ end,
+ __storage__ = true
}
function containers.define(category, subcategory, version, enabled)
@@ -78,7 +81,7 @@ function containers.define(category, subcategory, version, enabled)
end
function containers.is_usable(container, name)
- return container.enabled and caches and caches.iswritable(container.writable, name)
+ return container.enabled and caches and caches.is_writable(container.writable, name)
end
function containers.is_valid(container, name)
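The otfl-data-con.lua hunks above fix the caches.is_writable spelling inside containers.is_usable and route cache reporting through logs.new("cache"). As rough orientation, the container API touched here is called along these lines; the category, subcategory, version and name values below are invented for illustration, only the function names and signatures come from the diff:

    -- hypothetical call sequence against the containers API shown above
    local cont = containers.define("fonts", "otf", 2.0, true)
    if containers.is_usable(cont, "myfont") then
        -- the cache entry "myfont" may be (re)written
    end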
diff --git a/otfl-font-agl.lua b/otfl-font-agl.lua
new file mode 100644
index 0000000..820600a
--- /dev/null
+++ b/otfl-font-agl.lua
@@ -0,0 +1,3699 @@
+if not modules then modules = { } end modules ['font-map'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "derived from http://www.adobe.com/devnet/opentype/archives/glyphlist.txt",
+ comment = "Adobe Glyph List, version 2.0, September 20, 2002",
+}
+
+local allocate = utilities.storage.allocate
+
+fonts.enc = fonts.enc or { }
+local enc = fonts.enc
+local agl = { }
+enc.agl = agl
+
+agl.names = allocate { -- to name
+ "controlSTX",
+ "controlSOT",
+ "controlETX",
+ "controlEOT",
+ "controlENQ",
+ "controlACK",
+ "controlBEL",
+ "controlBS",
+ "controlHT",
+ "controlLF",
+ "controlVT",
+ "controlFF",
+ "controlCR",
+ "controlSO",
+ "controlSI",
+ "controlDLE",
+ "controlDC1",
+ "controlDC2",
+ "controlDC3",
+ "controlDC4",
+ "controlNAK",
+ "controlSYN",
+ "controlETB",
+ "controlCAN",
+ "controlEM",
+ "controlSUB",
+ "controlESC",
+ "controlFS",
+ "controlGS",
+ "controlRS",
+ "controlUS",
+ "spacehackarabic",
+ "exclam",
+ "quotedbl",
+ "numbersign",
+ "dollar",
+ "percent",
+ "ampersand",
+ "quotesingle",
+ "parenleft",
+ "parenright",
+ "asterisk",
+ "plus",
+ "comma",
+ "hyphen",
+ "period",
+ "slash",
+ "zero",
+ "one",
+ "two",
+ "three",
+ "four",
+ "five",
+ "six",
+ "seven",
+ "eight",
+ "nine",
+ "colon",
+ "semicolon",
+ "less",
+ "equal",
+ "greater",
+ "question",
+ "at",
+ "A",
+ "B",
+ "C",
+ "D",
+ "E",
+ "F",
+ "G",
+ "H",
+ "I",
+ "J",
+ "K",
+ "L",
+ "M",
+ "N",
+ "O",
+ "P",
+ "Q",
+ "R",
+ "S",
+ "T",
+ "U",
+ "V",
+ "W",
+ "X",
+ "Y",
+ "Z",
+ "bracketleft",
+ "backslash",
+ "bracketright",
+ "asciicircum",
+ "underscore",
+ "grave",
+ "a",
+ "b",
+ "c",
+ "d",
+ "e",
+ "f",
+ "g",
+ "h",
+ "i",
+ "j",
+ "k",
+ "l",
+ "m",
+ "n",
+ "o",
+ "p",
+ "q",
+ "r",
+ "s",
+ "t",
+ "u",
+ "v",
+ "w",
+ "x",
+ "y",
+ "z",
+ "braceleft",
+ "verticalbar",
+ "braceright",
+ "asciitilde",
+ "controlDEL",
+ [0x00A0] = "nonbreakingspace",
+ [0x00A1] = "exclamdown",
+ [0x00A2] = "cent",
+ [0x00A3] = "sterling",
+ [0x00A4] = "currency",
+ [0x00A5] = "yen",
+ [0x00A6] = "brokenbar",
+ [0x00A7] = "section",
+ [0x00A8] = "dieresis",
+ [0x00A9] = "copyright",
+ [0x00AA] = "ordfeminine",
+ [0x00AB] = "guillemotleft",
+ [0x00AC] = "logicalnot",
+ [0x00AD] = "softhyphen",
+ [0x00AE] = "registered",
+ [0x00AF] = "overscore",
+ [0x00B0] = "degree",
+ [0x00B1] = "plusminus",
+ [0x00B2] = "twosuperior",
+ [0x00B3] = "threesuperior",
+ [0x00B4] = "acute",
+ [0x00B5] = "mu1",
+ [0x00B6] = "paragraph",
+ [0x00B7] = "periodcentered",
+ [0x00B8] = "cedilla",
+ [0x00B9] = "onesuperior",
+ [0x00BA] = "ordmasculine",
+ [0x00BB] = "guillemotright",
+ [0x00BC] = "onequarter",
+ [0x00BD] = "onehalf",
+ [0x00BE] = "threequarters",
+ [0x00BF] = "questiondown",
+ [0x00C0] = "Agrave",
+ [0x00C1] = "Aacute",
+ [0x00C2] = "Acircumflex",
+ [0x00C3] = "Atilde",
+ [0x00C4] = "Adieresis",
+ [0x00C5] = "Aring",
+ [0x00C6] = "AE",
+ [0x00C7] = "Ccedilla",
+ [0x00C8] = "Egrave",
+ [0x00C9] = "Eacute",
+ [0x00CA] = "Ecircumflex",
+ [0x00CB] = "Edieresis",
+ [0x00CC] = "Igrave",
+ [0x00CD] = "Iacute",
+ [0x00CE] = "Icircumflex",
+ [0x00CF] = "Idieresis",
+ [0x00D0] = "Eth",
+ [0x00D1] = "Ntilde",
+ [0x00D2] = "Ograve",
+ [0x00D3] = "Oacute",
+ [0x00D4] = "Ocircumflex",
+ [0x00D5] = "Otilde",
+ [0x00D6] = "Odieresis",
+ [0x00D7] = "multiply",
+ [0x00D8] = "Oslash",
+ [0x00D9] = "Ugrave",
+ [0x00DA] = "Uacute",
+ [0x00DB] = "Ucircumflex",
+ [0x00DC] = "Udieresis",
+ [0x00DD] = "Yacute",
+ [0x00DE] = "Thorn",
+ [0x00DF] = "germandbls",
+ [0x00E0] = "agrave",
+ [0x00E1] = "aacute",
+ [0x00E2] = "acircumflex",
+ [0x00E3] = "atilde",
+ [0x00E4] = "adieresis",
+ [0x00E5] = "aring",
+ [0x00E6] = "ae",
+ [0x00E7] = "ccedilla",
+ [0x00E8] = "egrave",
+ [0x00E9] = "eacute",
+ [0x00EA] = "ecircumflex",
+ [0x00EB] = "edieresis",
+ [0x00EC] = "igrave",
+ [0x00ED] = "iacute",
+ [0x00EE] = "icircumflex",
+ [0x00EF] = "idieresis",
+ [0x00F0] = "eth",
+ [0x00F1] = "ntilde",
+ [0x00F2] = "ograve",
+ [0x00F3] = "oacute",
+ [0x00F4] = "ocircumflex",
+ [0x00F5] = "otilde",
+ [0x00F6] = "odieresis",
+ [0x00F7] = "divide",
+ [0x00F8] = "oslash",
+ [0x00F9] = "ugrave",
+ [0x00FA] = "uacute",
+ [0x00FB] = "ucircumflex",
+ [0x00FC] = "udieresis",
+ [0x00FD] = "yacute",
+ [0x00FE] = "thorn",
+ [0x00FF] = "ydieresis",
+ [0x0100] = "Amacron",
+ [0x0101] = "amacron",
+ [0x0102] = "Abreve",
+ [0x0103] = "abreve",
+ [0x0104] = "Aogonek",
+ [0x0105] = "aogonek",
+ [0x0106] = "Cacute",
+ [0x0107] = "cacute",
+ [0x0108] = "Ccircumflex",
+ [0x0109] = "ccircumflex",
+ [0x010A] = "Cdotaccent",
+ [0x010B] = "cdotaccent",
+ [0x010C] = "Ccaron",
+ [0x010D] = "ccaron",
+ [0x010E] = "Dcaron",
+ [0x010F] = "dcaron",
+ [0x0110] = "Dslash",
+ [0x0111] = "dmacron",
+ [0x0112] = "Emacron",
+ [0x0113] = "emacron",
+ [0x0114] = "Ebreve",
+ [0x0115] = "ebreve",
+ [0x0116] = "Edotaccent",
+ [0x0117] = "edotaccent",
+ [0x0118] = "Eogonek",
+ [0x0119] = "eogonek",
+ [0x011A] = "Ecaron",
+ [0x011B] = "ecaron",
+ [0x011C] = "Gcircumflex",
+ [0x011D] = "gcircumflex",
+ [0x011E] = "Gbreve",
+ [0x011F] = "gbreve",
+ [0x0120] = "Gdotaccent",
+ [0x0121] = "gdotaccent",
+ [0x0122] = "Gcommaaccent",
+ [0x0123] = "gcommaaccent",
+ [0x0124] = "Hcircumflex",
+ [0x0125] = "hcircumflex",
+ [0x0126] = "Hbar",
+ [0x0127] = "hbar",
+ [0x0128] = "Itilde",
+ [0x0129] = "itilde",
+ [0x012A] = "Imacron",
+ [0x012B] = "imacron",
+ [0x012C] = "Ibreve",
+ [0x012D] = "ibreve",
+ [0x012E] = "Iogonek",
+ [0x012F] = "iogonek",
+ [0x0130] = "Idotaccent",
+ [0x0131] = "dotlessi",
+ [0x0132] = "IJ",
+ [0x0133] = "ij",
+ [0x0134] = "Jcircumflex",
+ [0x0135] = "jcircumflex",
+ [0x0136] = "Kcommaaccent",
+ [0x0137] = "kcommaaccent",
+ [0x0138] = "kgreenlandic",
+ [0x0139] = "Lacute",
+ [0x013A] = "lacute",
+ [0x013B] = "Lcommaaccent",
+ [0x013C] = "lcommaaccent",
+ [0x013D] = "Lcaron",
+ [0x013E] = "lcaron",
+ [0x013F] = "Ldotaccent",
+ [0x0140] = "ldotaccent",
+ [0x0141] = "Lslash",
+ [0x0142] = "lslash",
+ [0x0143] = "Nacute",
+ [0x0144] = "nacute",
+ [0x0145] = "Ncommaaccent",
+ [0x0146] = "ncommaaccent",
+ [0x0147] = "Ncaron",
+ [0x0148] = "ncaron",
+ [0x0149] = "quoterightn",
+ [0x014A] = "Eng",
+ [0x014B] = "eng",
+ [0x014C] = "Omacron",
+ [0x014D] = "omacron",
+ [0x014E] = "Obreve",
+ [0x014F] = "obreve",
+ [0x0150] = "Ohungarumlaut",
+ [0x0151] = "ohungarumlaut",
+ [0x0152] = "OE",
+ [0x0153] = "oe",
+ [0x0154] = "Racute",
+ [0x0155] = "racute",
+ [0x0156] = "Rcommaaccent",
+ [0x0157] = "rcommaaccent",
+ [0x0158] = "Rcaron",
+ [0x0159] = "rcaron",
+ [0x015A] = "Sacute",
+ [0x015B] = "sacute",
+ [0x015C] = "Scircumflex",
+ [0x015D] = "scircumflex",
+ [0x015E] = "Scedilla",
+ [0x015F] = "scedilla",
+ [0x0160] = "Scaron",
+ [0x0161] = "scaron",
+ [0x0162] = "Tcommaaccent",
+ [0x0163] = "tcommaaccent",
+ [0x0164] = "Tcaron",
+ [0x0165] = "tcaron",
+ [0x0166] = "Tbar",
+ [0x0167] = "tbar",
+ [0x0168] = "Utilde",
+ [0x0169] = "utilde",
+ [0x016A] = "Umacron",
+ [0x016B] = "umacron",
+ [0x016C] = "Ubreve",
+ [0x016D] = "ubreve",
+ [0x016E] = "Uring",
+ [0x016F] = "uring",
+ [0x0170] = "Uhungarumlaut",
+ [0x0171] = "uhungarumlaut",
+ [0x0172] = "Uogonek",
+ [0x0173] = "uogonek",
+ [0x0174] = "Wcircumflex",
+ [0x0175] = "wcircumflex",
+ [0x0176] = "Ycircumflex",
+ [0x0177] = "ycircumflex",
+ [0x0178] = "Ydieresis",
+ [0x0179] = "Zacute",
+ [0x017A] = "zacute",
+ [0x017B] = "Zdotaccent",
+ [0x017C] = "zdotaccent",
+ [0x017D] = "Zcaron",
+ [0x017E] = "zcaron",
+ [0x017F] = "slong",
+ [0x0180] = "bstroke",
+ [0x0181] = "Bhook",
+ [0x0182] = "Btopbar",
+ [0x0183] = "btopbar",
+ [0x0184] = "Tonesix",
+ [0x0185] = "tonesix",
+ [0x0186] = "Oopen",
+ [0x0187] = "Chook",
+ [0x0188] = "chook",
+ [0x0189] = "Dafrican",
+ [0x018A] = "Dhook",
+ [0x018B] = "Dtopbar",
+ [0x018C] = "dtopbar",
+ [0x018D] = "deltaturned",
+ [0x018E] = "Ereversed",
+ [0x018F] = "Schwa",
+ [0x0190] = "Eopen",
+ [0x0191] = "Fhook",
+ [0x0192] = "florin",
+ [0x0193] = "Ghook",
+ [0x0194] = "Gammaafrican",
+ [0x0195] = "hv",
+ [0x0196] = "Iotaafrican",
+ [0x0197] = "Istroke",
+ [0x0198] = "Khook",
+ [0x0199] = "khook",
+ [0x019A] = "lbar",
+ [0x019B] = "lambdastroke",
+ [0x019C] = "Mturned",
+ [0x019D] = "Nhookleft",
+ [0x019E] = "nlegrightlong",
+ [0x019F] = "Ocenteredtilde",
+ [0x01A0] = "Ohorn",
+ [0x01A1] = "ohorn",
+ [0x01A2] = "Oi",
+ [0x01A3] = "oi",
+ [0x01A4] = "Phook",
+ [0x01A5] = "phook",
+ [0x01A6] = "yr",
+ [0x01A7] = "Tonetwo",
+ [0x01A8] = "tonetwo",
+ [0x01A9] = "Esh",
+ [0x01AA] = "eshreversedloop",
+ [0x01AB] = "tpalatalhook",
+ [0x01AC] = "Thook",
+ [0x01AD] = "thook",
+ [0x01AE] = "Tretroflexhook",
+ [0x01AF] = "Uhorn",
+ [0x01B0] = "uhorn",
+ [0x01B1] = "Upsilonafrican",
+ [0x01B2] = "Vhook",
+ [0x01B3] = "Yhook",
+ [0x01B4] = "yhook",
+ [0x01B5] = "Zstroke",
+ [0x01B6] = "zstroke",
+ [0x01B7] = "Ezh",
+ [0x01B8] = "Ezhreversed",
+ [0x01B9] = "ezhreversed",
+ [0x01BA] = "ezhtail",
+ [0x01BB] = "twostroke",
+ [0x01BC] = "Tonefive",
+ [0x01BD] = "tonefive",
+ [0x01BE] = "glottalinvertedstroke",
+ [0x01BF] = "wynn",
+ [0x01C0] = "clickdental",
+ [0x01C1] = "clicklateral",
+ [0x01C2] = "clickalveolar",
+ [0x01C3] = "clickretroflex",
+ [0x01C4] = "DZcaron",
+ [0x01C5] = "Dzcaron",
+ [0x01C6] = "dzcaron",
+ [0x01C7] = "LJ",
+ [0x01C8] = "Lj",
+ [0x01C9] = "lj",
+ [0x01CA] = "NJ",
+ [0x01CB] = "Nj",
+ [0x01CC] = "nj",
+ [0x01CD] = "Acaron",
+ [0x01CE] = "acaron",
+ [0x01CF] = "Icaron",
+ [0x01D0] = "icaron",
+ [0x01D1] = "Ocaron",
+ [0x01D2] = "ocaron",
+ [0x01D3] = "Ucaron",
+ [0x01D4] = "ucaron",
+ [0x01D5] = "Udieresismacron",
+ [0x01D6] = "udieresismacron",
+ [0x01D7] = "Udieresisacute",
+ [0x01D8] = "udieresisacute",
+ [0x01D9] = "Udieresiscaron",
+ [0x01DA] = "udieresiscaron",
+ [0x01DB] = "Udieresisgrave",
+ [0x01DC] = "udieresisgrave",
+ [0x01DD] = "eturned",
+ [0x01DE] = "Adieresismacron",
+ [0x01DF] = "adieresismacron",
+ [0x01E0] = "Adotmacron",
+ [0x01E1] = "adotmacron",
+ [0x01E2] = "AEmacron",
+ [0x01E3] = "aemacron",
+ [0x01E4] = "Gstroke",
+ [0x01E5] = "gstroke",
+ [0x01E6] = "Gcaron",
+ [0x01E7] = "gcaron",
+ [0x01E8] = "Kcaron",
+ [0x01E9] = "kcaron",
+ [0x01EA] = "Oogonek",
+ [0x01EB] = "oogonek",
+ [0x01EC] = "Oogonekmacron",
+ [0x01ED] = "oogonekmacron",
+ [0x01EE] = "Ezhcaron",
+ [0x01EF] = "ezhcaron",
+ [0x01F0] = "jcaron",
+ [0x01F1] = "DZ",
+ [0x01F2] = "Dz",
+ [0x01F3] = "dz",
+ [0x01F4] = "Gacute",
+ [0x01F5] = "gacute",
+ [0x01FA] = "Aringacute",
+ [0x01FB] = "aringacute",
+ [0x01FC] = "AEacute",
+ [0x01FD] = "aeacute",
+ [0x01FE] = "Ostrokeacute",
+ [0x01FF] = "ostrokeacute",
+ [0x0200] = "Adblgrave",
+ [0x0201] = "adblgrave",
+ [0x0202] = "Ainvertedbreve",
+ [0x0203] = "ainvertedbreve",
+ [0x0204] = "Edblgrave",
+ [0x0205] = "edblgrave",
+ [0x0206] = "Einvertedbreve",
+ [0x0207] = "einvertedbreve",
+ [0x0208] = "Idblgrave",
+ [0x0209] = "idblgrave",
+ [0x020A] = "Iinvertedbreve",
+ [0x020B] = "iinvertedbreve",
+ [0x020C] = "Odblgrave",
+ [0x020D] = "odblgrave",
+ [0x020E] = "Oinvertedbreve",
+ [0x020F] = "oinvertedbreve",
+ [0x0210] = "Rdblgrave",
+ [0x0211] = "rdblgrave",
+ [0x0212] = "Rinvertedbreve",
+ [0x0213] = "rinvertedbreve",
+ [0x0214] = "Udblgrave",
+ [0x0215] = "udblgrave",
+ [0x0216] = "Uinvertedbreve",
+ [0x0217] = "uinvertedbreve",
+ [0x0218] = "Scommaaccent",
+ [0x0219] = "scommaaccent",
+ [0x0250] = "aturned",
+ [0x0251] = "ascript",
+ [0x0252] = "ascriptturned",
+ [0x0253] = "bhook",
+ [0x0254] = "oopen",
+ [0x0255] = "ccurl",
+ [0x0256] = "dtail",
+ [0x0257] = "dhook",
+ [0x0258] = "ereversed",
+ [0x0259] = "schwa",
+ [0x025A] = "schwahook",
+ [0x025B] = "eopen",
+ [0x025C] = "eopenreversed",
+ [0x025D] = "eopenreversedhook",
+ [0x025E] = "eopenreversedclosed",
+ [0x025F] = "jdotlessstroke",
+ [0x0260] = "ghook",
+ [0x0261] = "gscript",
+ [0x0263] = "gammalatinsmall",
+ [0x0264] = "ramshorn",
+ [0x0265] = "hturned",
+ [0x0266] = "hhook",
+ [0x0267] = "henghook",
+ [0x0268] = "istroke",
+ [0x0269] = "iotalatin",
+ [0x026B] = "lmiddletilde",
+ [0x026C] = "lbelt",
+ [0x026D] = "lhookretroflex",
+ [0x026E] = "lezh",
+ [0x026F] = "mturned",
+ [0x0270] = "mlonglegturned",
+ [0x0271] = "mhook",
+ [0x0272] = "nhookleft",
+ [0x0273] = "nhookretroflex",
+ [0x0275] = "obarred",
+ [0x0277] = "omegalatinclosed",
+ [0x0278] = "philatin",
+ [0x0279] = "rturned",
+ [0x027A] = "rlonglegturned",
+ [0x027B] = "rhookturned",
+ [0x027C] = "rlongleg",
+ [0x027D] = "rhook",
+ [0x027E] = "rfishhook",
+ [0x027F] = "rfishhookreversed",
+ [0x0281] = "Rsmallinverted",
+ [0x0282] = "shook",
+ [0x0283] = "esh",
+ [0x0284] = "dotlessjstrokehook",
+ [0x0285] = "eshsquatreversed",
+ [0x0286] = "eshcurl",
+ [0x0287] = "tturned",
+ [0x0288] = "tretroflexhook",
+ [0x0289] = "ubar",
+ [0x028A] = "upsilonlatin",
+ [0x028B] = "vhook",
+ [0x028C] = "vturned",
+ [0x028D] = "wturned",
+ [0x028E] = "yturned",
+ [0x0290] = "zretroflexhook",
+ [0x0291] = "zcurl",
+ [0x0292] = "ezh",
+ [0x0293] = "ezhcurl",
+ [0x0294] = "glottalstop",
+ [0x0295] = "glottalstopreversed",
+ [0x0296] = "glottalstopinverted",
+ [0x0297] = "cstretched",
+ [0x0298] = "bilabialclick",
+ [0x029A] = "eopenclosed",
+ [0x029B] = "Gsmallhook",
+ [0x029D] = "jcrossedtail",
+ [0x029E] = "kturned",
+ [0x02A0] = "qhook",
+ [0x02A1] = "glottalstopstroke",
+ [0x02A2] = "glottalstopstrokereversed",
+ [0x02A3] = "dzaltone",
+ [0x02A4] = "dezh",
+ [0x02A5] = "dzcurl",
+ [0x02A6] = "ts",
+ [0x02A7] = "tesh",
+ [0x02A8] = "tccurl",
+ [0x02B0] = "hsuperior",
+ [0x02B1] = "hhooksuperior",
+ [0x02B2] = "jsuperior",
+ [0x02B4] = "rturnedsuperior",
+ [0x02B5] = "rhookturnedsuperior",
+ [0x02B6] = "Rsmallinvertedsuperior",
+ [0x02B7] = "wsuperior",
+ [0x02B8] = "ysuperior",
+ [0x02B9] = "primemod",
+ [0x02BA] = "dblprimemod",
+ [0x02BB] = "commaturnedmod",
+ [0x02BC] = "apostrophemod",
+ [0x02BD] = "commareversedmod",
+ [0x02BE] = "ringhalfright",
+ [0x02BF] = "ringhalfleft",
+ [0x02C0] = "glottalstopmod",
+ [0x02C1] = "glottalstopreversedmod",
+ [0x02C2] = "arrowheadleftmod",
+ [0x02C3] = "arrowheadrightmod",
+ [0x02C4] = "arrowheadupmod",
+ [0x02C5] = "arrowheaddownmod",
+ [0x02C6] = "circumflex",
+ [0x02C7] = "caron",
+ [0x02C8] = "verticallinemod",
+ [0x02C9] = "firsttonechinese",
+ [0x02CA] = "secondtonechinese",
+ [0x02CB] = "fourthtonechinese",
+ [0x02CC] = "verticallinelowmod",
+ [0x02CD] = "macronlowmod",
+ [0x02CE] = "gravelowmod",
+ [0x02CF] = "acutelowmod",
+ [0x02D0] = "colontriangularmod",
+ [0x02D1] = "colontriangularhalfmod",
+ [0x02D2] = "ringhalfrightcentered",
+ [0x02D3] = "ringhalfleftcentered",
+ [0x02D4] = "uptackmod",
+ [0x02D5] = "downtackmod",
+ [0x02D6] = "plusmod",
+ [0x02D7] = "minusmod",
+ [0x02D8] = "breve",
+ [0x02D9] = "dotaccent",
+ [0x02DA] = "ring",
+ [0x02DB] = "ogonek",
+ [0x02DC] = "tilde",
+ [0x02DD] = "hungarumlaut",
+ [0x02DE] = "rhotichookmod",
+ [0x02E0] = "gammasuperior",
+ [0x02E3] = "xsuperior",
+ [0x02E4] = "glottalstopreversedsuperior",
+ [0x02E5] = "tonebarextrahighmod",
+ [0x02E6] = "tonebarhighmod",
+ [0x02E7] = "tonebarmidmod",
+ [0x02E8] = "tonebarlowmod",
+ [0x02E9] = "tonebarextralowmod",
+ [0x0300] = "gravecomb",
+ [0x0301] = "acutecomb",
+ [0x0302] = "circumflexcmb",
+ [0x0303] = "tildecomb",
+ [0x0304] = "macroncmb",
+ [0x0305] = "overlinecmb",
+ [0x0306] = "brevecmb",
+ [0x0307] = "dotaccentcmb",
+ [0x0308] = "dieresiscmb",
+ [0x0309] = "hookcmb",
+ [0x030A] = "ringcmb",
+ [0x030B] = "hungarumlautcmb",
+ [0x030C] = "caroncmb",
+ [0x030D] = "verticallineabovecmb",
+ [0x030E] = "dblverticallineabovecmb",
+ [0x030F] = "dblgravecmb",
+ [0x0310] = "candrabinducmb",
+ [0x0311] = "breveinvertedcmb",
+ [0x0312] = "commaturnedabovecmb",
+ [0x0313] = "commaabovecmb",
+ [0x0314] = "commareversedabovecmb",
+ [0x0315] = "commaaboverightcmb",
+ [0x0316] = "gravebelowcmb",
+ [0x0317] = "acutebelowcmb",
+ [0x0318] = "lefttackbelowcmb",
+ [0x0319] = "righttackbelowcmb",
+ [0x031A] = "leftangleabovecmb",
+ [0x031B] = "horncmb",
+ [0x031C] = "ringhalfleftbelowcmb",
+ [0x031D] = "uptackbelowcmb",
+ [0x031E] = "downtackbelowcmb",
+ [0x031F] = "plusbelowcmb",
+ [0x0320] = "minusbelowcmb",
+ [0x0321] = "hookpalatalizedbelowcmb",
+ [0x0322] = "hookretroflexbelowcmb",
+ [0x0323] = "dotbelowcomb",
+ [0x0324] = "dieresisbelowcmb",
+ [0x0325] = "ringbelowcmb",
+ [0x0327] = "cedillacmb",
+ [0x0328] = "ogonekcmb",
+ [0x0329] = "verticallinebelowcmb",
+ [0x032A] = "bridgebelowcmb",
+ [0x032B] = "dblarchinvertedbelowcmb",
+ [0x032C] = "caronbelowcmb",
+ [0x032D] = "circumflexbelowcmb",
+ [0x032E] = "brevebelowcmb",
+ [0x032F] = "breveinvertedbelowcmb",
+ [0x0330] = "tildebelowcmb",
+ [0x0331] = "macronbelowcmb",
+ [0x0332] = "lowlinecmb",
+ [0x0333] = "dbllowlinecmb",
+ [0x0334] = "tildeoverlaycmb",
+ [0x0335] = "strokeshortoverlaycmb",
+ [0x0336] = "strokelongoverlaycmb",
+ [0x0337] = "solidusshortoverlaycmb",
+ [0x0338] = "soliduslongoverlaycmb",
+ [0x0339] = "ringhalfrightbelowcmb",
+ [0x033A] = "bridgeinvertedbelowcmb",
+ [0x033B] = "squarebelowcmb",
+ [0x033C] = "seagullbelowcmb",
+ [0x033D] = "xabovecmb",
+ [0x033E] = "tildeverticalcmb",
+ [0x033F] = "dbloverlinecmb",
+ [0x0340] = "gravetonecmb",
+ [0x0341] = "acutetonecmb",
+ [0x0342] = "perispomenigreekcmb",
+ [0x0343] = "koroniscmb",
+ [0x0344] = "dialytikatonoscmb",
+ [0x0345] = "ypogegrammenigreekcmb",
+ [0x0360] = "tildedoublecmb",
+ [0x0361] = "breveinverteddoublecmb",
+ [0x0374] = "numeralsigngreek",
+ [0x0375] = "numeralsignlowergreek",
+ [0x037A] = "ypogegrammeni",
+ [0x037E] = "questiongreek",
+ [0x0384] = "tonos",
+ [0x0385] = "dieresistonos",
+ [0x0386] = "Alphatonos",
+ [0x0387] = "anoteleia",
+ [0x0388] = "Epsilontonos",
+ [0x0389] = "Etatonos",
+ [0x038A] = "Iotatonos",
+ [0x038C] = "Omicrontonos",
+ [0x038E] = "Upsilontonos",
+ [0x038F] = "Omegatonos",
+ [0x0390] = "iotadieresistonos",
+ [0x0391] = "Alpha",
+ [0x0392] = "Beta",
+ [0x0393] = "Gamma",
+ [0x0394] = "Deltagreek",
+ [0x0395] = "Epsilon",
+ [0x0396] = "Zeta",
+ [0x0397] = "Eta",
+ [0x0398] = "Theta",
+ [0x0399] = "Iota",
+ [0x039A] = "Kappa",
+ [0x039B] = "Lambda",
+ [0x039C] = "Mu",
+ [0x039D] = "Nu",
+ [0x039E] = "Xi",
+ [0x039F] = "Omicron",
+ [0x03A0] = "Pi",
+ [0x03A1] = "Rho",
+ [0x03A3] = "Sigma",
+ [0x03A4] = "Tau",
+ [0x03A5] = "Upsilon",
+ [0x03A6] = "Phi",
+ [0x03A7] = "Chi",
+ [0x03A8] = "Psi",
+ [0x03A9] = "Omegagreek",
+ [0x03AA] = "Iotadieresis",
+ [0x03AB] = "Upsilondieresis",
+ [0x03AC] = "alphatonos",
+ [0x03AD] = "epsilontonos",
+ [0x03AE] = "etatonos",
+ [0x03AF] = "iotatonos",
+ [0x03B0] = "upsilondieresistonos",
+ [0x03B1] = "alpha",
+ [0x03B2] = "beta",
+ [0x03B3] = "gamma",
+ [0x03B4] = "delta",
+ [0x03B5] = "epsilon",
+ [0x03B6] = "zeta",
+ [0x03B7] = "eta",
+ [0x03B8] = "theta",
+ [0x03B9] = "iota",
+ [0x03BA] = "kappa",
+ [0x03BB] = "lambda",
+ [0x03BC] = "mugreek",
+ [0x03BD] = "nu",
+ [0x03BE] = "xi",
+ [0x03BF] = "omicron",
+ [0x03C0] = "pi",
+ [0x03C1] = "rho",
+ [0x03C2] = "sigmafinal",
+ [0x03C3] = "sigma",
+ [0x03C4] = "tau",
+ [0x03C5] = "upsilon",
+ [0x03C6] = "phi",
+ [0x03C7] = "chi",
+ [0x03C8] = "psi",
+ [0x03C9] = "omega",
+ [0x03CA] = "iotadieresis",
+ [0x03CB] = "upsilondieresis",
+ [0x03CC] = "omicrontonos",
+ [0x03CD] = "upsilontonos",
+ [0x03CE] = "omegatonos",
+ [0x03D0] = "betasymbolgreek",
+ [0x03D1] = "thetasymbolgreek",
+ [0x03D2] = "Upsilonhooksymbol",
+ [0x03D3] = "Upsilonacutehooksymbolgreek",
+ [0x03D4] = "Upsilondieresishooksymbolgreek",
+ [0x03D5] = "phisymbolgreek",
+ [0x03D6] = "pisymbolgreek",
+ [0x03DA] = "Stigmagreek",
+ [0x03DC] = "Digammagreek",
+ [0x03DE] = "Koppagreek",
+ [0x03E0] = "Sampigreek",
+ [0x03E2] = "Sheicoptic",
+ [0x03E3] = "sheicoptic",
+ [0x03E4] = "Feicoptic",
+ [0x03E5] = "feicoptic",
+ [0x03E6] = "Kheicoptic",
+ [0x03E7] = "kheicoptic",
+ [0x03E8] = "Horicoptic",
+ [0x03E9] = "horicoptic",
+ [0x03EA] = "Gangiacoptic",
+ [0x03EB] = "gangiacoptic",
+ [0x03EC] = "Shimacoptic",
+ [0x03ED] = "shimacoptic",
+ [0x03EE] = "Deicoptic",
+ [0x03EF] = "deicoptic",
+ [0x03F0] = "kappasymbolgreek",
+ [0x03F1] = "rhosymbolgreek",
+ [0x03F2] = "sigmalunatesymbolgreek",
+ [0x03F3] = "yotgreek",
+ [0x0401] = "afii10023",
+ [0x0402] = "afii10051",
+ [0x0403] = "afii10052",
+ [0x0404] = "afii10053",
+ [0x0405] = "afii10054",
+ [0x0406] = "afii10055",
+ [0x0407] = "afii10056",
+ [0x0408] = "afii10057",
+ [0x0409] = "afii10058",
+ [0x040A] = "afii10059",
+ [0x040B] = "afii10060",
+ [0x040C] = "afii10061",
+ [0x040E] = "afii10062",
+ [0x040F] = "afii10145",
+ [0x0410] = "afii10017",
+ [0x0411] = "afii10018",
+ [0x0412] = "afii10019",
+ [0x0413] = "afii10020",
+ [0x0414] = "afii10021",
+ [0x0415] = "afii10022",
+ [0x0416] = "afii10024",
+ [0x0417] = "afii10025",
+ [0x0418] = "afii10026",
+ [0x0419] = "afii10027",
+ [0x041A] = "afii10028",
+ [0x041B] = "afii10029",
+ [0x041C] = "afii10030",
+ [0x041D] = "afii10031",
+ [0x041E] = "afii10032",
+ [0x041F] = "afii10033",
+ [0x0420] = "afii10034",
+ [0x0421] = "afii10035",
+ [0x0422] = "afii10036",
+ [0x0423] = "afii10037",
+ [0x0424] = "afii10038",
+ [0x0425] = "afii10039",
+ [0x0426] = "afii10040",
+ [0x0427] = "afii10041",
+ [0x0428] = "afii10042",
+ [0x0429] = "afii10043",
+ [0x042A] = "afii10044",
+ [0x042B] = "afii10045",
+ [0x042C] = "afii10046",
+ [0x042D] = "afii10047",
+ [0x042E] = "afii10048",
+ [0x042F] = "afii10049",
+ [0x0430] = "afii10065",
+ [0x0431] = "becyrillic",
+ [0x0432] = "vecyrillic",
+ [0x0433] = "gecyrillic",
+ [0x0434] = "decyrillic",
+ [0x0435] = "iecyrillic",
+ [0x0436] = "zhecyrillic",
+ [0x0437] = "zecyrillic",
+ [0x0438] = "iicyrillic",
+ [0x0439] = "iishortcyrillic",
+ [0x043A] = "kacyrillic",
+ [0x043B] = "elcyrillic",
+ [0x043C] = "emcyrillic",
+ [0x043D] = "encyrillic",
+ [0x043E] = "ocyrillic",
+ [0x043F] = "pecyrillic",
+ [0x0440] = "ercyrillic",
+ [0x0441] = "escyrillic",
+ [0x0442] = "tecyrillic",
+ [0x0443] = "ucyrillic",
+ [0x0444] = "efcyrillic",
+ [0x0445] = "khacyrillic",
+ [0x0446] = "tsecyrillic",
+ [0x0447] = "checyrillic",
+ [0x0448] = "shacyrillic",
+ [0x0449] = "shchacyrillic",
+ [0x044A] = "hardsigncyrillic",
+ [0x044B] = "yericyrillic",
+ [0x044C] = "softsigncyrillic",
+ [0x044D] = "ereversedcyrillic",
+ [0x044E] = "iucyrillic",
+ [0x044F] = "iacyrillic",
+ [0x0451] = "iocyrillic",
+ [0x0452] = "djecyrillic",
+ [0x0453] = "gjecyrillic",
+ [0x0454] = "ecyrillic",
+ [0x0455] = "dzecyrillic",
+ [0x0456] = "icyrillic",
+ [0x0457] = "yicyrillic",
+ [0x0458] = "jecyrillic",
+ [0x0459] = "ljecyrillic",
+ [0x045A] = "njecyrillic",
+ [0x045B] = "tshecyrillic",
+ [0x045C] = "kjecyrillic",
+ [0x045E] = "ushortcyrillic",
+ [0x045F] = "dzhecyrillic",
+ [0x0460] = "Omegacyrillic",
+ [0x0461] = "omegacyrillic",
+ [0x0462] = "afii10146",
+ [0x0463] = "yatcyrillic",
+ [0x0464] = "Eiotifiedcyrillic",
+ [0x0465] = "eiotifiedcyrillic",
+ [0x0466] = "Yuslittlecyrillic",
+ [0x0467] = "yuslittlecyrillic",
+ [0x0468] = "Yuslittleiotifiedcyrillic",
+ [0x0469] = "yuslittleiotifiedcyrillic",
+ [0x046A] = "Yusbigcyrillic",
+ [0x046B] = "yusbigcyrillic",
+ [0x046C] = "Yusbigiotifiedcyrillic",
+ [0x046D] = "yusbigiotifiedcyrillic",
+ [0x046E] = "Ksicyrillic",
+ [0x046F] = "ksicyrillic",
+ [0x0470] = "Psicyrillic",
+ [0x0471] = "psicyrillic",
+ [0x0472] = "afii10147",
+ [0x0473] = "fitacyrillic",
+ [0x0474] = "afii10148",
+ [0x0475] = "izhitsacyrillic",
+ [0x0476] = "Izhitsadblgravecyrillic",
+ [0x0477] = "izhitsadblgravecyrillic",
+ [0x0478] = "Ukcyrillic",
+ [0x0479] = "ukcyrillic",
+ [0x047A] = "Omegaroundcyrillic",
+ [0x047B] = "omegaroundcyrillic",
+ [0x047C] = "Omegatitlocyrillic",
+ [0x047D] = "omegatitlocyrillic",
+ [0x047E] = "Otcyrillic",
+ [0x047F] = "otcyrillic",
+ [0x0480] = "Koppacyrillic",
+ [0x0481] = "koppacyrillic",
+ [0x0482] = "thousandcyrillic",
+ [0x0483] = "titlocyrilliccmb",
+ [0x0484] = "palatalizationcyrilliccmb",
+ [0x0485] = "dasiapneumatacyrilliccmb",
+ [0x0486] = "psilipneumatacyrilliccmb",
+ [0x0490] = "afii10050",
+ [0x0491] = "gheupturncyrillic",
+ [0x0492] = "Ghestrokecyrillic",
+ [0x0493] = "ghestrokecyrillic",
+ [0x0494] = "Ghemiddlehookcyrillic",
+ [0x0495] = "ghemiddlehookcyrillic",
+ [0x0496] = "Zhedescendercyrillic",
+ [0x0497] = "zhedescendercyrillic",
+ [0x0498] = "Zedescendercyrillic",
+ [0x0499] = "zedescendercyrillic",
+ [0x049A] = "Kadescendercyrillic",
+ [0x049B] = "kadescendercyrillic",
+ [0x049C] = "Kaverticalstrokecyrillic",
+ [0x049D] = "kaverticalstrokecyrillic",
+ [0x049E] = "Kastrokecyrillic",
+ [0x049F] = "kastrokecyrillic",
+ [0x04A0] = "Kabashkircyrillic",
+ [0x04A1] = "kabashkircyrillic",
+ [0x04A2] = "Endescendercyrillic",
+ [0x04A3] = "endescendercyrillic",
+ [0x04A4] = "Enghecyrillic",
+ [0x04A5] = "enghecyrillic",
+ [0x04A6] = "Pemiddlehookcyrillic",
+ [0x04A7] = "pemiddlehookcyrillic",
+ [0x04A8] = "Haabkhasiancyrillic",
+ [0x04A9] = "haabkhasiancyrillic",
+ [0x04AA] = "Esdescendercyrillic",
+ [0x04AB] = "esdescendercyrillic",
+ [0x04AC] = "Tedescendercyrillic",
+ [0x04AD] = "tedescendercyrillic",
+ [0x04AE] = "Ustraightcyrillic",
+ [0x04AF] = "ustraightcyrillic",
+ [0x04B0] = "Ustraightstrokecyrillic",
+ [0x04B1] = "ustraightstrokecyrillic",
+ [0x04B2] = "Hadescendercyrillic",
+ [0x04B3] = "hadescendercyrillic",
+ [0x04B4] = "Tetsecyrillic",
+ [0x04B5] = "tetsecyrillic",
+ [0x04B6] = "Chedescendercyrillic",
+ [0x04B7] = "chedescendercyrillic",
+ [0x04B8] = "Cheverticalstrokecyrillic",
+ [0x04B9] = "cheverticalstrokecyrillic",
+ [0x04BA] = "Shhacyrillic",
+ [0x04BB] = "shhacyrillic",
+ [0x04BC] = "Cheabkhasiancyrillic",
+ [0x04BD] = "cheabkhasiancyrillic",
+ [0x04BE] = "Chedescenderabkhasiancyrillic",
+ [0x04BF] = "chedescenderabkhasiancyrillic",
+ [0x04C0] = "palochkacyrillic",
+ [0x04C1] = "Zhebrevecyrillic",
+ [0x04C2] = "zhebrevecyrillic",
+ [0x04C3] = "Kahookcyrillic",
+ [0x04C4] = "kahookcyrillic",
+ [0x04C7] = "Enhookcyrillic",
+ [0x04C8] = "enhookcyrillic",
+ [0x04CB] = "Chekhakassiancyrillic",
+ [0x04CC] = "chekhakassiancyrillic",
+ [0x04D0] = "Abrevecyrillic",
+ [0x04D1] = "abrevecyrillic",
+ [0x04D2] = "Adieresiscyrillic",
+ [0x04D3] = "adieresiscyrillic",
+ [0x04D4] = "Aiecyrillic",
+ [0x04D5] = "aiecyrillic",
+ [0x04D6] = "Iebrevecyrillic",
+ [0x04D7] = "iebrevecyrillic",
+ [0x04D8] = "Schwacyrillic",
+ [0x04D9] = "schwacyrillic",
+ [0x04DA] = "Schwadieresiscyrillic",
+ [0x04DB] = "schwadieresiscyrillic",
+ [0x04DC] = "Zhedieresiscyrillic",
+ [0x04DD] = "zhedieresiscyrillic",
+ [0x04DE] = "Zedieresiscyrillic",
+ [0x04DF] = "zedieresiscyrillic",
+ [0x04E0] = "Dzeabkhasiancyrillic",
+ [0x04E1] = "dzeabkhasiancyrillic",
+ [0x04E2] = "Imacroncyrillic",
+ [0x04E3] = "imacroncyrillic",
+ [0x04E4] = "Idieresiscyrillic",
+ [0x04E5] = "idieresiscyrillic",
+ [0x04E6] = "Odieresiscyrillic",
+ [0x04E7] = "odieresiscyrillic",
+ [0x04E8] = "Obarredcyrillic",
+ [0x04E9] = "obarredcyrillic",
+ [0x04EA] = "Obarreddieresiscyrillic",
+ [0x04EB] = "obarreddieresiscyrillic",
+ [0x04EE] = "Umacroncyrillic",
+ [0x04EF] = "umacroncyrillic",
+ [0x04F0] = "Udieresiscyrillic",
+ [0x04F1] = "udieresiscyrillic",
+ [0x04F2] = "Uhungarumlautcyrillic",
+ [0x04F3] = "uhungarumlautcyrillic",
+ [0x04F4] = "Chedieresiscyrillic",
+ [0x04F5] = "chedieresiscyrillic",
+ [0x04F8] = "Yerudieresiscyrillic",
+ [0x04F9] = "yerudieresiscyrillic",
+ [0x0531] = "Aybarmenian",
+ [0x0532] = "Benarmenian",
+ [0x0533] = "Gimarmenian",
+ [0x0534] = "Daarmenian",
+ [0x0535] = "Echarmenian",
+ [0x0536] = "Zaarmenian",
+ [0x0537] = "Eharmenian",
+ [0x0538] = "Etarmenian",
+ [0x0539] = "Toarmenian",
+ [0x053A] = "Zhearmenian",
+ [0x053B] = "Iniarmenian",
+ [0x053C] = "Liwnarmenian",
+ [0x053D] = "Xeharmenian",
+ [0x053E] = "Caarmenian",
+ [0x053F] = "Kenarmenian",
+ [0x0540] = "Hoarmenian",
+ [0x0541] = "Jaarmenian",
+ [0x0542] = "Ghadarmenian",
+ [0x0543] = "Cheharmenian",
+ [0x0544] = "Menarmenian",
+ [0x0545] = "Yiarmenian",
+ [0x0546] = "Nowarmenian",
+ [0x0547] = "Shaarmenian",
+ [0x0548] = "Voarmenian",
+ [0x0549] = "Chaarmenian",
+ [0x054A] = "Peharmenian",
+ [0x054B] = "Jheharmenian",
+ [0x054C] = "Raarmenian",
+ [0x054D] = "Seharmenian",
+ [0x054E] = "Vewarmenian",
+ [0x054F] = "Tiwnarmenian",
+ [0x0550] = "Reharmenian",
+ [0x0551] = "Coarmenian",
+ [0x0552] = "Yiwnarmenian",
+ [0x0553] = "Piwrarmenian",
+ [0x0554] = "Keharmenian",
+ [0x0555] = "Oharmenian",
+ [0x0556] = "Feharmenian",
+ [0x0559] = "ringhalfleftarmenian",
+ [0x055A] = "apostrophearmenian",
+ [0x055B] = "emphasismarkarmenian",
+ [0x055C] = "exclamarmenian",
+ [0x055D] = "commaarmenian",
+ [0x055E] = "questionarmenian",
+ [0x055F] = "abbreviationmarkarmenian",
+ [0x0561] = "aybarmenian",
+ [0x0562] = "benarmenian",
+ [0x0563] = "gimarmenian",
+ [0x0564] = "daarmenian",
+ [0x0565] = "echarmenian",
+ [0x0566] = "zaarmenian",
+ [0x0567] = "eharmenian",
+ [0x0568] = "etarmenian",
+ [0x0569] = "toarmenian",
+ [0x056A] = "zhearmenian",
+ [0x056B] = "iniarmenian",
+ [0x056C] = "liwnarmenian",
+ [0x056D] = "xeharmenian",
+ [0x056E] = "caarmenian",
+ [0x056F] = "kenarmenian",
+ [0x0570] = "hoarmenian",
+ [0x0571] = "jaarmenian",
+ [0x0572] = "ghadarmenian",
+ [0x0573] = "cheharmenian",
+ [0x0574] = "menarmenian",
+ [0x0575] = "yiarmenian",
+ [0x0576] = "nowarmenian",
+ [0x0577] = "shaarmenian",
+ [0x0578] = "voarmenian",
+ [0x0579] = "chaarmenian",
+ [0x057A] = "peharmenian",
+ [0x057B] = "jheharmenian",
+ [0x057C] = "raarmenian",
+ [0x057D] = "seharmenian",
+ [0x057E] = "vewarmenian",
+ [0x057F] = "tiwnarmenian",
+ [0x0580] = "reharmenian",
+ [0x0581] = "coarmenian",
+ [0x0582] = "yiwnarmenian",
+ [0x0583] = "piwrarmenian",
+ [0x0584] = "keharmenian",
+ [0x0585] = "oharmenian",
+ [0x0586] = "feharmenian",
+ [0x0587] = "echyiwnarmenian",
+ [0x0589] = "periodarmenian",
+ [0x0591] = "etnahtalefthebrew",
+ [0x0592] = "segoltahebrew",
+ [0x0593] = "shalshelethebrew",
+ [0x0594] = "zaqefqatanhebrew",
+ [0x0595] = "zaqefgadolhebrew",
+ [0x0596] = "tipehalefthebrew",
+ [0x0597] = "reviamugrashhebrew",
+ [0x0598] = "zarqahebrew",
+ [0x0599] = "pashtahebrew",
+ [0x059A] = "yetivhebrew",
+ [0x059B] = "tevirlefthebrew",
+ [0x059C] = "gereshaccenthebrew",
+ [0x059D] = "gereshmuqdamhebrew",
+ [0x059E] = "gershayimaccenthebrew",
+ [0x059F] = "qarneyparahebrew",
+ [0x05A0] = "telishagedolahebrew",
+ [0x05A1] = "pazerhebrew",
+ [0x05A3] = "munahlefthebrew",
+ [0x05A4] = "mahapakhlefthebrew",
+ [0x05A5] = "merkhalefthebrew",
+ [0x05A6] = "merkhakefulalefthebrew",
+ [0x05A7] = "dargalefthebrew",
+ [0x05A8] = "qadmahebrew",
+ [0x05A9] = "telishaqetanahebrew",
+ [0x05AA] = "yerahbenyomolefthebrew",
+ [0x05AB] = "olehebrew",
+ [0x05AC] = "iluyhebrew",
+ [0x05AD] = "dehihebrew",
+ [0x05AE] = "zinorhebrew",
+ [0x05AF] = "masoracirclehebrew",
+ [0x05B0] = "shevawidehebrew",
+ [0x05B1] = "hatafsegolwidehebrew",
+ [0x05B2] = "hatafpatahwidehebrew",
+ [0x05B3] = "hatafqamatswidehebrew",
+ [0x05B4] = "hiriqwidehebrew",
+ [0x05B5] = "tserewidehebrew",
+ [0x05B6] = "segolwidehebrew",
+ [0x05B7] = "patahwidehebrew",
+ [0x05B8] = "qamatswidehebrew",
+ [0x05B9] = "holamwidehebrew",
+ [0x05BB] = "qubutswidehebrew",
+ [0x05BC] = "dageshhebrew",
+ [0x05BD] = "siluqlefthebrew",
+ [0x05BE] = "maqafhebrew",
+ [0x05BF] = "rafehebrew",
+ [0x05C0] = "paseqhebrew",
+ [0x05C1] = "shindothebrew",
+ [0x05C2] = "sindothebrew",
+ [0x05C3] = "sofpasuqhebrew",
+ [0x05C4] = "upperdothebrew",
+ [0x05D0] = "alefhebrew",
+ [0x05D1] = "bethebrew",
+ [0x05D2] = "gimelhebrew",
+ [0x05D3] = "dalettserehebrew",
+ [0x05D4] = "hehebrew",
+ [0x05D5] = "vavhebrew",
+ [0x05D6] = "zayinhebrew",
+ [0x05D7] = "hethebrew",
+ [0x05D8] = "tethebrew",
+ [0x05D9] = "yodhebrew",
+ [0x05DA] = "finalkafshevahebrew",
+ [0x05DB] = "kafhebrew",
+ [0x05DC] = "lamedholamhebrew",
+ [0x05DD] = "finalmemhebrew",
+ [0x05DE] = "memhebrew",
+ [0x05DF] = "finalnunhebrew",
+ [0x05E0] = "nunhebrew",
+ [0x05E1] = "samekhhebrew",
+ [0x05E2] = "ayinhebrew",
+ [0x05E3] = "finalpehebrew",
+ [0x05E4] = "pehebrew",
+ [0x05E5] = "finaltsadihebrew",
+ [0x05E6] = "tsadihebrew",
+ [0x05E7] = "qoftserehebrew",
+ [0x05E8] = "reshtserehebrew",
+ [0x05E9] = "shinhebrew",
+ [0x05EA] = "tavhebrew",
+ [0x05F0] = "vavvavhebrew",
+ [0x05F1] = "vavyodhebrew",
+ [0x05F2] = "yodyodhebrew",
+ [0x05F3] = "gereshhebrew",
+ [0x05F4] = "gershayimhebrew",
+ [0x060C] = "commaarabic",
+ [0x061B] = "semicolonarabic",
+ [0x061F] = "questionarabic",
+ [0x0621] = "hamzasukunarabic",
+ [0x0622] = "alefmaddaabovearabic",
+ [0x0623] = "alefhamzaabovearabic",
+ [0x0624] = "wawhamzaabovearabic",
+ [0x0625] = "alefhamzabelowarabic",
+ [0x0626] = "yehhamzaabovearabic",
+ [0x0627] = "alefarabic",
+ [0x0628] = "beharabic",
+ [0x0629] = "tehmarbutaarabic",
+ [0x062A] = "teharabic",
+ [0x062B] = "theharabic",
+ [0x062C] = "jeemarabic",
+ [0x062D] = "haharabic",
+ [0x062E] = "khaharabic",
+ [0x062F] = "dalarabic",
+ [0x0630] = "thalarabic",
+ [0x0631] = "rehyehaleflamarabic",
+ [0x0632] = "zainarabic",
+ [0x0633] = "seenarabic",
+ [0x0634] = "sheenarabic",
+ [0x0635] = "sadarabic",
+ [0x0636] = "dadarabic",
+ [0x0637] = "taharabic",
+ [0x0638] = "zaharabic",
+ [0x0639] = "ainarabic",
+ [0x063A] = "ghainarabic",
+ [0x0640] = "tatweelarabic",
+ [0x0641] = "feharabic",
+ [0x0642] = "qafarabic",
+ [0x0643] = "kafarabic",
+ [0x0644] = "lamarabic",
+ [0x0645] = "meemarabic",
+ [0x0646] = "noonarabic",
+ [0x0647] = "heharabic",
+ [0x0648] = "wawarabic",
+ [0x0649] = "alefmaksuraarabic",
+ [0x064A] = "yeharabic",
+ [0x064B] = "fathatanarabic",
+ [0x064C] = "dammatanarabic",
+ [0x064D] = "kasratanarabic",
+ [0x064E] = "fathalowarabic",
+ [0x064F] = "dammalowarabic",
+ [0x0650] = "kasraarabic",
+ [0x0651] = "shaddafathatanarabic",
+ [0x0652] = "sukunarabic",
+ [0x0660] = "zerohackarabic",
+ [0x0661] = "onehackarabic",
+ [0x0662] = "twohackarabic",
+ [0x0663] = "threehackarabic",
+ [0x0664] = "fourhackarabic",
+ [0x0665] = "fivehackarabic",
+ [0x0666] = "sixhackarabic",
+ [0x0667] = "sevenhackarabic",
+ [0x0668] = "eighthackarabic",
+ [0x0669] = "ninehackarabic",
+ [0x066A] = "percentarabic",
+ [0x066B] = "decimalseparatorpersian",
+ [0x066C] = "thousandsseparatorpersian",
+ [0x066D] = "asteriskarabic",
+ [0x0679] = "tteharabic",
+ [0x067E] = "peharabic",
+ [0x0686] = "tcheharabic",
+ [0x0688] = "ddalarabic",
+ [0x0691] = "rreharabic",
+ [0x0698] = "jeharabic",
+ [0x06A4] = "veharabic",
+ [0x06AF] = "gafarabic",
+ [0x06BA] = "noonghunnaarabic",
+ [0x06C1] = "hehaltonearabic",
+ [0x06D1] = "yehthreedotsbelowarabic",
+ [0x06D2] = "yehbarreearabic",
+ [0x06D5] = "afii57534",
+ [0x06F0] = "zeropersian",
+ [0x06F1] = "onepersian",
+ [0x06F2] = "twopersian",
+ [0x06F3] = "threepersian",
+ [0x06F4] = "fourpersian",
+ [0x06F5] = "fivepersian",
+ [0x06F6] = "sixpersian",
+ [0x06F7] = "sevenpersian",
+ [0x06F8] = "eightpersian",
+ [0x06F9] = "ninepersian",
+ [0x0901] = "candrabindudeva",
+ [0x0902] = "anusvaradeva",
+ [0x0903] = "visargadeva",
+ [0x0905] = "adeva",
+ [0x0906] = "aadeva",
+ [0x0907] = "ideva",
+ [0x0908] = "iideva",
+ [0x0909] = "udeva",
+ [0x090A] = "uudeva",
+ [0x090B] = "rvocalicdeva",
+ [0x090C] = "lvocalicdeva",
+ [0x090D] = "ecandradeva",
+ [0x090E] = "eshortdeva",
+ [0x090F] = "edeva",
+ [0x0910] = "aideva",
+ [0x0911] = "ocandradeva",
+ [0x0912] = "oshortdeva",
+ [0x0913] = "odeva",
+ [0x0914] = "audeva",
+ [0x0915] = "kadeva",
+ [0x0916] = "khadeva",
+ [0x0917] = "gadeva",
+ [0x0918] = "ghadeva",
+ [0x0919] = "ngadeva",
+ [0x091A] = "cadeva",
+ [0x091B] = "chadeva",
+ [0x091C] = "jadeva",
+ [0x091D] = "jhadeva",
+ [0x091E] = "nyadeva",
+ [0x091F] = "ttadeva",
+ [0x0920] = "tthadeva",
+ [0x0921] = "ddadeva",
+ [0x0922] = "ddhadeva",
+ [0x0923] = "nnadeva",
+ [0x0924] = "tadeva",
+ [0x0925] = "thadeva",
+ [0x0926] = "dadeva",
+ [0x0927] = "dhadeva",
+ [0x0928] = "nadeva",
+ [0x0929] = "nnnadeva",
+ [0x092A] = "padeva",
+ [0x092B] = "phadeva",
+ [0x092C] = "badeva",
+ [0x092D] = "bhadeva",
+ [0x092E] = "madeva",
+ [0x092F] = "yadeva",
+ [0x0930] = "radeva",
+ [0x0931] = "rradeva",
+ [0x0932] = "ladeva",
+ [0x0933] = "lladeva",
+ [0x0934] = "llladeva",
+ [0x0935] = "vadeva",
+ [0x0936] = "shadeva",
+ [0x0937] = "ssadeva",
+ [0x0938] = "sadeva",
+ [0x0939] = "hadeva",
+ [0x093C] = "nuktadeva",
+ [0x093D] = "avagrahadeva",
+ [0x093E] = "aavowelsigndeva",
+ [0x093F] = "ivowelsigndeva",
+ [0x0940] = "iivowelsigndeva",
+ [0x0941] = "uvowelsigndeva",
+ [0x0942] = "uuvowelsigndeva",
+ [0x0943] = "rvocalicvowelsigndeva",
+ [0x0944] = "rrvocalicvowelsigndeva",
+ [0x0945] = "ecandravowelsigndeva",
+ [0x0946] = "eshortvowelsigndeva",
+ [0x0947] = "evowelsigndeva",
+ [0x0948] = "aivowelsigndeva",
+ [0x0949] = "ocandravowelsigndeva",
+ [0x094A] = "oshortvowelsigndeva",
+ [0x094B] = "ovowelsigndeva",
+ [0x094C] = "auvowelsigndeva",
+ [0x094D] = "viramadeva",
+ [0x0950] = "omdeva",
+ [0x0951] = "udattadeva",
+ [0x0952] = "anudattadeva",
+ [0x0953] = "gravedeva",
+ [0x0954] = "acutedeva",
+ [0x0958] = "qadeva",
+ [0x0959] = "khhadeva",
+ [0x095A] = "ghhadeva",
+ [0x095B] = "zadeva",
+ [0x095C] = "dddhadeva",
+ [0x095D] = "rhadeva",
+ [0x095E] = "fadeva",
+ [0x095F] = "yyadeva",
+ [0x0960] = "rrvocalicdeva",
+ [0x0961] = "llvocalicdeva",
+ [0x0962] = "lvocalicvowelsigndeva",
+ [0x0963] = "llvocalicvowelsigndeva",
+ [0x0964] = "danda",
+ [0x0965] = "dbldanda",
+ [0x0966] = "zerodeva",
+ [0x0967] = "onedeva",
+ [0x0968] = "twodeva",
+ [0x0969] = "threedeva",
+ [0x096A] = "fourdeva",
+ [0x096B] = "fivedeva",
+ [0x096C] = "sixdeva",
+ [0x096D] = "sevendeva",
+ [0x096E] = "eightdeva",
+ [0x096F] = "ninedeva",
+ [0x0970] = "abbreviationsigndeva",
+ [0x0981] = "candrabindubengali",
+ [0x0982] = "anusvarabengali",
+ [0x0983] = "visargabengali",
+ [0x0985] = "abengali",
+ [0x0986] = "aabengali",
+ [0x0987] = "ibengali",
+ [0x0988] = "iibengali",
+ [0x0989] = "ubengali",
+ [0x098A] = "uubengali",
+ [0x098B] = "rvocalicbengali",
+ [0x098C] = "lvocalicbengali",
+ [0x098F] = "ebengali",
+ [0x0990] = "aibengali",
+ [0x0993] = "obengali",
+ [0x0994] = "aubengali",
+ [0x0995] = "kabengali",
+ [0x0996] = "khabengali",
+ [0x0997] = "gabengali",
+ [0x0998] = "ghabengali",
+ [0x0999] = "ngabengali",
+ [0x099A] = "cabengali",
+ [0x099B] = "chabengali",
+ [0x099C] = "jabengali",
+ [0x099D] = "jhabengali",
+ [0x099E] = "nyabengali",
+ [0x099F] = "ttabengali",
+ [0x09A0] = "tthabengali",
+ [0x09A1] = "ddabengali",
+ [0x09A2] = "ddhabengali",
+ [0x09A3] = "nnabengali",
+ [0x09A4] = "tabengali",
+ [0x09A5] = "thabengali",
+ [0x09A6] = "dabengali",
+ [0x09A7] = "dhabengali",
+ [0x09A8] = "nabengali",
+ [0x09AA] = "pabengali",
+ [0x09AB] = "phabengali",
+ [0x09AC] = "babengali",
+ [0x09AD] = "bhabengali",
+ [0x09AE] = "mabengali",
+ [0x09AF] = "yabengali",
+ [0x09B0] = "rabengali",
+ [0x09B2] = "labengali",
+ [0x09B6] = "shabengali",
+ [0x09B7] = "ssabengali",
+ [0x09B8] = "sabengali",
+ [0x09B9] = "habengali",
+ [0x09BC] = "nuktabengali",
+ [0x09BE] = "aavowelsignbengali",
+ [0x09BF] = "ivowelsignbengali",
+ [0x09C0] = "iivowelsignbengali",
+ [0x09C1] = "uvowelsignbengali",
+ [0x09C2] = "uuvowelsignbengali",
+ [0x09C3] = "rvocalicvowelsignbengali",
+ [0x09C4] = "rrvocalicvowelsignbengali",
+ [0x09C7] = "evowelsignbengali",
+ [0x09C8] = "aivowelsignbengali",
+ [0x09CB] = "ovowelsignbengali",
+ [0x09CC] = "auvowelsignbengali",
+ [0x09CD] = "viramabengali",
+ [0x09D7] = "aulengthmarkbengali",
+ [0x09DC] = "rrabengali",
+ [0x09DD] = "rhabengali",
+ [0x09DF] = "yyabengali",
+ [0x09E0] = "rrvocalicbengali",
+ [0x09E1] = "llvocalicbengali",
+ [0x09E2] = "lvocalicvowelsignbengali",
+ [0x09E3] = "llvocalicvowelsignbengali",
+ [0x09E6] = "zerobengali",
+ [0x09E7] = "onebengali",
+ [0x09E8] = "twobengali",
+ [0x09E9] = "threebengali",
+ [0x09EA] = "fourbengali",
+ [0x09EB] = "fivebengali",
+ [0x09EC] = "sixbengali",
+ [0x09ED] = "sevenbengali",
+ [0x09EE] = "eightbengali",
+ [0x09EF] = "ninebengali",
+ [0x09F0] = "ramiddlediagonalbengali",
+ [0x09F1] = "ralowerdiagonalbengali",
+ [0x09F2] = "rupeemarkbengali",
+ [0x09F3] = "rupeesignbengali",
+ [0x09F4] = "onenumeratorbengali",
+ [0x09F5] = "twonumeratorbengali",
+ [0x09F6] = "threenumeratorbengali",
+ [0x09F7] = "fournumeratorbengali",
+ [0x09F8] = "denominatorminusonenumeratorbengali",
+ [0x09F9] = "sixteencurrencydenominatorbengali",
+ [0x09FA] = "issharbengali",
+ [0x0A02] = "bindigurmukhi",
+ [0x0A05] = "agurmukhi",
+ [0x0A06] = "aagurmukhi",
+ [0x0A07] = "igurmukhi",
+ [0x0A08] = "iigurmukhi",
+ [0x0A09] = "ugurmukhi",
+ [0x0A0A] = "uugurmukhi",
+ [0x0A0F] = "eegurmukhi",
+ [0x0A10] = "aigurmukhi",
+ [0x0A13] = "oogurmukhi",
+ [0x0A14] = "augurmukhi",
+ [0x0A15] = "kagurmukhi",
+ [0x0A16] = "khagurmukhi",
+ [0x0A17] = "gagurmukhi",
+ [0x0A18] = "ghagurmukhi",
+ [0x0A19] = "ngagurmukhi",
+ [0x0A1A] = "cagurmukhi",
+ [0x0A1B] = "chagurmukhi",
+ [0x0A1C] = "jagurmukhi",
+ [0x0A1D] = "jhagurmukhi",
+ [0x0A1E] = "nyagurmukhi",
+ [0x0A1F] = "ttagurmukhi",
+ [0x0A20] = "tthagurmukhi",
+ [0x0A21] = "ddagurmukhi",
+ [0x0A22] = "ddhagurmukhi",
+ [0x0A23] = "nnagurmukhi",
+ [0x0A24] = "tagurmukhi",
+ [0x0A25] = "thagurmukhi",
+ [0x0A26] = "dagurmukhi",
+ [0x0A27] = "dhagurmukhi",
+ [0x0A28] = "nagurmukhi",
+ [0x0A2A] = "pagurmukhi",
+ [0x0A2B] = "phagurmukhi",
+ [0x0A2C] = "bagurmukhi",
+ [0x0A2D] = "bhagurmukhi",
+ [0x0A2E] = "magurmukhi",
+ [0x0A2F] = "yagurmukhi",
+ [0x0A30] = "ragurmukhi",
+ [0x0A32] = "lagurmukhi",
+ [0x0A35] = "vagurmukhi",
+ [0x0A36] = "shagurmukhi",
+ [0x0A38] = "sagurmukhi",
+ [0x0A39] = "hagurmukhi",
+ [0x0A3C] = "nuktagurmukhi",
+ [0x0A3E] = "aamatragurmukhi",
+ [0x0A3F] = "imatragurmukhi",
+ [0x0A40] = "iimatragurmukhi",
+ [0x0A41] = "umatragurmukhi",
+ [0x0A42] = "uumatragurmukhi",
+ [0x0A47] = "eematragurmukhi",
+ [0x0A48] = "aimatragurmukhi",
+ [0x0A4B] = "oomatragurmukhi",
+ [0x0A4C] = "aumatragurmukhi",
+ [0x0A4D] = "halantgurmukhi",
+ [0x0A59] = "khhagurmukhi",
+ [0x0A5A] = "ghhagurmukhi",
+ [0x0A5B] = "zagurmukhi",
+ [0x0A5C] = "rragurmukhi",
+ [0x0A5E] = "fagurmukhi",
+ [0x0A66] = "zerogurmukhi",
+ [0x0A67] = "onegurmukhi",
+ [0x0A68] = "twogurmukhi",
+ [0x0A69] = "threegurmukhi",
+ [0x0A6A] = "fourgurmukhi",
+ [0x0A6B] = "fivegurmukhi",
+ [0x0A6C] = "sixgurmukhi",
+ [0x0A6D] = "sevengurmukhi",
+ [0x0A6E] = "eightgurmukhi",
+ [0x0A6F] = "ninegurmukhi",
+ [0x0A70] = "tippigurmukhi",
+ [0x0A71] = "addakgurmukhi",
+ [0x0A72] = "irigurmukhi",
+ [0x0A73] = "uragurmukhi",
+ [0x0A74] = "ekonkargurmukhi",
+ [0x0A81] = "candrabindugujarati",
+ [0x0A82] = "anusvaragujarati",
+ [0x0A83] = "visargagujarati",
+ [0x0A85] = "agujarati",
+ [0x0A86] = "aagujarati",
+ [0x0A87] = "igujarati",
+ [0x0A88] = "iigujarati",
+ [0x0A89] = "ugujarati",
+ [0x0A8A] = "uugujarati",
+ [0x0A8B] = "rvocalicgujarati",
+ [0x0A8D] = "ecandragujarati",
+ [0x0A8F] = "egujarati",
+ [0x0A90] = "aigujarati",
+ [0x0A91] = "ocandragujarati",
+ [0x0A93] = "ogujarati",
+ [0x0A94] = "augujarati",
+ [0x0A95] = "kagujarati",
+ [0x0A96] = "khagujarati",
+ [0x0A97] = "gagujarati",
+ [0x0A98] = "ghagujarati",
+ [0x0A99] = "ngagujarati",
+ [0x0A9A] = "cagujarati",
+ [0x0A9B] = "chagujarati",
+ [0x0A9C] = "jagujarati",
+ [0x0A9D] = "jhagujarati",
+ [0x0A9E] = "nyagujarati",
+ [0x0A9F] = "ttagujarati",
+ [0x0AA0] = "tthagujarati",
+ [0x0AA1] = "ddagujarati",
+ [0x0AA2] = "ddhagujarati",
+ [0x0AA3] = "nnagujarati",
+ [0x0AA4] = "tagujarati",
+ [0x0AA5] = "thagujarati",
+ [0x0AA6] = "dagujarati",
+ [0x0AA7] = "dhagujarati",
+ [0x0AA8] = "nagujarati",
+ [0x0AAA] = "pagujarati",
+ [0x0AAB] = "phagujarati",
+ [0x0AAC] = "bagujarati",
+ [0x0AAD] = "bhagujarati",
+ [0x0AAE] = "magujarati",
+ [0x0AAF] = "yagujarati",
+ [0x0AB0] = "ragujarati",
+ [0x0AB2] = "lagujarati",
+ [0x0AB3] = "llagujarati",
+ [0x0AB5] = "vagujarati",
+ [0x0AB6] = "shagujarati",
+ [0x0AB7] = "ssagujarati",
+ [0x0AB8] = "sagujarati",
+ [0x0AB9] = "hagujarati",
+ [0x0ABC] = "nuktagujarati",
+ [0x0ABE] = "aavowelsigngujarati",
+ [0x0ABF] = "ivowelsigngujarati",
+ [0x0AC0] = "iivowelsigngujarati",
+ [0x0AC1] = "uvowelsigngujarati",
+ [0x0AC2] = "uuvowelsigngujarati",
+ [0x0AC3] = "rvocalicvowelsigngujarati",
+ [0x0AC4] = "rrvocalicvowelsigngujarati",
+ [0x0AC5] = "ecandravowelsigngujarati",
+ [0x0AC7] = "evowelsigngujarati",
+ [0x0AC8] = "aivowelsigngujarati",
+ [0x0AC9] = "ocandravowelsigngujarati",
+ [0x0ACB] = "ovowelsigngujarati",
+ [0x0ACC] = "auvowelsigngujarati",
+ [0x0ACD] = "viramagujarati",
+ [0x0AD0] = "omgujarati",
+ [0x0AE0] = "rrvocalicgujarati",
+ [0x0AE6] = "zerogujarati",
+ [0x0AE7] = "onegujarati",
+ [0x0AE8] = "twogujarati",
+ [0x0AE9] = "threegujarati",
+ [0x0AEA] = "fourgujarati",
+ [0x0AEB] = "fivegujarati",
+ [0x0AEC] = "sixgujarati",
+ [0x0AED] = "sevengujarati",
+ [0x0AEE] = "eightgujarati",
+ [0x0AEF] = "ninegujarati",
+ [0x0E01] = "kokaithai",
+ [0x0E02] = "khokhaithai",
+ [0x0E03] = "khokhuatthai",
+ [0x0E04] = "khokhwaithai",
+ [0x0E05] = "khokhonthai",
+ [0x0E06] = "khorakhangthai",
+ [0x0E07] = "ngonguthai",
+ [0x0E08] = "chochanthai",
+ [0x0E09] = "chochingthai",
+ [0x0E0A] = "chochangthai",
+ [0x0E0B] = "sosothai",
+ [0x0E0C] = "chochoethai",
+ [0x0E0D] = "yoyingthai",
+ [0x0E0E] = "dochadathai",
+ [0x0E0F] = "topatakthai",
+ [0x0E10] = "thothanthai",
+ [0x0E11] = "thonangmonthothai",
+ [0x0E12] = "thophuthaothai",
+ [0x0E13] = "nonenthai",
+ [0x0E14] = "dodekthai",
+ [0x0E15] = "totaothai",
+ [0x0E16] = "thothungthai",
+ [0x0E17] = "thothahanthai",
+ [0x0E18] = "thothongthai",
+ [0x0E19] = "nonuthai",
+ [0x0E1A] = "bobaimaithai",
+ [0x0E1B] = "poplathai",
+ [0x0E1C] = "phophungthai",
+ [0x0E1D] = "fofathai",
+ [0x0E1E] = "phophanthai",
+ [0x0E1F] = "fofanthai",
+ [0x0E20] = "phosamphaothai",
+ [0x0E21] = "momathai",
+ [0x0E22] = "yoyakthai",
+ [0x0E23] = "roruathai",
+ [0x0E24] = "ruthai",
+ [0x0E25] = "lolingthai",
+ [0x0E26] = "luthai",
+ [0x0E27] = "wowaenthai",
+ [0x0E28] = "sosalathai",
+ [0x0E29] = "sorusithai",
+ [0x0E2A] = "sosuathai",
+ [0x0E2B] = "hohipthai",
+ [0x0E2C] = "lochulathai",
+ [0x0E2D] = "oangthai",
+ [0x0E2E] = "honokhukthai",
+ [0x0E2F] = "paiyannoithai",
+ [0x0E30] = "saraathai",
+ [0x0E31] = "maihanakatthai",
+ [0x0E32] = "saraaathai",
+ [0x0E33] = "saraamthai",
+ [0x0E34] = "saraithai",
+ [0x0E35] = "saraiithai",
+ [0x0E36] = "sarauethai",
+ [0x0E37] = "saraueethai",
+ [0x0E38] = "sarauthai",
+ [0x0E39] = "sarauuthai",
+ [0x0E3A] = "phinthuthai",
+ [0x0E3F] = "bahtthai",
+ [0x0E40] = "saraethai",
+ [0x0E41] = "saraaethai",
+ [0x0E42] = "saraothai",
+ [0x0E43] = "saraaimaimuanthai",
+ [0x0E44] = "saraaimaimalaithai",
+ [0x0E45] = "lakkhangyaothai",
+ [0x0E46] = "maiyamokthai",
+ [0x0E47] = "maitaikhuthai",
+ [0x0E48] = "maiekthai",
+ [0x0E49] = "maithothai",
+ [0x0E4A] = "maitrithai",
+ [0x0E4B] = "maichattawathai",
+ [0x0E4C] = "thanthakhatthai",
+ [0x0E4D] = "nikhahitthai",
+ [0x0E4E] = "yamakkanthai",
+ [0x0E4F] = "fongmanthai",
+ [0x0E50] = "zerothai",
+ [0x0E51] = "onethai",
+ [0x0E52] = "twothai",
+ [0x0E53] = "threethai",
+ [0x0E54] = "fourthai",
+ [0x0E55] = "fivethai",
+ [0x0E56] = "sixthai",
+ [0x0E57] = "seventhai",
+ [0x0E58] = "eightthai",
+ [0x0E59] = "ninethai",
+ [0x0E5A] = "angkhankhuthai",
+ [0x0E5B] = "khomutthai",
+ [0x1E00] = "Aringbelow",
+ [0x1E01] = "aringbelow",
+ [0x1E02] = "Bdotaccent",
+ [0x1E03] = "bdotaccent",
+ [0x1E04] = "Bdotbelow",
+ [0x1E05] = "bdotbelow",
+ [0x1E06] = "Blinebelow",
+ [0x1E07] = "blinebelow",
+ [0x1E08] = "Ccedillaacute",
+ [0x1E09] = "ccedillaacute",
+ [0x1E0A] = "Ddotaccent",
+ [0x1E0B] = "ddotaccent",
+ [0x1E0C] = "Ddotbelow",
+ [0x1E0D] = "ddotbelow",
+ [0x1E0E] = "Dlinebelow",
+ [0x1E0F] = "dlinebelow",
+ [0x1E10] = "Dcedilla",
+ [0x1E11] = "dcedilla",
+ [0x1E12] = "Dcircumflexbelow",
+ [0x1E13] = "dcircumflexbelow",
+ [0x1E14] = "Emacrongrave",
+ [0x1E15] = "emacrongrave",
+ [0x1E16] = "Emacronacute",
+ [0x1E17] = "emacronacute",
+ [0x1E18] = "Ecircumflexbelow",
+ [0x1E19] = "ecircumflexbelow",
+ [0x1E1A] = "Etildebelow",
+ [0x1E1B] = "etildebelow",
+ [0x1E1C] = "Ecedillabreve",
+ [0x1E1D] = "ecedillabreve",
+ [0x1E1E] = "Fdotaccent",
+ [0x1E1F] = "fdotaccent",
+ [0x1E20] = "Gmacron",
+ [0x1E21] = "gmacron",
+ [0x1E22] = "Hdotaccent",
+ [0x1E23] = "hdotaccent",
+ [0x1E24] = "Hdotbelow",
+ [0x1E25] = "hdotbelow",
+ [0x1E26] = "Hdieresis",
+ [0x1E27] = "hdieresis",
+ [0x1E28] = "Hcedilla",
+ [0x1E29] = "hcedilla",
+ [0x1E2A] = "Hbrevebelow",
+ [0x1E2B] = "hbrevebelow",
+ [0x1E2C] = "Itildebelow",
+ [0x1E2D] = "itildebelow",
+ [0x1E2E] = "Idieresisacute",
+ [0x1E2F] = "idieresisacute",
+ [0x1E30] = "Kacute",
+ [0x1E31] = "kacute",
+ [0x1E32] = "Kdotbelow",
+ [0x1E33] = "kdotbelow",
+ [0x1E34] = "Klinebelow",
+ [0x1E35] = "klinebelow",
+ [0x1E36] = "Ldotbelow",
+ [0x1E37] = "ldotbelow",
+ [0x1E38] = "Ldotbelowmacron",
+ [0x1E39] = "ldotbelowmacron",
+ [0x1E3A] = "Llinebelow",
+ [0x1E3B] = "llinebelow",
+ [0x1E3C] = "Lcircumflexbelow",
+ [0x1E3D] = "lcircumflexbelow",
+ [0x1E3E] = "Macute",
+ [0x1E3F] = "macute",
+ [0x1E40] = "Mdotaccent",
+ [0x1E41] = "mdotaccent",
+ [0x1E42] = "Mdotbelow",
+ [0x1E43] = "mdotbelow",
+ [0x1E44] = "Ndotaccent",
+ [0x1E45] = "ndotaccent",
+ [0x1E46] = "Ndotbelow",
+ [0x1E47] = "ndotbelow",
+ [0x1E48] = "Nlinebelow",
+ [0x1E49] = "nlinebelow",
+ [0x1E4A] = "Ncircumflexbelow",
+ [0x1E4B] = "ncircumflexbelow",
+ [0x1E4C] = "Otildeacute",
+ [0x1E4D] = "otildeacute",
+ [0x1E4E] = "Otildedieresis",
+ [0x1E4F] = "otildedieresis",
+ [0x1E50] = "Omacrongrave",
+ [0x1E51] = "omacrongrave",
+ [0x1E52] = "Omacronacute",
+ [0x1E53] = "omacronacute",
+ [0x1E54] = "Pacute",
+ [0x1E55] = "pacute",
+ [0x1E56] = "Pdotaccent",
+ [0x1E57] = "pdotaccent",
+ [0x1E58] = "Rdotaccent",
+ [0x1E59] = "rdotaccent",
+ [0x1E5A] = "Rdotbelow",
+ [0x1E5B] = "rdotbelow",
+ [0x1E5C] = "Rdotbelowmacron",
+ [0x1E5D] = "rdotbelowmacron",
+ [0x1E5E] = "Rlinebelow",
+ [0x1E5F] = "rlinebelow",
+ [0x1E60] = "Sdotaccent",
+ [0x1E61] = "sdotaccent",
+ [0x1E62] = "Sdotbelow",
+ [0x1E63] = "sdotbelow",
+ [0x1E64] = "Sacutedotaccent",
+ [0x1E65] = "sacutedotaccent",
+ [0x1E66] = "Scarondotaccent",
+ [0x1E67] = "scarondotaccent",
+ [0x1E68] = "Sdotbelowdotaccent",
+ [0x1E69] = "sdotbelowdotaccent",
+ [0x1E6A] = "Tdotaccent",
+ [0x1E6B] = "tdotaccent",
+ [0x1E6C] = "Tdotbelow",
+ [0x1E6D] = "tdotbelow",
+ [0x1E6E] = "Tlinebelow",
+ [0x1E6F] = "tlinebelow",
+ [0x1E70] = "Tcircumflexbelow",
+ [0x1E71] = "tcircumflexbelow",
+ [0x1E72] = "Udieresisbelow",
+ [0x1E73] = "udieresisbelow",
+ [0x1E74] = "Utildebelow",
+ [0x1E75] = "utildebelow",
+ [0x1E76] = "Ucircumflexbelow",
+ [0x1E77] = "ucircumflexbelow",
+ [0x1E78] = "Utildeacute",
+ [0x1E79] = "utildeacute",
+ [0x1E7A] = "Umacrondieresis",
+ [0x1E7B] = "umacrondieresis",
+ [0x1E7C] = "Vtilde",
+ [0x1E7D] = "vtilde",
+ [0x1E7E] = "Vdotbelow",
+ [0x1E7F] = "vdotbelow",
+ [0x1E80] = "Wgrave",
+ [0x1E81] = "wgrave",
+ [0x1E82] = "Wacute",
+ [0x1E83] = "wacute",
+ [0x1E84] = "Wdieresis",
+ [0x1E85] = "wdieresis",
+ [0x1E86] = "Wdotaccent",
+ [0x1E87] = "wdotaccent",
+ [0x1E88] = "Wdotbelow",
+ [0x1E89] = "wdotbelow",
+ [0x1E8A] = "Xdotaccent",
+ [0x1E8B] = "xdotaccent",
+ [0x1E8C] = "Xdieresis",
+ [0x1E8D] = "xdieresis",
+ [0x1E8E] = "Ydotaccent",
+ [0x1E8F] = "ydotaccent",
+ [0x1E90] = "Zcircumflex",
+ [0x1E91] = "zcircumflex",
+ [0x1E92] = "Zdotbelow",
+ [0x1E93] = "zdotbelow",
+ [0x1E94] = "Zlinebelow",
+ [0x1E95] = "zlinebelow",
+ [0x1E96] = "hlinebelow",
+ [0x1E97] = "tdieresis",
+ [0x1E98] = "wring",
+ [0x1E99] = "yring",
+ [0x1E9A] = "arighthalfring",
+ [0x1E9B] = "slongdotaccent",
+ [0x1EA0] = "Adotbelow",
+ [0x1EA1] = "adotbelow",
+ [0x1EA2] = "Ahookabove",
+ [0x1EA3] = "ahookabove",
+ [0x1EA4] = "Acircumflexacute",
+ [0x1EA5] = "acircumflexacute",
+ [0x1EA6] = "Acircumflexgrave",
+ [0x1EA7] = "acircumflexgrave",
+ [0x1EA8] = "Acircumflexhookabove",
+ [0x1EA9] = "acircumflexhookabove",
+ [0x1EAA] = "Acircumflextilde",
+ [0x1EAB] = "acircumflextilde",
+ [0x1EAC] = "Acircumflexdotbelow",
+ [0x1EAD] = "acircumflexdotbelow",
+ [0x1EAE] = "Abreveacute",
+ [0x1EAF] = "abreveacute",
+ [0x1EB0] = "Abrevegrave",
+ [0x1EB1] = "abrevegrave",
+ [0x1EB2] = "Abrevehookabove",
+ [0x1EB3] = "abrevehookabove",
+ [0x1EB4] = "Abrevetilde",
+ [0x1EB5] = "abrevetilde",
+ [0x1EB6] = "Abrevedotbelow",
+ [0x1EB7] = "abrevedotbelow",
+ [0x1EB8] = "Edotbelow",
+ [0x1EB9] = "edotbelow",
+ [0x1EBA] = "Ehookabove",
+ [0x1EBB] = "ehookabove",
+ [0x1EBC] = "Etilde",
+ [0x1EBD] = "etilde",
+ [0x1EBE] = "Ecircumflexacute",
+ [0x1EBF] = "ecircumflexacute",
+ [0x1EC0] = "Ecircumflexgrave",
+ [0x1EC1] = "ecircumflexgrave",
+ [0x1EC2] = "Ecircumflexhookabove",
+ [0x1EC3] = "ecircumflexhookabove",
+ [0x1EC4] = "Ecircumflextilde",
+ [0x1EC5] = "ecircumflextilde",
+ [0x1EC6] = "Ecircumflexdotbelow",
+ [0x1EC7] = "ecircumflexdotbelow",
+ [0x1EC8] = "Ihookabove",
+ [0x1EC9] = "ihookabove",
+ [0x1ECA] = "Idotbelow",
+ [0x1ECB] = "idotbelow",
+ [0x1ECC] = "Odotbelow",
+ [0x1ECD] = "odotbelow",
+ [0x1ECE] = "Ohookabove",
+ [0x1ECF] = "ohookabove",
+ [0x1ED0] = "Ocircumflexacute",
+ [0x1ED1] = "ocircumflexacute",
+ [0x1ED2] = "Ocircumflexgrave",
+ [0x1ED3] = "ocircumflexgrave",
+ [0x1ED4] = "Ocircumflexhookabove",
+ [0x1ED5] = "ocircumflexhookabove",
+ [0x1ED6] = "Ocircumflextilde",
+ [0x1ED7] = "ocircumflextilde",
+ [0x1ED8] = "Ocircumflexdotbelow",
+ [0x1ED9] = "ocircumflexdotbelow",
+ [0x1EDA] = "Ohornacute",
+ [0x1EDB] = "ohornacute",
+ [0x1EDC] = "Ohorngrave",
+ [0x1EDD] = "ohorngrave",
+ [0x1EDE] = "Ohornhookabove",
+ [0x1EDF] = "ohornhookabove",
+ [0x1EE0] = "Ohorntilde",
+ [0x1EE1] = "ohorntilde",
+ [0x1EE2] = "Ohorndotbelow",
+ [0x1EE3] = "ohorndotbelow",
+ [0x1EE4] = "Udotbelow",
+ [0x1EE5] = "udotbelow",
+ [0x1EE6] = "Uhookabove",
+ [0x1EE7] = "uhookabove",
+ [0x1EE8] = "Uhornacute",
+ [0x1EE9] = "uhornacute",
+ [0x1EEA] = "Uhorngrave",
+ [0x1EEB] = "uhorngrave",
+ [0x1EEC] = "Uhornhookabove",
+ [0x1EED] = "uhornhookabove",
+ [0x1EEE] = "Uhorntilde",
+ [0x1EEF] = "uhorntilde",
+ [0x1EF0] = "Uhorndotbelow",
+ [0x1EF1] = "uhorndotbelow",
+ [0x1EF2] = "Ygrave",
+ [0x1EF3] = "ygrave",
+ [0x1EF4] = "Ydotbelow",
+ [0x1EF5] = "ydotbelow",
+ [0x1EF6] = "Yhookabove",
+ [0x1EF7] = "yhookabove",
+ [0x1EF8] = "Ytilde",
+ [0x1EF9] = "ytilde",
+ [0x2002] = "enspace",
+ [0x200B] = "zerowidthspace",
+ [0x200C] = "zerowidthnonjoiner",
+ [0x200D] = "afii301",
+ [0x200E] = "afii299",
+ [0x200F] = "afii300",
+ [0x2010] = "hyphentwo",
+ [0x2012] = "figuredash",
+ [0x2013] = "endash",
+ [0x2014] = "emdash",
+ [0x2015] = "horizontalbar",
+ [0x2016] = "dblverticalbar",
+ [0x2017] = "underscoredbl",
+ [0x2018] = "quoteleft",
+ [0x2019] = "quoteright",
+ [0x201A] = "quotesinglbase",
+ [0x201B] = "quotereversed",
+ [0x201C] = "quotedblleft",
+ [0x201D] = "quotedblright",
+ [0x201E] = "quotedblbase",
+ [0x2020] = "dagger",
+ [0x2021] = "daggerdbl",
+ [0x2022] = "bullet",
+ [0x2024] = "onedotenleader",
+ [0x2025] = "twodotleader",
+ [0x2026] = "ellipsis",
+ [0x202C] = "afii61573",
+ [0x202D] = "afii61574",
+ [0x202E] = "afii61575",
+ [0x2030] = "perthousand",
+ [0x2032] = "minute",
+ [0x2033] = "second",
+ [0x2035] = "primereversed",
+ [0x2039] = "guilsinglleft",
+ [0x203A] = "guilsinglright",
+ [0x203B] = "referencemark",
+ [0x203C] = "exclamdbl",
+ [0x203E] = "overline",
+ [0x2042] = "asterism",
+ [0x2044] = "fraction",
+ [0x2070] = "zerosuperior",
+ [0x2074] = "foursuperior",
+ [0x2075] = "fivesuperior",
+ [0x2076] = "sixsuperior",
+ [0x2077] = "sevensuperior",
+ [0x2078] = "eightsuperior",
+ [0x2079] = "ninesuperior",
+ [0x207A] = "plussuperior",
+ [0x207C] = "equalsuperior",
+ [0x207D] = "parenleftsuperior",
+ [0x207E] = "parenrightsuperior",
+ [0x207F] = "nsuperior",
+ [0x2080] = "zeroinferior",
+ [0x2081] = "oneinferior",
+ [0x2082] = "twoinferior",
+ [0x2083] = "threeinferior",
+ [0x2084] = "fourinferior",
+ [0x2085] = "fiveinferior",
+ [0x2086] = "sixinferior",
+ [0x2087] = "seveninferior",
+ [0x2088] = "eightinferior",
+ [0x2089] = "nineinferior",
+ [0x208D] = "parenleftinferior",
+ [0x208E] = "parenrightinferior",
+ [0x20A1] = "colonsign",
+ [0x20A2] = "cruzeiro",
+ [0x20A3] = "franc",
+ [0x20A4] = "lira",
+ [0x20A7] = "peseta",
+ [0x20A9] = "won",
+ [0x20AA] = "sheqelhebrew",
+ [0x20AB] = "dong",
+ [0x20AC] = "euro",
+ [0x2103] = "centigrade",
+ [0x2105] = "careof",
+ [0x2109] = "fahrenheit",
+ [0x2111] = "Ifraktur",
+ [0x2113] = "lsquare",
+ [0x2116] = "numero",
+ [0x2118] = "weierstrass",
+ [0x211C] = "Rfraktur",
+ [0x211E] = "prescription",
+ [0x2121] = "telephone",
+ [0x2122] = "trademark",
+ [0x2126] = "Omega",
+ [0x212B] = "angstrom",
+ [0x212E] = "estimated",
+ [0x2135] = "aleph",
+ [0x2153] = "onethird",
+ [0x2154] = "twothirds",
+ [0x215B] = "oneeighth",
+ [0x215C] = "threeeighths",
+ [0x215D] = "fiveeighths",
+ [0x215E] = "seveneighths",
+ [0x2160] = "Oneroman",
+ [0x2161] = "Tworoman",
+ [0x2162] = "Threeroman",
+ [0x2163] = "Fourroman",
+ [0x2164] = "Fiveroman",
+ [0x2165] = "Sixroman",
+ [0x2166] = "Sevenroman",
+ [0x2167] = "Eightroman",
+ [0x2168] = "Nineroman",
+ [0x2169] = "Tenroman",
+ [0x216A] = "Elevenroman",
+ [0x216B] = "Twelveroman",
+ [0x2170] = "oneroman",
+ [0x2171] = "tworoman",
+ [0x2172] = "threeroman",
+ [0x2173] = "fourroman",
+ [0x2174] = "fiveroman",
+ [0x2175] = "sixroman",
+ [0x2176] = "sevenroman",
+ [0x2177] = "eightroman",
+ [0x2178] = "nineroman",
+ [0x2179] = "tenroman",
+ [0x217A] = "elevenroman",
+ [0x217B] = "twelveroman",
+ [0x2190] = "arrowleft",
+ [0x2191] = "arrowup",
+ [0x2192] = "arrowright",
+ [0x2193] = "arrowdown",
+ [0x2194] = "arrowboth",
+ [0x2195] = "arrowupdn",
+ [0x2196] = "arrowupleft",
+ [0x2197] = "arrowupright",
+ [0x2198] = "arrowdownright",
+ [0x2199] = "arrowdownleft",
+ [0x21A8] = "arrowupdownbase",
+ [0x21B5] = "carriagereturn",
+ [0x21BC] = "harpoonleftbarbup",
+ [0x21C0] = "harpoonrightbarbup",
+ [0x21C4] = "arrowrightoverleft",
+ [0x21C5] = "arrowupleftofdown",
+ [0x21C6] = "arrowleftoverright",
+ [0x21CD] = "arrowleftdblstroke",
+ [0x21CF] = "arrowrightdblstroke",
+ [0x21D0] = "arrowleftdbl",
+ [0x21D1] = "arrowdblup",
+ [0x21D2] = "dblarrowright",
+ [0x21D3] = "arrowdbldown",
+ [0x21D4] = "dblarrowleft",
+ [0x21DE] = "pageup",
+ [0x21DF] = "pagedown",
+ [0x21E0] = "arrowdashleft",
+ [0x21E1] = "arrowdashup",
+ [0x21E2] = "arrowdashright",
+ [0x21E3] = "arrowdashdown",
+ [0x21E4] = "arrowtableft",
+ [0x21E5] = "arrowtabright",
+ [0x21E6] = "arrowleftwhite",
+ [0x21E7] = "arrowupwhite",
+ [0x21E8] = "arrowrightwhite",
+ [0x21E9] = "arrowdownwhite",
+ [0x21EA] = "capslock",
+ [0x2200] = "universal",
+ [0x2202] = "partialdiff",
+ [0x2203] = "thereexists",
+ [0x2205] = "emptyset",
+ [0x2206] = "increment",
+ [0x2207] = "nabla",
+ [0x2208] = "element",
+ [0x2209] = "notelementof",
+ [0x220B] = "suchthat",
+ [0x220C] = "notcontains",
+ [0x220F] = "product",
+ [0x2211] = "summation",
+ [0x2212] = "minus",
+ [0x2213] = "minusplus",
+ [0x2215] = "divisionslash",
+ [0x2217] = "asteriskmath",
+ [0x2219] = "bulletoperator",
+ [0x221A] = "radical",
+ [0x221D] = "proportional",
+ [0x221E] = "infinity",
+ [0x221F] = "rightangle",
+ [0x2220] = "angle",
+ [0x2223] = "divides",
+ [0x2225] = "parallel",
+ [0x2226] = "notparallel",
+ [0x2227] = "logicaland",
+ [0x2228] = "logicalor",
+ [0x2229] = "intersection",
+ [0x222A] = "union",
+ [0x222B] = "integral",
+ [0x222C] = "dblintegral",
+ [0x222E] = "contourintegral",
+ [0x2234] = "therefore",
+ [0x2235] = "because",
+ [0x2236] = "ratio",
+ [0x2237] = "proportion",
+ [0x223C] = "tildeoperator",
+ [0x223D] = "reversedtilde",
+ [0x2243] = "asymptoticallyequal",
+ [0x2245] = "congruent",
+ [0x2248] = "approxequal",
+ [0x224C] = "allequal",
+ [0x2250] = "approaches",
+ [0x2251] = "geometricallyequal",
+ [0x2252] = "approxequalorimage",
+ [0x2253] = "imageorapproximatelyequal",
+ [0x2260] = "notequal",
+ [0x2261] = "equivalence",
+ [0x2262] = "notidentical",
+ [0x2264] = "lessequal",
+ [0x2265] = "greaterequal",
+ [0x2266] = "lessoverequal",
+ [0x2267] = "greateroverequal",
+ [0x226A] = "muchless",
+ [0x226B] = "muchgreater",
+ [0x226E] = "notless",
+ [0x226F] = "notgreater",
+ [0x2270] = "notlessnorequal",
+ [0x2271] = "notgreaternorequal",
+ [0x2272] = "lessorequivalent",
+ [0x2273] = "greaterorequivalent",
+ [0x2276] = "lessorgreater",
+ [0x2277] = "greaterorless",
+ [0x2279] = "notgreaternorless",
+ [0x227A] = "precedes",
+ [0x227B] = "succeeds",
+ [0x2280] = "notprecedes",
+ [0x2281] = "notsucceeds",
+ [0x2282] = "subset",
+ [0x2283] = "superset",
+ [0x2284] = "notsubset",
+ [0x2285] = "notsuperset",
+ [0x2286] = "subsetorequal",
+ [0x2287] = "supersetorequal",
+ [0x228A] = "subsetnotequal",
+ [0x228B] = "supersetnotequal",
+ [0x2295] = "pluscircle",
+ [0x2296] = "minuscircle",
+ [0x2297] = "timescircle",
+ [0x2299] = "circleot",
+ [0x22A3] = "tackleft",
+ [0x22A4] = "tackdown",
+ [0x22A5] = "perpendicular",
+ [0x22BF] = "righttriangle",
+ [0x22C5] = "dotmath",
+ [0x22CE] = "curlyor",
+ [0x22CF] = "curlyand",
+ [0x22DA] = "lessequalorgreater",
+ [0x22DB] = "greaterequalorless",
+ [0x22EE] = "ellipsisvertical",
+ [0x2302] = "house",
+ [0x2303] = "control",
+ [0x2305] = "projective",
+ [0x2310] = "revlogicalnot",
+ [0x2312] = "arc",
+ [0x2318] = "propellor",
+ [0x2320] = "integraltp",
+ [0x2321] = "integralbt",
+ [0x2325] = "option",
+ [0x2326] = "deleteright",
+ [0x2327] = "clear",
+ [0x2329] = "angleleft",
+ [0x232A] = "angleright",
+ [0x232B] = "deleteleft",
+ [0x2423] = "blank",
+ [0x2460] = "onecircle",
+ [0x2461] = "twocircle",
+ [0x2462] = "threecircle",
+ [0x2463] = "fourcircle",
+ [0x2464] = "fivecircle",
+ [0x2465] = "sixcircle",
+ [0x2466] = "sevencircle",
+ [0x2467] = "eightcircle",
+ [0x2468] = "ninecircle",
+ [0x2469] = "tencircle",
+ [0x246A] = "elevencircle",
+ [0x246B] = "twelvecircle",
+ [0x246C] = "thirteencircle",
+ [0x246D] = "fourteencircle",
+ [0x246E] = "fifteencircle",
+ [0x246F] = "sixteencircle",
+ [0x2470] = "seventeencircle",
+ [0x2471] = "eighteencircle",
+ [0x2472] = "nineteencircle",
+ [0x2473] = "twentycircle",
+ [0x2474] = "oneparen",
+ [0x2475] = "twoparen",
+ [0x2476] = "threeparen",
+ [0x2477] = "fourparen",
+ [0x2478] = "fiveparen",
+ [0x2479] = "sixparen",
+ [0x247A] = "sevenparen",
+ [0x247B] = "eightparen",
+ [0x247C] = "nineparen",
+ [0x247D] = "tenparen",
+ [0x247E] = "elevenparen",
+ [0x247F] = "twelveparen",
+ [0x2480] = "thirteenparen",
+ [0x2481] = "fourteenparen",
+ [0x2482] = "fifteenparen",
+ [0x2483] = "sixteenparen",
+ [0x2484] = "seventeenparen",
+ [0x2485] = "eighteenparen",
+ [0x2486] = "nineteenparen",
+ [0x2487] = "twentyparen",
+ [0x2488] = "oneperiod",
+ [0x2489] = "twoperiod",
+ [0x248A] = "threeperiod",
+ [0x248B] = "fourperiod",
+ [0x248C] = "fiveperiod",
+ [0x248D] = "sixperiod",
+ [0x248E] = "sevenperiod",
+ [0x248F] = "eightperiod",
+ [0x2490] = "nineperiod",
+ [0x2491] = "tenperiod",
+ [0x2492] = "elevenperiod",
+ [0x2493] = "twelveperiod",
+ [0x2494] = "thirteenperiod",
+ [0x2495] = "fourteenperiod",
+ [0x2496] = "fifteenperiod",
+ [0x2497] = "sixteenperiod",
+ [0x2498] = "seventeenperiod",
+ [0x2499] = "eighteenperiod",
+ [0x249A] = "nineteenperiod",
+ [0x249B] = "twentyperiod",
+ [0x249C] = "aparen",
+ [0x249D] = "bparen",
+ [0x249E] = "cparen",
+ [0x249F] = "dparen",
+ [0x24A0] = "eparen",
+ [0x24A1] = "fparen",
+ [0x24A2] = "gparen",
+ [0x24A3] = "hparen",
+ [0x24A4] = "iparen",
+ [0x24A5] = "jparen",
+ [0x24A6] = "kparen",
+ [0x24A7] = "lparen",
+ [0x24A8] = "mparen",
+ [0x24A9] = "nparen",
+ [0x24AA] = "oparen",
+ [0x24AB] = "pparen",
+ [0x24AC] = "qparen",
+ [0x24AD] = "rparen",
+ [0x24AE] = "sparen",
+ [0x24AF] = "tparen",
+ [0x24B0] = "uparen",
+ [0x24B1] = "vparen",
+ [0x24B2] = "wparen",
+ [0x24B3] = "xparen",
+ [0x24B4] = "yparen",
+ [0x24B5] = "zparen",
+ [0x24B6] = "Acircle",
+ [0x24B7] = "Bcircle",
+ [0x24B8] = "Ccircle",
+ [0x24B9] = "Dcircle",
+ [0x24BA] = "Ecircle",
+ [0x24BB] = "Fcircle",
+ [0x24BC] = "Gcircle",
+ [0x24BD] = "Hcircle",
+ [0x24BE] = "Icircle",
+ [0x24BF] = "Jcircle",
+ [0x24C0] = "Kcircle",
+ [0x24C1] = "Lcircle",
+ [0x24C2] = "Mcircle",
+ [0x24C3] = "Ncircle",
+ [0x24C4] = "Ocircle",
+ [0x24C5] = "Pcircle",
+ [0x24C6] = "Qcircle",
+ [0x24C7] = "Rcircle",
+ [0x24C8] = "Scircle",
+ [0x24C9] = "Tcircle",
+ [0x24CA] = "Ucircle",
+ [0x24CB] = "Vcircle",
+ [0x24CC] = "Wcircle",
+ [0x24CD] = "Xcircle",
+ [0x24CE] = "Ycircle",
+ [0x24CF] = "Zcircle",
+ [0x24D0] = "acircle",
+ [0x24D1] = "bcircle",
+ [0x24D2] = "ccircle",
+ [0x24D3] = "dcircle",
+ [0x24D4] = "ecircle",
+ [0x24D5] = "fcircle",
+ [0x24D6] = "gcircle",
+ [0x24D7] = "hcircle",
+ [0x24D8] = "icircle",
+ [0x24D9] = "jcircle",
+ [0x24DA] = "kcircle",
+ [0x24DB] = "lcircle",
+ [0x24DC] = "mcircle",
+ [0x24DD] = "ncircle",
+ [0x24DE] = "ocircle",
+ [0x24DF] = "pcircle",
+ [0x24E0] = "qcircle",
+ [0x24E1] = "rcircle",
+ [0x24E2] = "scircle",
+ [0x24E3] = "tcircle",
+ [0x24E4] = "ucircle",
+ [0x24E5] = "vcircle",
+ [0x24E6] = "wcircle",
+ [0x24E7] = "xcircle",
+ [0x24E8] = "ycircle",
+ [0x24E9] = "zcircle",
+ [0x2500] = "SF100000",
+ [0x2502] = "SF110000",
+ [0x250C] = "SF010000",
+ [0x2510] = "SF030000",
+ [0x2514] = "SF020000",
+ [0x2518] = "SF040000",
+ [0x251C] = "SF080000",
+ [0x2524] = "SF090000",
+ [0x252C] = "SF060000",
+ [0x2534] = "SF070000",
+ [0x253C] = "SF050000",
+ [0x2550] = "SF430000",
+ [0x2551] = "SF240000",
+ [0x2552] = "SF510000",
+ [0x2553] = "SF520000",
+ [0x2554] = "SF390000",
+ [0x2555] = "SF220000",
+ [0x2556] = "SF210000",
+ [0x2557] = "SF250000",
+ [0x2558] = "SF500000",
+ [0x2559] = "SF490000",
+ [0x255A] = "SF380000",
+ [0x255B] = "SF280000",
+ [0x255C] = "SF270000",
+ [0x255D] = "SF260000",
+ [0x255E] = "SF360000",
+ [0x255F] = "SF370000",
+ [0x2560] = "SF420000",
+ [0x2561] = "SF190000",
+ [0x2562] = "SF200000",
+ [0x2563] = "SF230000",
+ [0x2564] = "SF470000",
+ [0x2565] = "SF480000",
+ [0x2566] = "SF410000",
+ [0x2567] = "SF450000",
+ [0x2568] = "SF460000",
+ [0x2569] = "SF400000",
+ [0x256A] = "SF540000",
+ [0x256B] = "SF530000",
+ [0x256C] = "SF440000",
+ [0x2580] = "upblock",
+ [0x2584] = "dnblock",
+ [0x2588] = "block",
+ [0x258C] = "lfblock",
+ [0x2590] = "rtblock",
+ [0x2591] = "shadelight",
+ [0x2592] = "shademedium",
+ [0x2593] = "shadedark",
+ [0x25A0] = "filledbox",
+ [0x25A1] = "whitesquare",
+ [0x25A3] = "squarewhitewithsmallblack",
+ [0x25A4] = "squarehorizontalfill",
+ [0x25A5] = "squareverticalfill",
+ [0x25A6] = "squareorthogonalcrosshatchfill",
+ [0x25A7] = "squareupperlefttolowerrightfill",
+ [0x25A8] = "squareupperrighttolowerleftfill",
+ [0x25A9] = "squarediagonalcrosshatchfill",
+ [0x25AA] = "blacksmallsquare",
+ [0x25AB] = "whitesmallsquare",
+ [0x25AC] = "filledrect",
+ [0x25B2] = "triagup",
+ [0x25B3] = "whiteuppointingtriangle",
+ [0x25B4] = "blackuppointingsmalltriangle",
+ [0x25B5] = "whiteuppointingsmalltriangle",
+ [0x25B6] = "blackrightpointingtriangle",
+ [0x25B7] = "whiterightpointingtriangle",
+ [0x25B9] = "whiterightpointingsmalltriangle",
+ [0x25BA] = "triagrt",
+ [0x25BC] = "triagdn",
+ [0x25BD] = "whitedownpointingtriangle",
+ [0x25BF] = "whitedownpointingsmalltriangle",
+ [0x25C0] = "blackleftpointingtriangle",
+ [0x25C1] = "whiteleftpointingtriangle",
+ [0x25C3] = "whiteleftpointingsmalltriangle",
+ [0x25C4] = "triaglf",
+ [0x25C6] = "blackdiamond",
+ [0x25C7] = "whitediamond",
+ [0x25C8] = "whitediamondcontainingblacksmalldiamond",
+ [0x25C9] = "fisheye",
+ [0x25CA] = "lozenge",
+ [0x25CB] = "whitecircle",
+ [0x25CC] = "dottedcircle",
+ [0x25CE] = "bullseye",
+ [0x25CF] = "blackcircle",
+ [0x25D0] = "circlewithlefthalfblack",
+ [0x25D1] = "circlewithrighthalfblack",
+ [0x25D8] = "invbullet",
+ [0x25D9] = "whitecircleinverse",
+ [0x25E2] = "blacklowerrighttriangle",
+ [0x25E3] = "blacklowerlefttriangle",
+ [0x25E4] = "blackupperlefttriangle",
+ [0x25E5] = "blackupperrighttriangle",
+ [0x25E6] = "whitebullet",
+ [0x25EF] = "largecircle",
+ [0x2605] = "blackstar",
+ [0x2606] = "whitestar",
+ [0x260E] = "telephoneblack",
+ [0x260F] = "whitetelephone",
+ [0x261C] = "pointingindexleftwhite",
+ [0x261D] = "pointingindexupwhite",
+ [0x261E] = "pointingindexrightwhite",
+ [0x261F] = "pointingindexdownwhite",
+ [0x262F] = "yinyang",
+ [0x263A] = "whitesmilingface",
+ [0x263B] = "invsmileface",
+ [0x263C] = "sun",
+ [0x2640] = "venus",
+ [0x2641] = "earth",
+ [0x2642] = "mars",
+ [0x2660] = "spadesuitblack",
+ [0x2661] = "heartsuitwhite",
+ [0x2662] = "diamondsuitwhite",
+ [0x2663] = "clubsuitblack",
+ [0x2664] = "spadesuitwhite",
+ [0x2665] = "heartsuitblack",
+ [0x2666] = "diamond",
+ [0x2667] = "clubsuitwhite",
+ [0x2668] = "hotsprings",
+ [0x2669] = "quarternote",
+ [0x266A] = "musicalnote",
+ [0x266B] = "musicalnotedbl",
+ [0x266C] = "beamedsixteenthnotes",
+ [0x266D] = "musicflatsign",
+ [0x266F] = "musicsharpsign",
+ [0x2713] = "checkmark",
+ [0x278A] = "onecircleinversesansserif",
+ [0x278B] = "twocircleinversesansserif",
+ [0x278C] = "threecircleinversesansserif",
+ [0x278D] = "fourcircleinversesansserif",
+ [0x278E] = "fivecircleinversesansserif",
+ [0x278F] = "sixcircleinversesansserif",
+ [0x2790] = "sevencircleinversesansserif",
+ [0x2791] = "eightcircleinversesansserif",
+ [0x2792] = "ninecircleinversesansserif",
+ [0x279E] = "arrowrightheavy",
+ [0x3000] = "ideographicspace",
+ [0x3001] = "ideographiccomma",
+ [0x3002] = "ideographicperiod",
+ [0x3003] = "dittomark",
+ [0x3004] = "jis",
+ [0x3005] = "ideographiciterationmark",
+ [0x3006] = "ideographicclose",
+ [0x3007] = "ideographiczero",
+ [0x3008] = "anglebracketleft",
+ [0x3009] = "anglebracketright",
+ [0x300A] = "dblanglebracketleft",
+ [0x300B] = "dblanglebracketright",
+ [0x300C] = "cornerbracketleft",
+ [0x300D] = "cornerbracketright",
+ [0x300E] = "whitecornerbracketleft",
+ [0x300F] = "whitecornerbracketright",
+ [0x3010] = "blacklenticularbracketleft",
+ [0x3011] = "blacklenticularbracketright",
+ [0x3012] = "postalmark",
+ [0x3013] = "getamark",
+ [0x3014] = "tortoiseshellbracketleft",
+ [0x3015] = "tortoiseshellbracketright",
+ [0x3016] = "whitelenticularbracketleft",
+ [0x3017] = "whitelenticularbracketright",
+ [0x3018] = "whitetortoiseshellbracketleft",
+ [0x3019] = "whitetortoiseshellbracketright",
+ [0x301C] = "wavedash",
+ [0x301D] = "quotedblprimereversed",
+ [0x301E] = "quotedblprime",
+ [0x3020] = "postalmarkface",
+ [0x3021] = "onehangzhou",
+ [0x3022] = "twohangzhou",
+ [0x3023] = "threehangzhou",
+ [0x3024] = "fourhangzhou",
+ [0x3025] = "fivehangzhou",
+ [0x3026] = "sixhangzhou",
+ [0x3027] = "sevenhangzhou",
+ [0x3028] = "eighthangzhou",
+ [0x3029] = "ninehangzhou",
+ [0x3036] = "circlepostalmark",
+ [0x3041] = "asmallhiragana",
+ [0x3042] = "ahiragana",
+ [0x3043] = "ismallhiragana",
+ [0x3044] = "ihiragana",
+ [0x3045] = "usmallhiragana",
+ [0x3046] = "uhiragana",
+ [0x3047] = "esmallhiragana",
+ [0x3048] = "ehiragana",
+ [0x3049] = "osmallhiragana",
+ [0x304A] = "ohiragana",
+ [0x304B] = "kahiragana",
+ [0x304C] = "gahiragana",
+ [0x304D] = "kihiragana",
+ [0x304E] = "gihiragana",
+ [0x304F] = "kuhiragana",
+ [0x3050] = "guhiragana",
+ [0x3051] = "kehiragana",
+ [0x3052] = "gehiragana",
+ [0x3053] = "kohiragana",
+ [0x3054] = "gohiragana",
+ [0x3055] = "sahiragana",
+ [0x3056] = "zahiragana",
+ [0x3057] = "sihiragana",
+ [0x3058] = "zihiragana",
+ [0x3059] = "suhiragana",
+ [0x305A] = "zuhiragana",
+ [0x305B] = "sehiragana",
+ [0x305C] = "zehiragana",
+ [0x305D] = "sohiragana",
+ [0x305E] = "zohiragana",
+ [0x305F] = "tahiragana",
+ [0x3060] = "dahiragana",
+ [0x3061] = "tihiragana",
+ [0x3062] = "dihiragana",
+ [0x3063] = "tusmallhiragana",
+ [0x3064] = "tuhiragana",
+ [0x3065] = "duhiragana",
+ [0x3066] = "tehiragana",
+ [0x3067] = "dehiragana",
+ [0x3068] = "tohiragana",
+ [0x3069] = "dohiragana",
+ [0x306A] = "nahiragana",
+ [0x306B] = "nihiragana",
+ [0x306C] = "nuhiragana",
+ [0x306D] = "nehiragana",
+ [0x306E] = "nohiragana",
+ [0x306F] = "hahiragana",
+ [0x3070] = "bahiragana",
+ [0x3071] = "pahiragana",
+ [0x3072] = "hihiragana",
+ [0x3073] = "bihiragana",
+ [0x3074] = "pihiragana",
+ [0x3075] = "huhiragana",
+ [0x3076] = "buhiragana",
+ [0x3077] = "puhiragana",
+ [0x3078] = "hehiragana",
+ [0x3079] = "behiragana",
+ [0x307A] = "pehiragana",
+ [0x307B] = "hohiragana",
+ [0x307C] = "bohiragana",
+ [0x307D] = "pohiragana",
+ [0x307E] = "mahiragana",
+ [0x307F] = "mihiragana",
+ [0x3080] = "muhiragana",
+ [0x3081] = "mehiragana",
+ [0x3082] = "mohiragana",
+ [0x3083] = "yasmallhiragana",
+ [0x3084] = "yahiragana",
+ [0x3085] = "yusmallhiragana",
+ [0x3086] = "yuhiragana",
+ [0x3087] = "yosmallhiragana",
+ [0x3088] = "yohiragana",
+ [0x3089] = "rahiragana",
+ [0x308A] = "rihiragana",
+ [0x308B] = "ruhiragana",
+ [0x308C] = "rehiragana",
+ [0x308D] = "rohiragana",
+ [0x308E] = "wasmallhiragana",
+ [0x308F] = "wahiragana",
+ [0x3090] = "wihiragana",
+ [0x3091] = "wehiragana",
+ [0x3092] = "wohiragana",
+ [0x3093] = "nhiragana",
+ [0x3094] = "vuhiragana",
+ [0x309B] = "voicedmarkkana",
+ [0x309C] = "semivoicedmarkkana",
+ [0x309D] = "iterationhiragana",
+ [0x309E] = "voicediterationhiragana",
+ [0x30A1] = "asmallkatakana",
+ [0x30A2] = "akatakana",
+ [0x30A3] = "ismallkatakana",
+ [0x30A4] = "ikatakana",
+ [0x30A5] = "usmallkatakana",
+ [0x30A6] = "ukatakana",
+ [0x30A7] = "esmallkatakana",
+ [0x30A8] = "ekatakana",
+ [0x30A9] = "osmallkatakana",
+ [0x30AA] = "okatakana",
+ [0x30AB] = "kakatakana",
+ [0x30AC] = "gakatakana",
+ [0x30AD] = "kikatakana",
+ [0x30AE] = "gikatakana",
+ [0x30AF] = "kukatakana",
+ [0x30B0] = "gukatakana",
+ [0x30B1] = "kekatakana",
+ [0x30B2] = "gekatakana",
+ [0x30B3] = "kokatakana",
+ [0x30B4] = "gokatakana",
+ [0x30B5] = "sakatakana",
+ [0x30B6] = "zakatakana",
+ [0x30B7] = "sikatakana",
+ [0x30B8] = "zikatakana",
+ [0x30B9] = "sukatakana",
+ [0x30BA] = "zukatakana",
+ [0x30BB] = "sekatakana",
+ [0x30BC] = "zekatakana",
+ [0x30BD] = "sokatakana",
+ [0x30BE] = "zokatakana",
+ [0x30BF] = "takatakana",
+ [0x30C0] = "dakatakana",
+ [0x30C1] = "tikatakana",
+ [0x30C2] = "dikatakana",
+ [0x30C3] = "tusmallkatakana",
+ [0x30C4] = "tukatakana",
+ [0x30C5] = "dukatakana",
+ [0x30C6] = "tekatakana",
+ [0x30C7] = "dekatakana",
+ [0x30C8] = "tokatakana",
+ [0x30C9] = "dokatakana",
+ [0x30CA] = "nakatakana",
+ [0x30CB] = "nikatakana",
+ [0x30CC] = "nukatakana",
+ [0x30CD] = "nekatakana",
+ [0x30CE] = "nokatakana",
+ [0x30CF] = "hakatakana",
+ [0x30D0] = "bakatakana",
+ [0x30D1] = "pakatakana",
+ [0x30D2] = "hikatakana",
+ [0x30D3] = "bikatakana",
+ [0x30D4] = "pikatakana",
+ [0x30D5] = "hukatakana",
+ [0x30D6] = "bukatakana",
+ [0x30D7] = "pukatakana",
+ [0x30D8] = "hekatakana",
+ [0x30D9] = "bekatakana",
+ [0x30DA] = "pekatakana",
+ [0x30DB] = "hokatakana",
+ [0x30DC] = "bokatakana",
+ [0x30DD] = "pokatakana",
+ [0x30DE] = "makatakana",
+ [0x30DF] = "mikatakana",
+ [0x30E0] = "mukatakana",
+ [0x30E1] = "mekatakana",
+ [0x30E2] = "mokatakana",
+ [0x30E3] = "yasmallkatakana",
+ [0x30E4] = "yakatakana",
+ [0x30E5] = "yusmallkatakana",
+ [0x30E6] = "yukatakana",
+ [0x30E7] = "yosmallkatakana",
+ [0x30E8] = "yokatakana",
+ [0x30E9] = "rakatakana",
+ [0x30EA] = "rikatakana",
+ [0x30EB] = "rukatakana",
+ [0x30EC] = "rekatakana",
+ [0x30ED] = "rokatakana",
+ [0x30EE] = "wasmallkatakana",
+ [0x30EF] = "wakatakana",
+ [0x30F0] = "wikatakana",
+ [0x30F1] = "wekatakana",
+ [0x30F2] = "wokatakana",
+ [0x30F3] = "nkatakana",
+ [0x30F4] = "vukatakana",
+ [0x30F5] = "kasmallkatakana",
+ [0x30F6] = "kesmallkatakana",
+ [0x30F7] = "vakatakana",
+ [0x30F8] = "vikatakana",
+ [0x30F9] = "vekatakana",
+ [0x30FA] = "vokatakana",
+ [0x30FB] = "dotkatakana",
+ [0x30FC] = "prolongedkana",
+ [0x30FD] = "iterationkatakana",
+ [0x30FE] = "voicediterationkatakana",
+ [0x3105] = "bbopomofo",
+ [0x3106] = "pbopomofo",
+ [0x3107] = "mbopomofo",
+ [0x3108] = "fbopomofo",
+ [0x3109] = "dbopomofo",
+ [0x310A] = "tbopomofo",
+ [0x310B] = "nbopomofo",
+ [0x310C] = "lbopomofo",
+ [0x310D] = "gbopomofo",
+ [0x310E] = "kbopomofo",
+ [0x310F] = "hbopomofo",
+ [0x3110] = "jbopomofo",
+ [0x3111] = "qbopomofo",
+ [0x3112] = "xbopomofo",
+ [0x3113] = "zhbopomofo",
+ [0x3114] = "chbopomofo",
+ [0x3115] = "shbopomofo",
+ [0x3116] = "rbopomofo",
+ [0x3117] = "zbopomofo",
+ [0x3118] = "cbopomofo",
+ [0x3119] = "sbopomofo",
+ [0x311A] = "abopomofo",
+ [0x311B] = "obopomofo",
+ [0x311C] = "ebopomofo",
+ [0x311D] = "ehbopomofo",
+ [0x311E] = "aibopomofo",
+ [0x311F] = "eibopomofo",
+ [0x3120] = "aubopomofo",
+ [0x3121] = "oubopomofo",
+ [0x3122] = "anbopomofo",
+ [0x3123] = "enbopomofo",
+ [0x3124] = "angbopomofo",
+ [0x3125] = "engbopomofo",
+ [0x3126] = "erbopomofo",
+ [0x3127] = "ibopomofo",
+ [0x3128] = "ubopomofo",
+ [0x3129] = "iubopomofo",
+ [0x3131] = "kiyeokkorean",
+ [0x3132] = "ssangkiyeokkorean",
+ [0x3133] = "kiyeoksioskorean",
+ [0x3134] = "nieunkorean",
+ [0x3135] = "nieuncieuckorean",
+ [0x3136] = "nieunhieuhkorean",
+ [0x3137] = "tikeutkorean",
+ [0x3138] = "ssangtikeutkorean",
+ [0x3139] = "rieulkorean",
+ [0x313A] = "rieulkiyeokkorean",
+ [0x313B] = "rieulmieumkorean",
+ [0x313C] = "rieulpieupkorean",
+ [0x313D] = "rieulsioskorean",
+ [0x313E] = "rieulthieuthkorean",
+ [0x313F] = "rieulphieuphkorean",
+ [0x3140] = "rieulhieuhkorean",
+ [0x3141] = "mieumkorean",
+ [0x3142] = "pieupkorean",
+ [0x3143] = "ssangpieupkorean",
+ [0x3144] = "pieupsioskorean",
+ [0x3145] = "sioskorean",
+ [0x3146] = "ssangsioskorean",
+ [0x3147] = "ieungkorean",
+ [0x3148] = "cieuckorean",
+ [0x3149] = "ssangcieuckorean",
+ [0x314A] = "chieuchkorean",
+ [0x314B] = "khieukhkorean",
+ [0x314C] = "thieuthkorean",
+ [0x314D] = "phieuphkorean",
+ [0x314E] = "hieuhkorean",
+ [0x314F] = "akorean",
+ [0x3150] = "aekorean",
+ [0x3151] = "yakorean",
+ [0x3152] = "yaekorean",
+ [0x3153] = "eokorean",
+ [0x3154] = "ekorean",
+ [0x3155] = "yeokorean",
+ [0x3156] = "yekorean",
+ [0x3157] = "okorean",
+ [0x3158] = "wakorean",
+ [0x3159] = "waekorean",
+ [0x315A] = "oekorean",
+ [0x315B] = "yokorean",
+ [0x315C] = "ukorean",
+ [0x315D] = "weokorean",
+ [0x315E] = "wekorean",
+ [0x315F] = "wikorean",
+ [0x3160] = "yukorean",
+ [0x3161] = "eukorean",
+ [0x3162] = "yikorean",
+ [0x3163] = "ikorean",
+ [0x3164] = "hangulfiller",
+ [0x3165] = "ssangnieunkorean",
+ [0x3166] = "nieuntikeutkorean",
+ [0x3167] = "nieunsioskorean",
+ [0x3168] = "nieunpansioskorean",
+ [0x3169] = "rieulkiyeoksioskorean",
+ [0x316A] = "rieultikeutkorean",
+ [0x316B] = "rieulpieupsioskorean",
+ [0x316C] = "rieulpansioskorean",
+ [0x316D] = "rieulyeorinhieuhkorean",
+ [0x316E] = "mieumpieupkorean",
+ [0x316F] = "mieumsioskorean",
+ [0x3170] = "mieumpansioskorean",
+ [0x3171] = "kapyeounmieumkorean",
+ [0x3172] = "pieupkiyeokkorean",
+ [0x3173] = "pieuptikeutkorean",
+ [0x3174] = "pieupsioskiyeokkorean",
+ [0x3175] = "pieupsiostikeutkorean",
+ [0x3176] = "pieupcieuckorean",
+ [0x3177] = "pieupthieuthkorean",
+ [0x3178] = "kapyeounpieupkorean",
+ [0x3179] = "kapyeounssangpieupkorean",
+ [0x317A] = "sioskiyeokkorean",
+ [0x317B] = "siosnieunkorean",
+ [0x317C] = "siostikeutkorean",
+ [0x317D] = "siospieupkorean",
+ [0x317E] = "sioscieuckorean",
+ [0x317F] = "pansioskorean",
+ [0x3180] = "ssangieungkorean",
+ [0x3181] = "yesieungkorean",
+ [0x3182] = "yesieungsioskorean",
+ [0x3183] = "yesieungpansioskorean",
+ [0x3184] = "kapyeounphieuphkorean",
+ [0x3185] = "ssanghieuhkorean",
+ [0x3186] = "yeorinhieuhkorean",
+ [0x3187] = "yoyakorean",
+ [0x3188] = "yoyaekorean",
+ [0x3189] = "yoikorean",
+ [0x318A] = "yuyeokorean",
+ [0x318B] = "yuyekorean",
+ [0x318C] = "yuikorean",
+ [0x318D] = "araeakorean",
+ [0x318E] = "araeaekorean",
+ [0x3200] = "kiyeokparenkorean",
+ [0x3201] = "nieunparenkorean",
+ [0x3202] = "tikeutparenkorean",
+ [0x3203] = "rieulparenkorean",
+ [0x3204] = "mieumparenkorean",
+ [0x3205] = "pieupparenkorean",
+ [0x3206] = "siosparenkorean",
+ [0x3207] = "ieungparenkorean",
+ [0x3208] = "cieucparenkorean",
+ [0x3209] = "chieuchparenkorean",
+ [0x320A] = "khieukhparenkorean",
+ [0x320B] = "thieuthparenkorean",
+ [0x320C] = "phieuphparenkorean",
+ [0x320D] = "hieuhparenkorean",
+ [0x320E] = "kiyeokaparenkorean",
+ [0x320F] = "nieunaparenkorean",
+ [0x3210] = "tikeutaparenkorean",
+ [0x3211] = "rieulaparenkorean",
+ [0x3212] = "mieumaparenkorean",
+ [0x3213] = "pieupaparenkorean",
+ [0x3214] = "siosaparenkorean",
+ [0x3215] = "ieungaparenkorean",
+ [0x3216] = "cieucaparenkorean",
+ [0x3217] = "chieuchaparenkorean",
+ [0x3218] = "khieukhaparenkorean",
+ [0x3219] = "thieuthaparenkorean",
+ [0x321A] = "phieuphaparenkorean",
+ [0x321B] = "hieuhaparenkorean",
+ [0x321C] = "cieucuparenkorean",
+ [0x3220] = "oneideographicparen",
+ [0x3221] = "twoideographicparen",
+ [0x3222] = "threeideographicparen",
+ [0x3223] = "fourideographicparen",
+ [0x3224] = "fiveideographicparen",
+ [0x3225] = "sixideographicparen",
+ [0x3226] = "sevenideographicparen",
+ [0x3227] = "eightideographicparen",
+ [0x3228] = "nineideographicparen",
+ [0x3229] = "tenideographicparen",
+ [0x322A] = "ideographicmoonparen",
+ [0x322B] = "ideographicfireparen",
+ [0x322C] = "ideographicwaterparen",
+ [0x322D] = "ideographicwoodparen",
+ [0x322E] = "ideographicmetalparen",
+ [0x322F] = "ideographicearthparen",
+ [0x3230] = "ideographicsunparen",
+ [0x3231] = "ideographicstockparen",
+ [0x3232] = "ideographichaveparen",
+ [0x3233] = "ideographicsocietyparen",
+ [0x3234] = "ideographicnameparen",
+ [0x3235] = "ideographicspecialparen",
+ [0x3236] = "ideographicfinancialparen",
+ [0x3237] = "ideographiccongratulationparen",
+ [0x3238] = "ideographiclaborparen",
+ [0x3239] = "ideographicrepresentparen",
+ [0x323A] = "ideographiccallparen",
+ [0x323B] = "ideographicstudyparen",
+ [0x323C] = "ideographicsuperviseparen",
+ [0x323D] = "ideographicenterpriseparen",
+ [0x323E] = "ideographicresourceparen",
+ [0x323F] = "ideographicallianceparen",
+ [0x3240] = "ideographicfestivalparen",
+ [0x3242] = "ideographicselfparen",
+ [0x3243] = "ideographicreachparen",
+ [0x3260] = "kiyeokcirclekorean",
+ [0x3261] = "nieuncirclekorean",
+ [0x3262] = "tikeutcirclekorean",
+ [0x3263] = "rieulcirclekorean",
+ [0x3264] = "mieumcirclekorean",
+ [0x3265] = "pieupcirclekorean",
+ [0x3266] = "sioscirclekorean",
+ [0x3267] = "ieungcirclekorean",
+ [0x3268] = "cieuccirclekorean",
+ [0x3269] = "chieuchcirclekorean",
+ [0x326A] = "khieukhcirclekorean",
+ [0x326B] = "thieuthcirclekorean",
+ [0x326C] = "phieuphcirclekorean",
+ [0x326D] = "hieuhcirclekorean",
+ [0x326E] = "kiyeokacirclekorean",
+ [0x326F] = "nieunacirclekorean",
+ [0x3270] = "tikeutacirclekorean",
+ [0x3271] = "rieulacirclekorean",
+ [0x3272] = "mieumacirclekorean",
+ [0x3273] = "pieupacirclekorean",
+ [0x3274] = "siosacirclekorean",
+ [0x3275] = "ieungacirclekorean",
+ [0x3276] = "cieucacirclekorean",
+ [0x3277] = "chieuchacirclekorean",
+ [0x3278] = "khieukhacirclekorean",
+ [0x3279] = "thieuthacirclekorean",
+ [0x327A] = "phieuphacirclekorean",
+ [0x327B] = "hieuhacirclekorean",
+ [0x327F] = "koreanstandardsymbol",
+ [0x328A] = "ideographmooncircle",
+ [0x328B] = "ideographfirecircle",
+ [0x328C] = "ideographwatercircle",
+ [0x328D] = "ideographwoodcircle",
+ [0x328E] = "ideographmetalcircle",
+ [0x328F] = "ideographearthcircle",
+ [0x3290] = "ideographsuncircle",
+ [0x3294] = "ideographnamecircle",
+ [0x3296] = "ideographicfinancialcircle",
+ [0x3298] = "ideographiclaborcircle",
+ [0x3299] = "ideographicsecretcircle",
+ [0x329D] = "ideographicexcellentcircle",
+ [0x329E] = "ideographicprintcircle",
+ [0x32A3] = "ideographiccorrectcircle",
+ [0x32A4] = "ideographichighcircle",
+ [0x32A5] = "ideographiccentrecircle",
+ [0x32A6] = "ideographiclowcircle",
+ [0x32A7] = "ideographicleftcircle",
+ [0x32A8] = "ideographicrightcircle",
+ [0x32A9] = "ideographicmedicinecircle",
+ [0x3300] = "apaatosquare",
+ [0x3303] = "aarusquare",
+ [0x3305] = "intisquare",
+ [0x330D] = "karoriisquare",
+ [0x3314] = "kirosquare",
+ [0x3315] = "kiroguramusquare",
+ [0x3316] = "kiromeetorusquare",
+ [0x3318] = "guramusquare",
+ [0x331E] = "kooposquare",
+ [0x3322] = "sentisquare",
+ [0x3323] = "sentosquare",
+ [0x3326] = "dorusquare",
+ [0x3327] = "tonsquare",
+ [0x332A] = "haitusquare",
+ [0x332B] = "paasentosquare",
+ [0x3331] = "birusquare",
+ [0x3333] = "huiitosquare",
+ [0x3336] = "hekutaarusquare",
+ [0x3339] = "herutusquare",
+ [0x333B] = "peezisquare",
+ [0x3342] = "hoonsquare",
+ [0x3347] = "mansyonsquare",
+ [0x3349] = "mirisquare",
+ [0x334A] = "miribaarusquare",
+ [0x334D] = "meetorusquare",
+ [0x334E] = "yaadosquare",
+ [0x3351] = "rittorusquare",
+ [0x3357] = "wattosquare",
+ [0x337B] = "heiseierasquare",
+ [0x337C] = "syouwaerasquare",
+ [0x337D] = "taisyouerasquare",
+ [0x337E] = "meizierasquare",
+ [0x337F] = "corporationsquare",
+ [0x3380] = "paampssquare",
+ [0x3381] = "nasquare",
+ [0x3382] = "muasquare",
+ [0x3383] = "masquare",
+ [0x3384] = "kasquare",
+ [0x3385] = "KBsquare",
+ [0x3386] = "MBsquare",
+ [0x3387] = "GBsquare",
+ [0x3388] = "calsquare",
+ [0x3389] = "kcalsquare",
+ [0x338A] = "pfsquare",
+ [0x338B] = "nfsquare",
+ [0x338C] = "mufsquare",
+ [0x338D] = "mugsquare",
+ [0x338E] = "squaremg",
+ [0x338F] = "squarekg",
+ [0x3390] = "Hzsquare",
+ [0x3391] = "khzsquare",
+ [0x3392] = "mhzsquare",
+ [0x3393] = "ghzsquare",
+ [0x3394] = "thzsquare",
+ [0x3395] = "mulsquare",
+ [0x3396] = "mlsquare",
+ [0x3397] = "dlsquare",
+ [0x3398] = "klsquare",
+ [0x3399] = "fmsquare",
+ [0x339A] = "nmsquare",
+ [0x339B] = "mumsquare",
+ [0x339C] = "squaremm",
+ [0x339D] = "squarecm",
+ [0x339E] = "squarekm",
+ [0x339F] = "mmsquaredsquare",
+ [0x33A0] = "cmsquaredsquare",
+ [0x33A1] = "squaremsquared",
+ [0x33A2] = "kmsquaredsquare",
+ [0x33A3] = "mmcubedsquare",
+ [0x33A4] = "cmcubedsquare",
+ [0x33A5] = "mcubedsquare",
+ [0x33A6] = "kmcubedsquare",
+ [0x33A7] = "moverssquare",
+ [0x33A8] = "moverssquaredsquare",
+ [0x33A9] = "pasquare",
+ [0x33AA] = "kpasquare",
+ [0x33AB] = "mpasquare",
+ [0x33AC] = "gpasquare",
+ [0x33AD] = "radsquare",
+ [0x33AE] = "radoverssquare",
+ [0x33AF] = "radoverssquaredsquare",
+ [0x33B0] = "pssquare",
+ [0x33B1] = "nssquare",
+ [0x33B2] = "mussquare",
+ [0x33B3] = "mssquare",
+ [0x33B4] = "pvsquare",
+ [0x33B5] = "nvsquare",
+ [0x33B6] = "muvsquare",
+ [0x33B7] = "mvsquare",
+ [0x33B8] = "kvsquare",
+ [0x33B9] = "mvmegasquare",
+ [0x33BA] = "pwsquare",
+ [0x33BB] = "nwsquare",
+ [0x33BC] = "muwsquare",
+ [0x33BD] = "mwsquare",
+ [0x33BE] = "kwsquare",
+ [0x33BF] = "mwmegasquare",
+ [0x33C0] = "kohmsquare",
+ [0x33C1] = "mohmsquare",
+ [0x33C2] = "amsquare",
+ [0x33C3] = "bqsquare",
+ [0x33C4] = "squarecc",
+ [0x33C5] = "cdsquare",
+ [0x33C6] = "coverkgsquare",
+ [0x33C7] = "cosquare",
+ [0x33C8] = "dbsquare",
+ [0x33C9] = "gysquare",
+ [0x33CA] = "hasquare",
+ [0x33CB] = "HPsquare",
+ [0x33CD] = "KKsquare",
+ [0x33CE] = "squarekmcapital",
+ [0x33CF] = "ktsquare",
+ [0x33D0] = "lmsquare",
+ [0x33D1] = "squareln",
+ [0x33D2] = "squarelog",
+ [0x33D3] = "lxsquare",
+ [0x33D4] = "mbsquare",
+ [0x33D5] = "squaremil",
+ [0x33D6] = "molsquare",
+ [0x33D8] = "pmsquare",
+ [0x33DB] = "srsquare",
+ [0x33DC] = "svsquare",
+ [0x33DD] = "wbsquare",
+ [0x5344] = "twentyhangzhou",
+ [0xF6BE] = "dotlessj",
+ [0xF6BF] = "LL",
+ [0xF6C0] = "ll",
+ [0xF6C3] = "commaaccent",
+ [0xF6C4] = "afii10063",
+ [0xF6C5] = "afii10064",
+ [0xF6C6] = "afii10192",
+ [0xF6C7] = "afii10831",
+ [0xF6C8] = "afii10832",
+ [0xF6C9] = "Acute",
+ [0xF6CA] = "Caron",
+ [0xF6CB] = "Dieresis",
+ [0xF6CC] = "DieresisAcute",
+ [0xF6CD] = "DieresisGrave",
+ [0xF6CE] = "Grave",
+ [0xF6CF] = "Hungarumlaut",
+ [0xF6D0] = "Macron",
+ [0xF6D1] = "cyrBreve",
+ [0xF6D2] = "cyrFlex",
+ [0xF6D3] = "dblGrave",
+ [0xF6D4] = "cyrbreve",
+ [0xF6D5] = "cyrflex",
+ [0xF6D6] = "dblgrave",
+ [0xF6D7] = "dieresisacute",
+ [0xF6D8] = "dieresisgrave",
+ [0xF6D9] = "copyrightserif",
+ [0xF6DA] = "registerserif",
+ [0xF6DB] = "trademarkserif",
+ [0xF6DC] = "onefitted",
+ [0xF6DD] = "rupiah",
+ [0xF6DE] = "threequartersemdash",
+ [0xF6DF] = "centinferior",
+ [0xF6E0] = "centsuperior",
+ [0xF6E1] = "commainferior",
+ [0xF6E2] = "commasuperior",
+ [0xF6E3] = "dollarinferior",
+ [0xF6E4] = "dollarsuperior",
+ [0xF6E5] = "hypheninferior",
+ [0xF6E6] = "hyphensuperior",
+ [0xF6E7] = "periodinferior",
+ [0xF6E8] = "periodsuperior",
+ [0xF6E9] = "asuperior",
+ [0xF6EA] = "bsuperior",
+ [0xF6EB] = "dsuperior",
+ [0xF6EC] = "esuperior",
+ [0xF6ED] = "isuperior",
+ [0xF6EE] = "lsuperior",
+ [0xF6EF] = "msuperior",
+ [0xF6F0] = "osuperior",
+ [0xF6F1] = "rsuperior",
+ [0xF6F2] = "ssuperior",
+ [0xF6F3] = "tsuperior",
+ [0xF6F4] = "Brevesmall",
+ [0xF6F5] = "Caronsmall",
+ [0xF6F6] = "Circumflexsmall",
+ [0xF6F7] = "Dotaccentsmall",
+ [0xF6F8] = "Hungarumlautsmall",
+ [0xF6F9] = "Lslashsmall",
+ [0xF6FA] = "OEsmall",
+ [0xF6FB] = "Ogoneksmall",
+ [0xF6FC] = "Ringsmall",
+ [0xF6FD] = "Scaronsmall",
+ [0xF6FE] = "Tildesmall",
+ [0xF6FF] = "Zcaronsmall",
+ [0xF721] = "exclamsmall",
+ [0xF724] = "dollaroldstyle",
+ [0xF726] = "ampersandsmall",
+ [0xF730] = "zerooldstyle",
+ [0xF731] = "oneoldstyle",
+ [0xF732] = "twooldstyle",
+ [0xF733] = "threeoldstyle",
+ [0xF734] = "fouroldstyle",
+ [0xF735] = "fiveoldstyle",
+ [0xF736] = "sixoldstyle",
+ [0xF737] = "sevenoldstyle",
+ [0xF738] = "eightoldstyle",
+ [0xF739] = "nineoldstyle",
+ [0xF73F] = "questionsmall",
+ [0xF760] = "Gravesmall",
+ [0xF761] = "Asmall",
+ [0xF762] = "Bsmall",
+ [0xF763] = "Csmall",
+ [0xF764] = "Dsmall",
+ [0xF765] = "Esmall",
+ [0xF766] = "Fsmall",
+ [0xF767] = "Gsmall",
+ [0xF768] = "Hsmall",
+ [0xF769] = "Ismall",
+ [0xF76A] = "Jsmall",
+ [0xF76B] = "Ksmall",
+ [0xF76C] = "Lsmall",
+ [0xF76D] = "Msmall",
+ [0xF76E] = "Nsmall",
+ [0xF76F] = "Osmall",
+ [0xF770] = "Psmall",
+ [0xF771] = "Qsmall",
+ [0xF772] = "Rsmall",
+ [0xF773] = "Ssmall",
+ [0xF774] = "Tsmall",
+ [0xF775] = "Usmall",
+ [0xF776] = "Vsmall",
+ [0xF777] = "Wsmall",
+ [0xF778] = "Xsmall",
+ [0xF779] = "Ysmall",
+ [0xF77A] = "Zsmall",
+ [0xF7A1] = "exclamdownsmall",
+ [0xF7A2] = "centoldstyle",
+ [0xF7A8] = "Dieresissmall",
+ [0xF7AF] = "Macronsmall",
+ [0xF7B4] = "Acutesmall",
+ [0xF7B8] = "Cedillasmall",
+ [0xF7BF] = "questiondownsmall",
+ [0xF7E0] = "Agravesmall",
+ [0xF7E1] = "Aacutesmall",
+ [0xF7E2] = "Acircumflexsmall",
+ [0xF7E3] = "Atildesmall",
+ [0xF7E4] = "Adieresissmall",
+ [0xF7E5] = "Aringsmall",
+ [0xF7E6] = "AEsmall",
+ [0xF7E7] = "Ccedillasmall",
+ [0xF7E8] = "Egravesmall",
+ [0xF7E9] = "Eacutesmall",
+ [0xF7EA] = "Ecircumflexsmall",
+ [0xF7EB] = "Edieresissmall",
+ [0xF7EC] = "Igravesmall",
+ [0xF7ED] = "Iacutesmall",
+ [0xF7EE] = "Icircumflexsmall",
+ [0xF7EF] = "Idieresissmall",
+ [0xF7F0] = "Ethsmall",
+ [0xF7F1] = "Ntildesmall",
+ [0xF7F2] = "Ogravesmall",
+ [0xF7F3] = "Oacutesmall",
+ [0xF7F4] = "Ocircumflexsmall",
+ [0xF7F5] = "Otildesmall",
+ [0xF7F6] = "Odieresissmall",
+ [0xF7F8] = "Oslashsmall",
+ [0xF7F9] = "Ugravesmall",
+ [0xF7FA] = "Uacutesmall",
+ [0xF7FB] = "Ucircumflexsmall",
+ [0xF7FC] = "Udieresissmall",
+ [0xF7FD] = "Yacutesmall",
+ [0xF7FE] = "Thornsmall",
+ [0xF7FF] = "Ydieresissmall",
+ [0xF884] = "maihanakatleftthai",
+ [0xF885] = "saraileftthai",
+ [0xF886] = "saraiileftthai",
+ [0xF887] = "saraueleftthai",
+ [0xF888] = "saraueeleftthai",
+ [0xF889] = "maitaikhuleftthai",
+ [0xF88A] = "maiekupperleftthai",
+ [0xF88B] = "maieklowrightthai",
+ [0xF88C] = "maieklowleftthai",
+ [0xF88D] = "maithoupperleftthai",
+ [0xF88E] = "maitholowrightthai",
+ [0xF88F] = "maitholowleftthai",
+ [0xF890] = "maitriupperleftthai",
+ [0xF891] = "maitrilowrightthai",
+ [0xF892] = "maitrilowleftthai",
+ [0xF893] = "maichattawaupperleftthai",
+ [0xF894] = "maichattawalowrightthai",
+ [0xF895] = "maichattawalowleftthai",
+ [0xF896] = "thanthakhatupperleftthai",
+ [0xF897] = "thanthakhatlowrightthai",
+ [0xF898] = "thanthakhatlowleftthai",
+ [0xF899] = "nikhahitleftthai",
+ [0xF8E5] = "radicalex",
+ [0xF8E6] = "arrowvertex",
+ [0xF8E7] = "arrowhorizex",
+ [0xF8E8] = "registersans",
+ [0xF8E9] = "copyrightsans",
+ [0xF8EA] = "trademarksans",
+ [0xF8EB] = "parenlefttp",
+ [0xF8EC] = "parenleftex",
+ [0xF8ED] = "parenleftbt",
+ [0xF8EE] = "bracketlefttp",
+ [0xF8EF] = "bracketleftex",
+ [0xF8F0] = "bracketleftbt",
+ [0xF8F1] = "bracelefttp",
+ [0xF8F2] = "braceleftmid",
+ [0xF8F3] = "braceleftbt",
+ [0xF8F4] = "braceex",
+ [0xF8F5] = "integralex",
+ [0xF8F6] = "parenrighttp",
+ [0xF8F7] = "parenrightex",
+ [0xF8F8] = "parenrightbt",
+ [0xF8F9] = "bracketrighttp",
+ [0xF8FA] = "bracketrightex",
+ [0xF8FB] = "bracketrightbt",
+ [0xF8FC] = "bracerighttp",
+ [0xF8FD] = "bracerightmid",
+ [0xF8FE] = "bracerightbt",
+ [0xF8FF] = "apple",
+ [0xFB00] = "ff",
+ [0xFB01] = "fi",
+ [0xFB02] = "fl",
+ [0xFB03] = "ffi",
+ [0xFB04] = "ffl",
+ [0xFB1F] = "yodyodpatahhebrew",
+ [0xFB20] = "ayinaltonehebrew",
+ [0xFB2A] = "shinshindothebrew",
+ [0xFB2B] = "shinsindothebrew",
+ [0xFB2C] = "shindageshshindothebrew",
+ [0xFB2D] = "shindageshsindothebrew",
+ [0xFB2E] = "alefpatahhebrew",
+ [0xFB2F] = "alefqamatshebrew",
+ [0xFB30] = "alefdageshhebrew",
+ [0xFB31] = "betdageshhebrew",
+ [0xFB32] = "gimeldageshhebrew",
+ [0xFB33] = "daletdageshhebrew",
+ [0xFB34] = "hedageshhebrew",
+ [0xFB35] = "vavdageshhebrew",
+ [0xFB36] = "zayindageshhebrew",
+ [0xFB38] = "tetdageshhebrew",
+ [0xFB39] = "yoddageshhebrew",
+ [0xFB3A] = "finalkafdageshhebrew",
+ [0xFB3B] = "kafdageshhebrew",
+ [0xFB3C] = "lameddageshhebrew",
+ [0xFB3E] = "memdageshhebrew",
+ [0xFB40] = "nundageshhebrew",
+ [0xFB41] = "samekhdageshhebrew",
+ [0xFB43] = "pefinaldageshhebrew",
+ [0xFB44] = "pedageshhebrew",
+ [0xFB46] = "tsadidageshhebrew",
+ [0xFB47] = "qofdageshhebrew",
+ [0xFB48] = "reshdageshhebrew",
+ [0xFB49] = "shindageshhebrew",
+ [0xFB4A] = "tavdageshhebrew",
+ [0xFB4B] = "vavholamhebrew",
+ [0xFB4C] = "betrafehebrew",
+ [0xFB4D] = "kafrafehebrew",
+ [0xFB4E] = "perafehebrew",
+ [0xFB4F] = "aleflamedhebrew",
+ [0xFB57] = "pehfinalarabic",
+ [0xFB58] = "pehinitialarabic",
+ [0xFB59] = "pehmedialarabic",
+ [0xFB67] = "ttehfinalarabic",
+ [0xFB68] = "ttehinitialarabic",
+ [0xFB69] = "ttehmedialarabic",
+ [0xFB6B] = "vehfinalarabic",
+ [0xFB6C] = "vehinitialarabic",
+ [0xFB6D] = "vehmedialarabic",
+ [0xFB7B] = "tchehfinalarabic",
+ [0xFB7C] = "tchehmeeminitialarabic",
+ [0xFB7D] = "tchehmedialarabic",
+ [0xFB89] = "ddalfinalarabic",
+ [0xFB8B] = "jehfinalarabic",
+ [0xFB8D] = "rrehfinalarabic",
+ [0xFB93] = "gaffinalarabic",
+ [0xFB94] = "gafinitialarabic",
+ [0xFB95] = "gafmedialarabic",
+ [0xFB9F] = "noonghunnafinalarabic",
+ [0xFBA4] = "hehhamzaaboveisolatedarabic",
+ [0xFBA5] = "hehhamzaabovefinalarabic",
+ [0xFBA7] = "hehfinalaltonearabic",
+ [0xFBA8] = "hehinitialaltonearabic",
+ [0xFBA9] = "hehmedialaltonearabic",
+ [0xFBAF] = "yehbarreefinalarabic",
+ [0xFC08] = "behmeemisolatedarabic",
+ [0xFC0B] = "tehjeemisolatedarabic",
+ [0xFC0C] = "tehhahisolatedarabic",
+ [0xFC0E] = "tehmeemisolatedarabic",
+ [0xFC48] = "meemmeemisolatedarabic",
+ [0xFC4B] = "noonjeemisolatedarabic",
+ [0xFC4E] = "noonmeemisolatedarabic",
+ [0xFC58] = "yehmeemisolatedarabic",
+ [0xFC5E] = "shaddadammatanarabic",
+ [0xFC5F] = "shaddakasratanarabic",
+ [0xFC60] = "shaddafathaarabic",
+ [0xFC61] = "shaddadammaarabic",
+ [0xFC62] = "shaddakasraarabic",
+ [0xFC6D] = "behnoonfinalarabic",
+ [0xFC73] = "tehnoonfinalarabic",
+ [0xFC8D] = "noonnoonfinalarabic",
+ [0xFC94] = "yehnoonfinalarabic",
+ [0xFC9F] = "behmeeminitialarabic",
+ [0xFCA1] = "tehjeeminitialarabic",
+ [0xFCA2] = "tehhahinitialarabic",
+ [0xFCA4] = "tehmeeminitialarabic",
+ [0xFCC9] = "lamjeeminitialarabic",
+ [0xFCCA] = "lamhahinitialarabic",
+ [0xFCCB] = "lamkhahinitialarabic",
+ [0xFCCC] = "lammeeminitialarabic",
+ [0xFCD1] = "meemmeeminitialarabic",
+ [0xFCD2] = "noonjeeminitialarabic",
+ [0xFCD5] = "noonmeeminitialarabic",
+ [0xFCDD] = "yehmeeminitialarabic",
+ [0xFD3E] = "parenleftaltonearabic",
+ [0xFD3F] = "parenrightaltonearabic",
+ [0xFD88] = "lammeemhahinitialarabic",
+ [0xFDF2] = "lamlamhehisolatedarabic",
+ [0xFDFA] = "sallallahoualayhewasallamarabic",
+ [0xFE30] = "twodotleadervertical",
+ [0xFE31] = "emdashvertical",
+ [0xFE32] = "endashvertical",
+ [0xFE33] = "underscorevertical",
+ [0xFE34] = "wavyunderscorevertical",
+ [0xFE35] = "parenleftvertical",
+ [0xFE36] = "parenrightvertical",
+ [0xFE37] = "braceleftvertical",
+ [0xFE38] = "bracerightvertical",
+ [0xFE39] = "tortoiseshellbracketleftvertical",
+ [0xFE3A] = "tortoiseshellbracketrightvertical",
+ [0xFE3B] = "blacklenticularbracketleftvertical",
+ [0xFE3C] = "blacklenticularbracketrightvertical",
+ [0xFE3D] = "dblanglebracketleftvertical",
+ [0xFE3E] = "dblanglebracketrightvertical",
+ [0xFE3F] = "anglebracketleftvertical",
+ [0xFE40] = "anglebracketrightvertical",
+ [0xFE41] = "cornerbracketleftvertical",
+ [0xFE42] = "cornerbracketrightvertical",
+ [0xFE43] = "whitecornerbracketleftvertical",
+ [0xFE44] = "whitecornerbracketrightvertical",
+ [0xFE49] = "overlinedashed",
+ [0xFE4A] = "overlinecenterline",
+ [0xFE4B] = "overlinewavy",
+ [0xFE4C] = "overlinedblwavy",
+ [0xFE4D] = "lowlinedashed",
+ [0xFE4E] = "lowlinecenterline",
+ [0xFE4F] = "underscorewavy",
+ [0xFE50] = "commasmall",
+ [0xFE52] = "periodsmall",
+ [0xFE54] = "semicolonsmall",
+ [0xFE55] = "colonsmall",
+ [0xFE59] = "parenleftsmall",
+ [0xFE5A] = "parenrightsmall",
+ [0xFE5B] = "braceleftsmall",
+ [0xFE5C] = "bracerightsmall",
+ [0xFE5D] = "tortoiseshellbracketleftsmall",
+ [0xFE5E] = "tortoiseshellbracketrightsmall",
+ [0xFE5F] = "numbersignsmall",
+ [0xFE61] = "asterisksmall",
+ [0xFE62] = "plussmall",
+ [0xFE63] = "hyphensmall",
+ [0xFE64] = "lesssmall",
+ [0xFE65] = "greatersmall",
+ [0xFE66] = "equalsmall",
+ [0xFE69] = "dollarsmall",
+ [0xFE6A] = "percentsmall",
+ [0xFE6B] = "atsmall",
+ [0xFE82] = "alefmaddaabovefinalarabic",
+ [0xFE84] = "alefhamzaabovefinalarabic",
+ [0xFE86] = "wawhamzaabovefinalarabic",
+ [0xFE88] = "alefhamzabelowfinalarabic",
+ [0xFE8A] = "yehhamzaabovefinalarabic",
+ [0xFE8B] = "yehhamzaaboveinitialarabic",
+ [0xFE8C] = "yehhamzaabovemedialarabic",
+ [0xFE8E] = "aleffinalarabic",
+ [0xFE90] = "behfinalarabic",
+ [0xFE91] = "behinitialarabic",
+ [0xFE92] = "behmedialarabic",
+ [0xFE94] = "tehmarbutafinalarabic",
+ [0xFE96] = "tehfinalarabic",
+ [0xFE97] = "tehinitialarabic",
+ [0xFE98] = "tehmedialarabic",
+ [0xFE9A] = "thehfinalarabic",
+ [0xFE9B] = "thehinitialarabic",
+ [0xFE9C] = "thehmedialarabic",
+ [0xFE9E] = "jeemfinalarabic",
+ [0xFE9F] = "jeeminitialarabic",
+ [0xFEA0] = "jeemmedialarabic",
+ [0xFEA2] = "hahfinalarabic",
+ [0xFEA3] = "hahinitialarabic",
+ [0xFEA4] = "hahmedialarabic",
+ [0xFEA6] = "khahfinalarabic",
+ [0xFEA7] = "khahinitialarabic",
+ [0xFEA8] = "khahmedialarabic",
+ [0xFEAA] = "dalfinalarabic",
+ [0xFEAC] = "thalfinalarabic",
+ [0xFEAE] = "rehfinalarabic",
+ [0xFEB0] = "zainfinalarabic",
+ [0xFEB2] = "seenfinalarabic",
+ [0xFEB3] = "seeninitialarabic",
+ [0xFEB4] = "seenmedialarabic",
+ [0xFEB6] = "sheenfinalarabic",
+ [0xFEB7] = "sheeninitialarabic",
+ [0xFEB8] = "sheenmedialarabic",
+ [0xFEBA] = "sadfinalarabic",
+ [0xFEBB] = "sadinitialarabic",
+ [0xFEBC] = "sadmedialarabic",
+ [0xFEBE] = "dadfinalarabic",
+ [0xFEBF] = "dadinitialarabic",
+ [0xFEC0] = "dadmedialarabic",
+ [0xFEC2] = "tahfinalarabic",
+ [0xFEC3] = "tahinitialarabic",
+ [0xFEC4] = "tahmedialarabic",
+ [0xFEC6] = "zahfinalarabic",
+ [0xFEC7] = "zahinitialarabic",
+ [0xFEC8] = "zahmedialarabic",
+ [0xFECA] = "ainfinalarabic",
+ [0xFECB] = "aininitialarabic",
+ [0xFECC] = "ainmedialarabic",
+ [0xFECE] = "ghainfinalarabic",
+ [0xFECF] = "ghaininitialarabic",
+ [0xFED0] = "ghainmedialarabic",
+ [0xFED2] = "fehfinalarabic",
+ [0xFED3] = "fehinitialarabic",
+ [0xFED4] = "fehmedialarabic",
+ [0xFED6] = "qaffinalarabic",
+ [0xFED7] = "qafinitialarabic",
+ [0xFED8] = "qafmedialarabic",
+ [0xFEDA] = "kaffinalarabic",
+ [0xFEDB] = "kafinitialarabic",
+ [0xFEDC] = "kafmedialarabic",
+ [0xFEDE] = "lamfinalarabic",
+ [0xFEDF] = "lammeemkhahinitialarabic",
+ [0xFEE0] = "lammedialarabic",
+ [0xFEE2] = "meemfinalarabic",
+ [0xFEE3] = "meeminitialarabic",
+ [0xFEE4] = "meemmedialarabic",
+ [0xFEE6] = "noonfinalarabic",
+ [0xFEE7] = "nooninitialarabic",
+ [0xFEE8] = "noonmedialarabic",
+ [0xFEEA] = "hehfinalarabic",
+ [0xFEEB] = "hehinitialarabic",
+ [0xFEEC] = "hehmedialarabic",
+ [0xFEEE] = "wawfinalarabic",
+ [0xFEF0] = "alefmaksurafinalarabic",
+ [0xFEF2] = "yehfinalarabic",
+ [0xFEF3] = "yehinitialarabic",
+ [0xFEF4] = "yehmedialarabic",
+ [0xFEF5] = "lamalefmaddaaboveisolatedarabic",
+ [0xFEF6] = "lamalefmaddaabovefinalarabic",
+ [0xFEF7] = "lamalefhamzaaboveisolatedarabic",
+ [0xFEF8] = "lamalefhamzaabovefinalarabic",
+ [0xFEF9] = "lamalefhamzabelowisolatedarabic",
+ [0xFEFA] = "lamalefhamzabelowfinalarabic",
+ [0xFEFB] = "lamalefisolatedarabic",
+ [0xFEFC] = "lamaleffinalarabic",
+ [0xFEFF] = "zerowidthjoiner",
+ [0xFF01] = "exclammonospace",
+ [0xFF02] = "quotedblmonospace",
+ [0xFF03] = "numbersignmonospace",
+ [0xFF04] = "dollarmonospace",
+ [0xFF05] = "percentmonospace",
+ [0xFF06] = "ampersandmonospace",
+ [0xFF07] = "quotesinglemonospace",
+ [0xFF08] = "parenleftmonospace",
+ [0xFF09] = "parenrightmonospace",
+ [0xFF0A] = "asteriskmonospace",
+ [0xFF0B] = "plusmonospace",
+ [0xFF0C] = "commamonospace",
+ [0xFF0D] = "hyphenmonospace",
+ [0xFF0E] = "periodmonospace",
+ [0xFF0F] = "slashmonospace",
+ [0xFF10] = "zeromonospace",
+ [0xFF11] = "onemonospace",
+ [0xFF12] = "twomonospace",
+ [0xFF13] = "threemonospace",
+ [0xFF14] = "fourmonospace",
+ [0xFF15] = "fivemonospace",
+ [0xFF16] = "sixmonospace",
+ [0xFF17] = "sevenmonospace",
+ [0xFF18] = "eightmonospace",
+ [0xFF19] = "ninemonospace",
+ [0xFF1A] = "colonmonospace",
+ [0xFF1B] = "semicolonmonospace",
+ [0xFF1C] = "lessmonospace",
+ [0xFF1D] = "equalmonospace",
+ [0xFF1E] = "greatermonospace",
+ [0xFF1F] = "questionmonospace",
+ [0xFF20] = "atmonospace",
+ [0xFF21] = "Amonospace",
+ [0xFF22] = "Bmonospace",
+ [0xFF23] = "Cmonospace",
+ [0xFF24] = "Dmonospace",
+ [0xFF25] = "Emonospace",
+ [0xFF26] = "Fmonospace",
+ [0xFF27] = "Gmonospace",
+ [0xFF28] = "Hmonospace",
+ [0xFF29] = "Imonospace",
+ [0xFF2A] = "Jmonospace",
+ [0xFF2B] = "Kmonospace",
+ [0xFF2C] = "Lmonospace",
+ [0xFF2D] = "Mmonospace",
+ [0xFF2E] = "Nmonospace",
+ [0xFF2F] = "Omonospace",
+ [0xFF30] = "Pmonospace",
+ [0xFF31] = "Qmonospace",
+ [0xFF32] = "Rmonospace",
+ [0xFF33] = "Smonospace",
+ [0xFF34] = "Tmonospace",
+ [0xFF35] = "Umonospace",
+ [0xFF36] = "Vmonospace",
+ [0xFF37] = "Wmonospace",
+ [0xFF38] = "Xmonospace",
+ [0xFF39] = "Ymonospace",
+ [0xFF3A] = "Zmonospace",
+ [0xFF3B] = "bracketleftmonospace",
+ [0xFF3C] = "backslashmonospace",
+ [0xFF3D] = "bracketrightmonospace",
+ [0xFF3E] = "asciicircummonospace",
+ [0xFF3F] = "underscoremonospace",
+ [0xFF40] = "gravemonospace",
+ [0xFF41] = "amonospace",
+ [0xFF42] = "bmonospace",
+ [0xFF43] = "cmonospace",
+ [0xFF44] = "dmonospace",
+ [0xFF45] = "emonospace",
+ [0xFF46] = "fmonospace",
+ [0xFF47] = "gmonospace",
+ [0xFF48] = "hmonospace",
+ [0xFF49] = "imonospace",
+ [0xFF4A] = "jmonospace",
+ [0xFF4B] = "kmonospace",
+ [0xFF4C] = "lmonospace",
+ [0xFF4D] = "mmonospace",
+ [0xFF4E] = "nmonospace",
+ [0xFF4F] = "omonospace",
+ [0xFF50] = "pmonospace",
+ [0xFF51] = "qmonospace",
+ [0xFF52] = "rmonospace",
+ [0xFF53] = "smonospace",
+ [0xFF54] = "tmonospace",
+ [0xFF55] = "umonospace",
+ [0xFF56] = "vmonospace",
+ [0xFF57] = "wmonospace",
+ [0xFF58] = "xmonospace",
+ [0xFF59] = "ymonospace",
+ [0xFF5A] = "zmonospace",
+ [0xFF5B] = "braceleftmonospace",
+ [0xFF5C] = "barmonospace",
+ [0xFF5D] = "bracerightmonospace",
+ [0xFF5E] = "asciitildemonospace",
+ [0xFF61] = "periodhalfwidth",
+ [0xFF62] = "cornerbracketlefthalfwidth",
+ [0xFF63] = "cornerbracketrighthalfwidth",
+ [0xFF64] = "ideographiccommaleft",
+ [0xFF65] = "middledotkatakanahalfwidth",
+ [0xFF66] = "wokatakanahalfwidth",
+ [0xFF67] = "asmallkatakanahalfwidth",
+ [0xFF68] = "ismallkatakanahalfwidth",
+ [0xFF69] = "usmallkatakanahalfwidth",
+ [0xFF6A] = "esmallkatakanahalfwidth",
+ [0xFF6B] = "osmallkatakanahalfwidth",
+ [0xFF6C] = "yasmallkatakanahalfwidth",
+ [0xFF6D] = "yusmallkatakanahalfwidth",
+ [0xFF6E] = "yosmallkatakanahalfwidth",
+ [0xFF6F] = "tusmallkatakanahalfwidth",
+ [0xFF70] = "katahiraprolongmarkhalfwidth",
+ [0xFF71] = "akatakanahalfwidth",
+ [0xFF72] = "ikatakanahalfwidth",
+ [0xFF73] = "ukatakanahalfwidth",
+ [0xFF74] = "ekatakanahalfwidth",
+ [0xFF75] = "okatakanahalfwidth",
+ [0xFF76] = "kakatakanahalfwidth",
+ [0xFF77] = "kikatakanahalfwidth",
+ [0xFF78] = "kukatakanahalfwidth",
+ [0xFF79] = "kekatakanahalfwidth",
+ [0xFF7A] = "kokatakanahalfwidth",
+ [0xFF7B] = "sakatakanahalfwidth",
+ [0xFF7C] = "sikatakanahalfwidth",
+ [0xFF7D] = "sukatakanahalfwidth",
+ [0xFF7E] = "sekatakanahalfwidth",
+ [0xFF7F] = "sokatakanahalfwidth",
+ [0xFF80] = "takatakanahalfwidth",
+ [0xFF81] = "tikatakanahalfwidth",
+ [0xFF82] = "tukatakanahalfwidth",
+ [0xFF83] = "tekatakanahalfwidth",
+ [0xFF84] = "tokatakanahalfwidth",
+ [0xFF85] = "nakatakanahalfwidth",
+ [0xFF86] = "nikatakanahalfwidth",
+ [0xFF87] = "nukatakanahalfwidth",
+ [0xFF88] = "nekatakanahalfwidth",
+ [0xFF89] = "nokatakanahalfwidth",
+ [0xFF8A] = "hakatakanahalfwidth",
+ [0xFF8B] = "hikatakanahalfwidth",
+ [0xFF8C] = "hukatakanahalfwidth",
+ [0xFF8D] = "hekatakanahalfwidth",
+ [0xFF8E] = "hokatakanahalfwidth",
+ [0xFF8F] = "makatakanahalfwidth",
+ [0xFF90] = "mikatakanahalfwidth",
+ [0xFF91] = "mukatakanahalfwidth",
+ [0xFF92] = "mekatakanahalfwidth",
+ [0xFF93] = "mokatakanahalfwidth",
+ [0xFF94] = "yakatakanahalfwidth",
+ [0xFF95] = "yukatakanahalfwidth",
+ [0xFF96] = "yokatakanahalfwidth",
+ [0xFF97] = "rakatakanahalfwidth",
+ [0xFF98] = "rikatakanahalfwidth",
+ [0xFF99] = "rukatakanahalfwidth",
+ [0xFF9A] = "rekatakanahalfwidth",
+ [0xFF9B] = "rokatakanahalfwidth",
+ [0xFF9C] = "wakatakanahalfwidth",
+ [0xFF9D] = "nkatakanahalfwidth",
+ [0xFF9E] = "voicedmarkkanahalfwidth",
+ [0xFF9F] = "semivoicedmarkkanahalfwidth",
+ [0xFFE0] = "centmonospace",
+ [0xFFE1] = "sterlingmonospace",
+ [0xFFE3] = "macronmonospace",
+ [0xFFE5] = "yenmonospace",
+ [0xFFE6] = "wonmonospace",
+}
+
+agl.unicodes = allocate(table.swapped(agl.names)) -- to unicode
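The last added line derives the reverse mapping: agl.names is keyed by code point, and swapping it yields a name-to-unicode lookup. A minimal sketch of that inversion, assuming table.swapped simply exchanges keys and values (allocate is only a storage wrapper); the sample entries are illustrative:

-- Sketch: invert a code-point-to-name table so that names map back to
-- code points, which is roughly what table.swapped is expected to do here.
local function swapped(t)
    local s = { }
    for k, v in next, t do
        s[v] = k
    end
    return s
end

local names    = { [0x2013] = "endash", [0x2014] = "emdash" }
local unicodes = swapped(names)
assert(unicodes.endash == 0x2013 and unicodes.emdash == 0x2014)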
diff --git a/otfl-font-cid.lua b/otfl-font-cid.lua
index d1c727a..1d03bca 100644
--- a/otfl-font-cid.lua
+++ b/otfl-font-cid.lua
@@ -12,11 +12,14 @@ local lpegmatch = lpeg.match
local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
-fonts = fonts or { }
-fonts.cid = fonts.cid or { }
-fonts.cid.map = fonts.cid.map or { }
-fonts.cid.max = fonts.cid.max or 10
+local report_otf = logs.new("load otf")
+local fonts = fonts
+
+fonts.cid = fonts.cid or { }
+local cid = fonts.cid
+cid.map = cid.map or { }
+cid.max = cid.max or 10
-- original string parser: 0.109, lpeg parser: 0.036 seconds for Adobe-CNS1-4.cidmap
--
@@ -25,12 +28,14 @@ fonts.cid.max = fonts.cid.max or 10
-- 1..95 0020
-- 99 3000
-local number = lpeg.C(lpeg.R("09","af","AF")^1)
-local space = lpeg.S(" \n\r\t")
+local P, S, R, C = lpeg.P, lpeg.S, lpeg.R, lpeg.C
+
+local number = C(R("09","af","AF")^1)
+local space = S(" \n\r\t")
local spaces = space^0
-local period = lpeg.P(".")
+local period = P(".")
local periods = period * period
-local name = lpeg.P("/") * lpeg.C((1-space)^1)
+local name = P("/") * C((1-space)^1)
local unicodes, names = { }, { }
@@ -58,7 +63,7 @@ local grammar = lpeg.P { "start",
named = (number * spaces * name) / do_name
}
-function fonts.cid.load(filename)
+function cid.load(filename)
local data = io.loaddata(filename)
if data then
unicodes, names = { }, { }
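The cidmap syntax hinted at in the comments above (1..95 0020 for a range, 99 3000 for a single pair, /name for named glyphs) maps naturally onto an lpeg grammar built from the localized constructors. A self-contained sketch under those assumptions; the patterns and callbacks below are illustrative, not the grammar from the patch:

-- Minimal sketch (not the actual grammar): parsing cidmap-style lines such as
--   1..95 0020     a range of cids mapped onto consecutive unicodes
--   99 3000        a single cid/unicode pair
local lpeg = require("lpeg")  -- built into LuaTeX; required here for standalone runs
local P, R, C, S = lpeg.P, lpeg.R, lpeg.C, lpeg.S

local number = C(R("09","af","AF")^1)
local space  = S(" \n\r\t")
local spaces = space^0

local function do_range(first, last, unicode)
    print(("range  %s..%s -> %s"):format(first, last, unicode))
end

local function do_one(index, unicode)
    print(("single %s -> %s"):format(index, unicode))
end

local range  = (number * P("..") * number * spaces * number) / do_range
local single = (number * spaces * number) / do_one
local line   = range + single

lpeg.match(line, "1..95 0020")  -- range  1..95 -> 0020
lpeg.match(line, "99 3000")     -- single 99 -> 3000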
@@ -79,23 +84,22 @@ end
local template = "%s-%s-%s.cidmap"
-
local function locate(registry,ordering,supplement)
local filename = format(template,registry,ordering,supplement)
local hashname = lower(filename)
- local cidmap = fonts.cid.map[hashname]
+ local cidmap = cid.map[hashname]
if not cidmap then
if trace_loading then
- logs.report("load otf","checking cidmap, registry: %s, ordering: %s, supplement: %s, filename: %s",registry,ordering,supplement,filename)
+ report_otf("checking cidmap, registry: %s, ordering: %s, supplement: %s, filename: %s",registry,ordering,supplement,filename)
end
- local fullname = resolvers.find_file(filename,'cid') or ""
+ local fullname = resolvers.findfile(filename,'cid') or ""
if fullname ~= "" then
- cidmap = fonts.cid.load(fullname)
+ cidmap = cid.load(fullname)
if cidmap then
if trace_loading then
- logs.report("load otf","using cidmap file %s",filename)
+ report_otf("using cidmap file %s",filename)
end
- fonts.cid.map[hashname] = cidmap
+ cid.map[hashname] = cidmap
cidmap.usedname = file.basename(filename)
return cidmap
end
@@ -104,18 +108,18 @@ local function locate(registry,ordering,supplement)
return cidmap
end
-function fonts.cid.getmap(registry,ordering,supplement)
+function cid.getmap(registry,ordering,supplement)
-- cf Arthur R. we can safely scan upwards since cids are downward compatible
local supplement = tonumber(supplement)
if trace_loading then
- logs.report("load otf","needed cidmap, registry: %s, ordering: %s, supplement: %s",registry,ordering,supplement)
+ report_otf("needed cidmap, registry: %s, ordering: %s, supplement: %s",registry,ordering,supplement)
end
local cidmap = locate(registry,ordering,supplement)
if not cidmap then
local cidnum = nil
-- next highest (alternatively we could start high)
- if supplement < fonts.cid.max then
- for supplement=supplement+1,fonts.cid.max do
+ if supplement < cid.max then
+ for supplement=supplement+1,cid.max do
local c = locate(registry,ordering,supplement)
if c then
cidmap, cidnum = c, supplement
@@ -137,8 +141,8 @@ function fonts.cid.getmap(registry,ordering,supplement)
if cidmap and cidnum > 0 then
for s=0,cidnum-1 do
filename = format(template,registry,ordering,s)
- if not fonts.cid.map[filename] then
- fonts.cid.map[filename] = cidmap -- copy of ref
+ if not cid.map[filename] then
+ cid.map[filename] = cidmap -- copy of ref
end
end
end
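
A usage sketch of the rewritten cid.getmap; the registry and ordering values are illustrative only, and whether anything is found depends on the *.cidmap files the resolver can locate:

    local cidmap = fonts.cid.getmap("Adobe", "Japan1", 2)
    if cidmap then
        -- if only a higher supplement is installed, locate() walks up to
        -- cid.max, uses that file and also caches it under the lower
        -- supplement names as a shared reference
        print(cidmap.usedname)
    end
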
diff --git a/otfl-font-def.lua b/otfl-font-def.lua
index 8e64872..e87fee4 100644
--- a/otfl-font-def.lua
+++ b/otfl-font-def.lua
@@ -10,45 +10,54 @@ local format, concat, gmatch, match, find, lower = string.format, table.concat,
local tostring, next = tostring, next
local lpegmatch = lpeg.match
+local allocate = utilities.storage.allocate
+
local trace_defining = false trackers .register("fonts.defining", function(v) trace_defining = v end)
local directive_embedall = false directives.register("fonts.embedall", function(v) directive_embedall = v end)
trackers.register("fonts.loading", "fonts.defining", "otf.loading", "afm.loading", "tfm.loading")
trackers.register("fonts.all", "fonts.*", "otf.*", "afm.*", "tfm.*")
+local report_define = logs.new("define fonts")
+local report_afm = logs.new("load afm")
+
--[[ldx--
<p>Here we deal with defining fonts. We do so by intercepting the
default loader that only handles <l n='tfm'/>.</p>
--ldx]]--
-fonts = fonts or { }
-fonts.define = fonts.define or { }
-fonts.tfm = fonts.tfm or { }
-fonts.ids = fonts.ids or { }
-fonts.vf = fonts.vf or { }
-fonts.used = fonts.used or { }
+local fonts = fonts
+local tfm = fonts.tfm
+local vf = fonts.vf
+local fontcsnames = fonts.csnames
+
+fonts.used = allocate()
-local tfm = fonts.tfm
-local vf = fonts.vf
-local define = fonts.define
+tfm.readers = tfm.readers or { }
+tfm.fonts = allocate()
+tfm.internalized = allocate() -- internal tex numbers
-tfm.version = 1.01
-tfm.cache = containers.define("fonts", "tfm", tfm.version, false) -- better in font-tfm
+local readers = tfm.readers
+local sequence = allocate { 'otf', 'ttf', 'afm', 'tfm' }
+readers.sequence = sequence
-define.method = "afm or tfm" -- afm, tfm, afm or tfm, tfm or afm
-define.specify = fonts.define.specify or { }
-define.methods = fonts.define.methods or { }
+tfm.version = 1.01
+tfm.cache = containers.define("fonts", "tfm", tfm.version, false) -- better in font-tfm
+tfm.autoprefixedafm = true -- this will become false some day (catches texnansi-blabla.*)
-tfm.fonts = tfm.fonts or { }
-tfm.readers = tfm.readers or { }
-tfm.internalized = tfm.internalized or { } -- internal tex numbers
+fonts.definers = fonts.definers or { }
+local definers = fonts.definers
-tfm.readers.sequence = { 'otf', 'ttf', 'afm', 'tfm' }
+definers.specifiers = definers.specifiers or { }
+local specifiers = definers.specifiers
-tfm.auto_afm = true
+specifiers.variants = allocate()
+local variants = specifiers.variants
-local readers = tfm.readers
-local sequence = readers.sequence
+definers.method = "afm or tfm" -- afm, tfm, afm or tfm, tfm or afm
+definers.methods = definers.methods or { }
+
+local findbinfile = resolvers.findbinfile
--[[ldx--
<p>We hardly gain anything when we cache the final (pre scaled)
@@ -77,7 +86,7 @@ and prepares a table that will move along as we proceed.</p>
-- name name(sub) name(sub)*spec name*spec
-- name@spec*oeps
-local splitter, specifiers = nil, ""
+local splitter, splitspecifiers = nil, ""
local P, C, S, Cc = lpeg.P, lpeg.C, lpeg.S, lpeg.Cc
@@ -86,13 +95,13 @@ local right = P(")")
local colon = P(":")
local space = P(" ")
-define.defaultlookup = "file"
+definers.defaultlookup = "file"
local prefixpattern = P(false)
-function define.add_specifier(symbol)
- specifiers = specifiers .. symbol
- local method = S(specifiers)
+local function addspecifier(symbol)
+ splitspecifiers = splitspecifiers .. symbol
+ local method = S(splitspecifiers)
local lookup = C(prefixpattern) * colon
local sub = left * C(P(1-left-right-method)^1) * right
local specification = C(method) * C(P(1)^1)
@@ -100,36 +109,36 @@ function define.add_specifier(symbol)
splitter = P((lookup + Cc("")) * name * (sub + Cc("")) * (specification + Cc("")))
end
-function define.add_lookup(str,default)
+local function addlookup(str,default)
prefixpattern = prefixpattern + P(str)
end
-define.add_lookup("file")
-define.add_lookup("name")
-define.add_lookup("spec")
+definers.addlookup = addlookup
+
+addlookup("file")
+addlookup("name")
+addlookup("spec")
-function define.get_specification(str)
+local function getspecification(str)
return lpegmatch(splitter,str)
end
-function define.register_split(symbol,action)
- define.add_specifier(symbol)
- define.specify[symbol] = action
+definers.getspecification = getspecification
+
+function definers.registersplit(symbol,action)
+ addspecifier(symbol)
+ variants[symbol] = action
end
-function define.makespecification(specification, lookup, name, sub, method, detail, size)
+function definers.makespecification(specification, lookup, name, sub, method, detail, size)
size = size or 655360
if trace_defining then
- logs.report("define font","%s -> lookup: %s, name: %s, sub: %s, method: %s, detail: %s",
+ report_define("%s -> lookup: %s, name: %s, sub: %s, method: %s, detail: %s",
specification, (lookup ~= "" and lookup) or "[file]", (name ~= "" and name) or "-",
(sub ~= "" and sub) or "-", (method ~= "" and method) or "-", (detail ~= "" and detail) or "-")
end
---~ if specification.lookup then
---~ lookup = specification.lookup -- can come from xetex [] syntax
---~ specification.lookup = nil
---~ end
if not lookup or lookup == "" then
- lookup = define.defaultlookup
+ lookup = definers.defaultlookup
end
local t = {
lookup = lookup, -- forced type
@@ -146,10 +155,10 @@ function define.makespecification(specification, lookup, name, sub, method, deta
return t
end
-function define.analyze(specification, size)
+function definers.analyze(specification, size)
-- can be optimized with locals
- local lookup, name, sub, method, detail = define.get_specification(specification or "")
- return define.makespecification(specification, lookup, name, sub, method, detail, size)
+ local lookup, name, sub, method, detail = getspecification(specification or "")
+ return definers.makespecification(specification, lookup, name, sub, method, detail, size)
end
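
A quick way to see what the splitter produces is to feed a request string to the exported accessor; this is a sketch only — the exact split depends on the registered lookup prefixes and specifier symbols, and otfl-font-dum.lua further below swaps in a much simpler variant for use outside ConTeXt:

    -- illustration only: the five return values of the splitter
    local lookup, name, sub, method, detail =
        fonts.definers.getspecification("file:texgyrepagella-regular.otf")
    -- an unrecognized or empty lookup prefix is later replaced by
    -- definers.defaultlookup ("file") in definers.makespecification
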
--[[ldx--
@@ -158,17 +167,18 @@ end
local sortedhashkeys = table.sortedhashkeys
-function tfm.hash_features(specification)
+function tfm.hashfeatures(specification)
local features = specification.features
if features then
- local t = { }
+ local t, tn = { }, 0
local normal = features.normal
if normal and next(normal) then
local f = sortedhashkeys(normal)
for i=1,#f do
local v = f[i]
if v ~= "number" and v ~= "features" then -- i need to figure this out, features
- t[#t+1] = v .. '=' .. tostring(normal[v])
+ tn = tn + 1
+ t[tn] = v .. '=' .. tostring(normal[v])
end
end
end
@@ -177,20 +187,22 @@ function tfm.hash_features(specification)
local f = sortedhashkeys(vtf)
for i=1,#f do
local v = f[i]
- t[#t+1] = v .. '=' .. tostring(vtf[v])
+ tn = tn + 1
+ t[tn] = v .. '=' .. tostring(vtf[v])
end
end
---~ if specification.mathsize then
---~ t[#t+1] = "mathsize=" .. specification.mathsize
---~ end
- if #t > 0 then
+ --~ if specification.mathsize then
+ --~ tn = tn + 1
+ --~ t[tn] = "mathsize=" .. specification.mathsize
+ --~ end
+ if tn > 0 then
return concat(t,"+")
end
end
return "unknown"
end
-fonts.designsizes = { }
+fonts.designsizes = allocate()
--[[ldx--
<p>In principle we can share tfm tables when we are in node for a font, but then
@@ -200,14 +212,14 @@ when we get rid of base mode we can optimize even further by sharing, but then w
 lose our testcases for <l n='luatex'/>.</p>
--ldx]]--
-function tfm.hash_instance(specification,force)
+function tfm.hashinstance(specification,force)
local hash, size, fallbacks = specification.hash, specification.size, specification.fallbacks
if force or not hash then
- hash = tfm.hash_features(specification)
+ hash = tfm.hashfeatures(specification)
specification.hash = hash
end
if size < 1000 and fonts.designsizes[hash] then
- size = math.round(tfm.scaled(size, fonts.designsizes[hash]))
+ size = math.round(tfm.scaled(size,fonts.designsizes[hash]))
specification.size = size
end
--~ local mathsize = specification.mathsize or 0
@@ -231,11 +243,12 @@ end
<p>We can resolve the filename using the next function:</p>
--ldx]]--
-define.resolvers = resolvers
+definers.resolvers = definers.resolvers or { }
+local resolvers = definers.resolvers
-- todo: reporter
-function define.resolvers.file(specification)
+function resolvers.file(specification)
local suffix = file.suffix(specification.name)
if fonts.formats[suffix] then
specification.forced = suffix
@@ -243,7 +256,7 @@ function define.resolvers.file(specification)
end
end
-function define.resolvers.name(specification)
+function resolvers.name(specification)
local resolve = fonts.names.resolve
if resolve then
local resolved, sub = fonts.names.resolve(specification)
@@ -258,11 +271,11 @@ function define.resolvers.name(specification)
end
end
else
- define.resolvers.file(specification)
+ resolvers.file(specification)
end
end
-function define.resolvers.spec(specification)
+function resolvers.spec(specification)
local resolvespec = fonts.names.resolvespec
if resolvespec then
specification.resolved, specification.sub = fonts.names.resolvespec(specification)
@@ -271,13 +284,13 @@ function define.resolvers.spec(specification)
specification.name = file.removesuffix(specification.resolved)
end
else
- define.resolvers.name(specification)
+ resolvers.name(specification)
end
end
-function define.resolve(specification)
+function definers.resolve(specification)
if not specification.resolved or specification.resolved == "" then -- resolved itself not per se in mapping hash
- local r = define.resolvers[specification.lookup]
+ local r = resolvers[specification.lookup]
if r then
r(specification)
end
@@ -287,7 +300,16 @@ function define.resolve(specification)
else
specification.forced = specification.forced
end
- specification.hash = lower(specification.name .. ' @ ' .. tfm.hash_features(specification))
+ -- for the moment here (goodies set outside features)
+ local goodies = specification.goodies
+ if goodies and goodies ~= "" then
+ local normalgoodies = specification.features.normal.goodies
+ if not normalgoodies or normalgoodies == "" then
+ specification.features.normal.goodies = goodies
+ end
+ end
+ --
+ specification.hash = lower(specification.name .. ' @ ' .. tfm.hashfeatures(specification))
if specification.sub and specification.sub ~= "" then
specification.hash = specification.sub .. ' @ ' .. specification.hash
end
@@ -311,21 +333,21 @@ specification yet.</p>
--ldx]]--
function tfm.read(specification)
- local hash = tfm.hash_instance(specification)
+ local hash = tfm.hashinstance(specification)
local tfmtable = tfm.fonts[hash] -- hashes by size !
if not tfmtable then
local forced = specification.forced or ""
if forced ~= "" then
tfmtable = readers[lower(forced)](specification)
if not tfmtable then
- logs.report("define font","forced type %s of %s not found",forced,specification.name)
+ report_define("forced type %s of %s not found",forced,specification.name)
end
else
for s=1,#sequence do -- reader sequence
local reader = sequence[s]
if readers[reader] then -- not really needed
if trace_defining then
- logs.report("define font","trying (reader sequence driven) type %s for %s with file %s",reader,specification.name,specification.filename or "unknown")
+ report_define("trying (reader sequence driven) type %s for %s with file %s",reader,specification.name,specification.filename or "unknown")
end
tfmtable = readers[reader](specification)
if tfmtable then
@@ -350,7 +372,7 @@ function tfm.read(specification)
end
end
if not tfmtable then
- logs.report("define font","font with name %s is not found",specification.name)
+ report_define("font with name %s is not found",specification.name)
end
return tfmtable
end
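
tfm.read above indexes tfm.fonts by the hash that tfm.hashinstance builds from the feature set and size; a minimal sketch of the feature part (the feature names are only examples):

    local spec = {
        features = {
            normal = { mode = "node", liga = true, kern = true, script = "latn" },
        },
    }
    print(fonts.tfm.hashfeatures(spec))
    -- sorted key=value pairs joined with "+", i.e. something like
    -- "kern=true+liga=true+mode=node+script=latn"; an empty feature
    -- set yields the string "unknown"
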
@@ -359,22 +381,22 @@ end
<p>For virtual fonts we need a slightly different approach:</p>
--ldx]]--
-function tfm.read_and_define(name,size) -- no id
- local specification = define.analyze(name,size)
+function tfm.readanddefine(name,size) -- no id
+ local specification = definers.analyze(name,size)
local method = specification.method
- if method and define.specify[method] then
- specification = define.specify[method](specification)
+ if method and variants[method] then
+ specification = variants[method](specification)
end
- specification = define.resolve(specification)
- local hash = tfm.hash_instance(specification)
- local id = define.registered(hash)
+ specification = definers.resolve(specification)
+ local hash = tfm.hashinstance(specification)
+ local id = definers.registered(hash)
if not id then
local fontdata = tfm.read(specification)
if fontdata then
fontdata.hash = hash
id = font.define(fontdata)
- define.register(fontdata,id)
- tfm.cleanup_table(fontdata)
+ definers.register(fontdata,id)
+ tfm.cleanuptable(fontdata)
else
id = 0 -- signal
end
@@ -390,9 +412,12 @@ evolved. Each one has its own way of dealing with its format.</p>
local function check_tfm(specification,fullname)
-- ofm directive blocks local path search unless set; btw, in context we
-- don't support ofm files anyway as this format is obsolete
- local foundname = resolvers.findbinfile(fullname, 'tfm') or "" -- just to be sure
+ local foundname = findbinfile(fullname, 'tfm') or "" -- just to be sure
+ if foundname == "" then
+ foundname = findbinfile(fullname, 'ofm') or "" -- bonus for usage outside context
+ end
if foundname == "" then
- foundname = resolvers.findbinfile(fullname, 'ofm') or "" -- bonus for usage outside context
+ foundname = fonts.names.getfilename(fullname,"tfm")
end
if foundname ~= "" then
specification.filename, specification.format = foundname, "ofm"
@@ -401,16 +426,18 @@ local function check_tfm(specification,fullname)
end
local function check_afm(specification,fullname)
- local foundname = resolvers.findbinfile(fullname, 'afm') or "" -- just to be sure
- if foundname == "" and tfm.auto_afm then
+ local foundname = findbinfile(fullname, 'afm') or "" -- just to be sure
+ if foundname == "" then
+ foundname = fonts.names.getfilename(fullname,"afm")
+ end
+ if foundname == "" and tfm.autoprefixedafm then
local encoding, shortname = match(fullname,"^(.-)%-(.*)$") -- context: encoding-name.*
if encoding and shortname and fonts.enc.known[encoding] then
- shortname = resolvers.findbinfile(shortname,'afm') or "" -- just to be sure
+ shortname = findbinfile(shortname,'afm') or "" -- just to be sure
if shortname ~= "" then
foundname = shortname
- -- tfm.set_normal_feature(specification,'encoding',encoding) -- will go away
if trace_loading then
- logs.report("load afm","stripping encoding prefix from filename %s",afmname)
+                        report_afm("stripping encoding prefix from filename %s",fullname)
end
end
end
@@ -445,7 +472,7 @@ function readers.afm(specification,method)
tfmtable = check_afm(specification,specification.name .. "." .. forced)
end
if not tfmtable then
- method = method or define.method or "afm or tfm"
+ method = method or definers.method or "afm or tfm"
if method == "tfm" then
tfmtable = check_tfm(specification,specification.name)
elseif method == "afm" then
@@ -469,22 +496,27 @@ local function check_otf(forced,specification,suffix,what)
if forced then
name = file.addsuffix(name,suffix,true)
end
- local fullname, tfmtable = resolvers.findbinfile(name,suffix) or "", nil -- one shot
- if fullname == "" then
- local fb = fonts.names.old_to_new[name]
- if fb then
- fullname = resolvers.findbinfile(fb,suffix) or ""
- end
- end
+ local fullname, tfmtable = findbinfile(name,suffix) or "", nil -- one shot
+ -- if false then -- can be enabled again when needed
+ -- if fullname == "" then
+ -- local fb = fonts.names.old_to_new[name]
+ -- if fb then
+ -- fullname = findbinfile(fb,suffix) or ""
+ -- end
+ -- end
+ -- if fullname == "" then
+ -- local fb = fonts.names.new_to_old[name]
+ -- if fb then
+ -- fullname = findbinfile(fb,suffix) or ""
+ -- end
+ -- end
+ -- end
if fullname == "" then
- local fb = fonts.names.new_to_old[name]
- if fb then
- fullname = resolvers.findbinfile(fb,suffix) or ""
- end
+ fullname = fonts.names.getfilename(name,suffix)
end
if fullname ~= "" then
specification.filename, specification.format = fullname, what -- hm, so we do set the filename, then
- tfmtable = tfm.read_from_open_type(specification) -- we need to do it for all matches / todo
+ tfmtable = tfm.read_from_otf(specification) -- we need to do it for all matches / todo
end
return tfmtable
end
@@ -510,7 +542,7 @@ function readers.dfont(specification) return readers.opentype(specification,"ttf
a helper function.</p>
--ldx]]--
-function define.check(features,defaults) -- nb adapts features !
+function definers.check(features,defaults) -- nb adapts features !
local done = false
if features and next(features) then
for k,v in next, defaults do
@@ -525,7 +557,7 @@ function define.check(features,defaults) -- nb adapts features !
end
--[[ldx--
-<p>So far the specifyers. Now comes the real definer. Here we cache
+<p>So far the specifiers. Now comes the real definer. Here we cache
based on id's. Here we also intercept the virtual font handler. Since
it evolved stepwise I may rewrite this bit (combine code).</p>
@@ -536,25 +568,29 @@ not gain much. By the way, passing id's back to in the callback was
introduced later in the development.</p>
--ldx]]--
-define.last = nil
+local lastdefined = nil -- we don't want this one to end up in s-tra-02
-function define.register(fontdata,id)
+function definers.current() -- or maybe current
+ return lastdefined
+end
+
+function definers.register(fontdata,id)
if fontdata and id then
local hash = fontdata.hash
if not tfm.internalized[hash] then
if trace_defining then
- logs.report("define font","loading at 2 id %s, hash: %s",id or "?",hash or "?")
+ report_define("loading at 2 id %s, hash: %s",id or "?",hash or "?")
end
fonts.identifiers[id] = fontdata
fonts.characters [id] = fontdata.characters
- fonts.quads [id] = fontdata.parameters.quad
+ fonts.quads [id] = fontdata.parameters and fontdata.parameters.quad
-- todo: extra functions, e.g. setdigitwidth etc in list
tfm.internalized[hash] = id
end
end
end
-function define.registered(hash)
+function definers.registered(hash)
local id = tfm.internalized[hash]
return id, id and fonts.ids[id]
end
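
Since define.last is gone, the new accessor is the way to peek at the most recently defined font from Lua; a small sketch (the fields printed are ones set elsewhere in this file):

    local fd = fonts.definers.current()
    if type(fd) == "table" then          -- can also be a bare id number
        print(fd.name, fd.hash, fd.filename)
    end
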
@@ -569,7 +605,7 @@ function tfm.make(specification)
-- however, when virtual tricks are used as feature (makes more
-- sense) we scale the commands in fonts.tfm.scale (and set the
-- factor there)
- local fvm = define.methods[specification.features.vtf.preset]
+ local fvm = definers.methods.variants[specification.features.vtf.preset]
if fvm then
return fvm(specification)
else
@@ -577,28 +613,28 @@ function tfm.make(specification)
end
end
-function define.read(specification,size,id) -- id can be optional, name can already be table
+function definers.read(specification,size,id) -- id can be optional, name can already be table
statistics.starttiming(fonts)
if type(specification) == "string" then
- specification = define.analyze(specification,size)
+ specification = definers.analyze(specification,size)
end
local method = specification.method
- if method and define.specify[method] then
- specification = define.specify[method](specification)
+ if method and variants[method] then
+ specification = variants[method](specification)
end
- specification = define.resolve(specification)
- local hash = tfm.hash_instance(specification)
+ specification = definers.resolve(specification)
+ local hash = tfm.hashinstance(specification)
if cache_them then
local fontdata = containers.read(fonts.cache,hash) -- for tracing purposes
end
- local fontdata = define.registered(hash) -- id
+ local fontdata = definers.registered(hash) -- id
if not fontdata then
if specification.features.vtf and specification.features.vtf.preset then
fontdata = tfm.make(specification)
else
fontdata = tfm.read(specification)
if fontdata then
- tfm.check_virtual_id(fontdata)
+ tfm.checkvirtualid(fontdata)
end
end
if cache_them then
@@ -608,15 +644,15 @@ function define.read(specification,size,id) -- id can be optional, name can alre
fontdata.hash = hash
fontdata.cache = "no"
if id then
- define.register(fontdata,id)
+ definers.register(fontdata,id)
end
end
end
- define.last = fontdata or id -- todo ! ! ! ! !
- if not fontdata then
- logs.report("define font", "unknown font %s, loading aborted",specification.name)
+ lastdefined = fontdata or id -- todo ! ! ! ! !
+ if not fontdata then -- or id?
+ report_define( "unknown font %s, loading aborted",specification.name)
elseif trace_defining and type(fontdata) == "table" then
- logs.report("define font","using %s font with id %s, name:%s size:%s bytes:%s encoding:%s fullname:%s filename:%s",
+ report_define("using %s font with id %s, name:%s size:%s bytes:%s encoding:%s fullname:%s filename:%s",
fontdata.type or "unknown",
id or "?",
fontdata.name or "?",
@@ -625,7 +661,10 @@ function define.read(specification,size,id) -- id can be optional, name can alre
fontdata.encodingname or "unicode",
fontdata.fullname or "?",
file.basename(fontdata.filename or "?"))
-
+ end
+ local cs = specification.cs
+ if cs then
+ fontcsnames[cs] = fontdata -- new (beware: locals can be forgotten)
end
statistics.stoptiming(fonts)
return fontdata
@@ -633,24 +672,24 @@ end
function vf.find(name)
name = file.removesuffix(file.basename(name))
- if tfm.resolve_vf then
+ if tfm.resolvevirtualtoo then
local format = fonts.logger.format(name)
if format == 'tfm' or format == 'ofm' then
if trace_defining then
- logs.report("define font","locating vf for %s",name)
+ report_define("locating vf for %s",name)
end
- return resolvers.findbinfile(name,"ovf")
+ return findbinfile(name,"ovf")
else
if trace_defining then
- logs.report("define font","vf for %s is already taken care of",name)
+ report_define("vf for %s is already taken care of",name)
end
return nil -- ""
end
else
if trace_defining then
- logs.report("define font","locating vf for %s",name)
+ report_define("locating vf for %s",name)
end
- return resolvers.findbinfile(name,"ovf")
+ return findbinfile(name,"ovf")
end
end
@@ -658,5 +697,5 @@ end
<p>We overload both the <l n='tfm'/> and <l n='vf'/> readers.</p>
--ldx]]--
-callbacks.register('define_font' , define.read, "definition of fonts (tfmtable preparation)")
+callbacks.register('define_font' , definers.read, "definition of fonts (tfmtable preparation)")
callbacks.register('find_vf_file', vf.find , "locating virtual fonts, insofar needed") -- not that relevant any more
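
For reference, the renamed entry points chain up as follows when a font is requested; the call below is a sketch only — the request string is illustrative and whether it resolves depends on the lookup handlers and the installed fonts:

    -- normally LuaTeX invokes definers.read through the define_font
    -- callback registered above and supplies the id itself
    local fontdata = fonts.definers.read("lmroman10-regular", 10 * 65536)
    -- internally: analyze -> optional specifier variant -> resolve ->
    -- hashinstance -> registered or tfm.read -> register; on failure it
    -- reports "unknown font ..." and returns nil
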
diff --git a/otfl-font-dum.lua b/otfl-font-dum.lua
index c9ffb63..14d155a 100644
--- a/otfl-font-dum.lua
+++ b/otfl-font-dum.lua
@@ -10,9 +10,9 @@ fonts = fonts or { }
-- general
-fonts.otf.pack = false
-fonts.tfm.resolve_vf = false -- no sure about this
-fonts.tfm.fontname_mode = "specification" -- somehow latex needs this
+fonts.otf.pack = false -- only makes sense in context
+fonts.tfm.resolvevirtualtoo = false -- context specific (due to resolver)
+fonts.tfm.fontnamemode = "specification" -- somehow latex needs this (changed name!)
-- readers
@@ -22,16 +22,17 @@ fonts.tfm.readers.afm = nil
-- define
-fonts.define = fonts.define or { }
+fonts.definers = fonts.definers or { }
+fonts.definers.specifiers = fonts.definers.specifiers or { }
---~ fonts.define.method = "tfm"
+fonts.definers.specifiers.colonizedpreference = "name" -- is "file" in context
-fonts.define.specify.colonized_default_lookup = "name"
-
-function fonts.define.get_specification(str)
+function fonts.definers.getspecification(str)
return "", str, "", ":", str
end
+fonts.definers.registersplit("",fonts.definers.specifiers.variants[":"]) -- we add another one for catching lone [names]
+
-- logger
fonts.logger = fonts.logger or { }
@@ -63,7 +64,7 @@ function fonts.names.resolve(name,sub)
if basename and basename ~= "" then
for i=1,#fileformats do
local format = fileformats[i]
- local foundname = resolvers.find_file(basename,format) or ""
+ local foundname = resolvers.findfile(basename,format) or ""
if foundname ~= "" then
data = dofile(foundname)
break
@@ -90,6 +91,10 @@ end
fonts.names.resolvespec = fonts.names.resolve -- only supported in mkiv
+function fonts.names.getfilename(askedname,suffix) -- only supported in mkiv
+ return ""
+end
+
-- For the moment we put this (adapted) pseudo feature here.
table.insert(fonts.triggers,"itlc")
@@ -158,119 +163,20 @@ fonts.protrusions.setups = fonts.protrusions.setups or { }
local setups = fonts.protrusions.setups
--- As this is experimental code, users should not depend on it. The
--- implications are still discussed on the ConTeXt Dev List and we're
--- not sure yet what exactly the spec is (the next code is tested with
--- a gyre font patched by / fea file made by Khaled Hosny). The double
--- trick should not be needed it proper hanging punctuation is used in
--- which case values < 1 can be used.
---
--- preferred (in context, usine vectors):
---
--- \definefontfeature[whatever][default][mode=node,protrusion=quality]
---
--- using lfbd and rtbd, with possibibility to enable only one side :
---
--- \definefontfeature[whocares][default][mode=node,protrusion=yes, opbd=yes,script=latn]
--- \definefontfeature[whocares][default][mode=node,protrusion=right,opbd=yes,script=latn]
---
--- idem, using multiplier
---
--- \definefontfeature[whocares][default][mode=node,protrusion=2,opbd=yes,script=latn]
--- \definefontfeature[whocares][default][mode=node,protrusion=double,opbd=yes,script=latn]
---
--- idem, using named feature file (less frozen):
---
--- \definefontfeature[whocares][default][mode=node,protrusion=2,opbd=yes,script=latn,featurefile=texgyrepagella-regularxx.fea]
-
-local function map_opbd_onto_protrusion(tfmdata,value,opbd)
- local characters, descriptions = tfmdata.characters, tfmdata.descriptions
- local otfdata = tfmdata.shared.otfdata
- local singles = otfdata.shared.featuredata.gpos_single
- local script, language = tfmdata.script, tfmdata.language
- local done, factor, left, right = false, 1, 1, 1
- local setup = setups[value]
- if setup then
- factor = setup.factor or 1
- left = setup.left or 1
- right = setup.right or 1
- else
- factor = tonumber(value) or 1
- end
- if opbd ~= "right" then
- local validlookups, lookuplist = fonts.otf.collect_lookups(otfdata,"lfbd",script,language)
- if validlookups then
- for i=1,#lookuplist do
- local lookup = lookuplist[i]
- local data = singles[lookup]
- if data then
- if trace_protrusion then
- logs.report("fonts","set left protrusion using lfbd lookup '%s'",lookup)
- end
- for k, v in next, data do
- -- local p = - v[3] / descriptions[k].width-- or 1 ~= 0 too but the same
- local p = - (v[1] / 1000) * factor * left
- characters[k].left_protruding = p
- if trace_protrusion then
- logs.report("opbd","lfbd -> %s -> 0x%05X (%s) -> %0.03f (%s)",lookup,k,utfchar(k),p,concat(v," "))
- end
- end
- done = true
- end
- end
- end
- end
- if opbd ~= "left" then
- local validlookups, lookuplist = fonts.otf.collect_lookups(otfdata,"rtbd",script,language)
- if validlookups then
- for i=1,#lookuplist do
- local lookup = lookuplist[i]
- local data = singles[lookup]
- if data then
- if trace_protrusion then
- logs.report("fonts","set right protrusion using rtbd lookup '%s'",lookup)
- end
- for k, v in next, data do
- -- local p = v[3] / descriptions[k].width -- or 3
- local p = (v[1] / 1000) * factor * right
- characters[k].right_protruding = p
- if trace_protrusion then
- logs.report("opbd","rtbd -> %s -> 0x%05X (%s) -> %0.03f (%s)",lookup,k,utfchar(k),p,concat(v," "))
- end
- end
- end
- done = true
- end
- end
- end
- tfmdata.auto_protrude = done
-end
-
--- The opbd test is just there because it was discussed on the
--- context development list. However, the mentioned fxlbi.otf font
--- only has some kerns for digits. So, consider this feature not
--- supported till we have a proper test font.
-
function fonts.initializers.common.protrusion(tfmdata,value)
if value then
- local opbd = tfmdata.shared.features.opbd
- if opbd then
- -- possible values: left right both yes no (experimental)
- map_opbd_onto_protrusion(tfmdata,value,opbd)
- elseif value then
- local setup = setups[value]
- if setup then
- local factor, left, right = setup.factor or 1, setup.left or 1, setup.right or 1
- local emwidth = tfmdata.parameters.quad
- tfmdata.auto_protrude = true
- for i, chr in next, tfmdata.characters do
- local v, pl, pr = setup[i], nil, nil
- if v then
- pl, pr = v[1], v[2]
- end
- if pl and pl ~= 0 then chr.left_protruding = left *pl*factor end
- if pr and pr ~= 0 then chr.right_protruding = right*pr*factor end
+ local setup = setups[value]
+ if setup then
+ local factor, left, right = setup.factor or 1, setup.left or 1, setup.right or 1
+ local emwidth = tfmdata.parameters.quad
+ tfmdata.auto_protrude = true
+ for i, chr in next, tfmdata.characters do
+ local v, pl, pr = setup[i], nil, nil
+ if v then
+ pl, pr = v[1], v[2]
end
+ if pl and pl ~= 0 then chr.left_protruding = left *pl*factor end
+ if pr and pr ~= 0 then chr.right_protruding = right*pr*factor end
end
end
end
@@ -309,7 +215,7 @@ fonts.initializers.node.otf.expansion = fonts.initializers.common.expansion
-- left over
-function fonts.register_message()
+function fonts.registermessage()
end
-- example vectors
@@ -360,9 +266,15 @@ fonts.otf.meanings.normalize = fonts.otf.meanings.normalize or function(t)
end
end
+-- needed (different in context)
+
+function fonts.otf.scriptandlanguage(tfmdata)
+ return tfmdata.script, tfmdata.language
+end
+
-- bonus
-function fonts.otf.name_to_slot(name)
+function fonts.otf.nametoslot(name)
local tfmdata = fonts.ids[font.current()]
if tfmdata and tfmdata.shared then
local otfdata = tfmdata.shared.otfdata
@@ -373,7 +285,7 @@ end
function fonts.otf.char(n)
if type(n) == "string" then
- n = fonts.otf.name_to_slot(n)
+ n = fonts.otf.nametoslot(n)
end
if type(n) == "number" then
tex.sprint("\\char" .. n)
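
The two bonus helpers at the end are meant to be called from a \directlua while the target font is current; a minimal sketch (the glyph name is only an example and must exist in the font):

    fonts.otf.char("five.oldstyle")   -- resolves the slot via nametoslot
    fonts.otf.char(0x00E9)            -- a number is passed to \char as-is
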
diff --git a/otfl-font-ini.lua b/otfl-font-ini.lua
index c695ec4..6082c1d 100644
--- a/otfl-font-ini.lua
+++ b/otfl-font-ini.lua
@@ -14,24 +14,33 @@ local utf = unicode.utf8
local format, serialize = string.format, table.serialize
local write_nl = texio.write_nl
local lower = string.lower
+local allocate, mark = utilities.storage.allocate, utilities.storage.mark
-if not fontloader then fontloader = fontforge end
+local report_define = logs.new("define fonts")
fontloader.totable = fontloader.to_table
-- vtf comes first
-- fix comes last
-fonts = fonts or { }
+fonts = fonts or { }
-fonts.ids = fonts.ids or { } fonts.identifiers = fonts.ids -- aka fontdata
-fonts.chr = fonts.chr or { } fonts.characters = fonts.chr -- aka chardata
-fonts.qua = fonts.qua or { } fonts.quads = fonts.qua -- aka quaddata
+-- we will also have des and fam hashes
+
+-- beware, some already defined
+
+fonts.ids = mark(fonts.ids or { }) fonts.identifiers = fonts.ids -- aka fontdata
+fonts.chr = mark(fonts.chr or { }) fonts.characters = fonts.chr -- aka chardata
+fonts.qua = mark(fonts.qua or { }) fonts.quads = fonts.qua -- aka quaddata
+fonts.css = mark(fonts.css or { }) fonts.csnames = fonts.css -- aka namedata
fonts.tfm = fonts.tfm or { }
+fonts.vf = fonts.vf or { }
+fonts.afm = fonts.afm or { }
+fonts.pfb = fonts.pfb or { }
+fonts.otf = fonts.otf or { }
-fonts.mode = 'base'
-fonts.private = 0xF0000 -- 0x10FFFF
+fonts.privateoffset = 0xF0000 -- 0x10FFFF
fonts.verbose = false -- more verbose cache tables
fonts.ids[0] = { -- nullfont
@@ -62,18 +71,28 @@ fonts.triggers = fonts.triggers or {
fonts.processors = fonts.processors or {
}
+fonts.analyzers = fonts.analyzers or {
+ useunicodemarks = false,
+}
+
fonts.manipulators = fonts.manipulators or {
}
-fonts.define = fonts.define or { }
-fonts.define.specify = fonts.define.specify or { }
-fonts.define.specify.synonyms = fonts.define.specify.synonyms or { }
+fonts.tracers = fonts.tracers or {
+}
+
+fonts.typefaces = fonts.typefaces or {
+}
+
+fonts.definers = fonts.definers or { }
+fonts.definers.specifiers = fonts.definers.specifiers or { }
+fonts.definers.specifiers.synonyms = fonts.definers.specifiers.synonyms or { }
-- tracing
-if not fonts.color then
+if not fonts.colors then
- fonts.color = {
+ fonts.colors = allocate {
set = function() end,
reset = function() end,
}
@@ -82,7 +101,7 @@ end
-- format identification
-fonts.formats = { }
+fonts.formats = allocate()
function fonts.fontformat(filename,default)
local extname = lower(file.extname(filename))
@@ -90,7 +109,7 @@ function fonts.fontformat(filename,default)
if format then
return format
else
- logs.report("fonts define","unable to determine font format for '%s'",filename)
+ report_define("unable to determine font format for '%s'",filename)
return default
end
end
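
fonts.formats is filled elsewhere, so the assignments in this sketch are illustrative only; it just shows how fonts.fontformat consults the table by lowercased extension and falls back to the given default:

    fonts.formats.otf = "opentype"     -- illustrative values
    fonts.formats.ttf = "truetype"
    print(fonts.fontformat("TeXGyrePagella-Regular.OTF", "type1"))
    -- "opentype": the extension is lowercased before the lookup; an
    -- unknown suffix triggers report_define and returns the default
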
diff --git a/otfl-font-map.lua b/otfl-font-map.lua
index 2995087..b206632 100644
--- a/otfl-font-map.lua
+++ b/otfl-font-map.lua
@@ -14,7 +14,7 @@ local utfbyte = utf.byte
local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
local trace_unimapping = false trackers.register("otf.unimapping", function(v) trace_unimapping = v end)
-local ctxcatcodes = tex and tex.ctxcatcodes
+local report_otf = logs.new("load otf")
--[[ldx--
<p>Eventually this code will disappear because map files are kind
@@ -22,50 +22,51 @@ of obsolete. Some code may move to runtime or auxiliary modules.</p>
<p>The name to unciode related code will stay of course.</p>
--ldx]]--
-fonts = fonts or { }
-fonts.map = fonts.map or { }
+local fonts = fonts
+fonts.map = fonts.map or { }
-local function load_lum_table(filename) -- will move to font goodies
+local function loadlumtable(filename) -- will move to font goodies
local lumname = file.replacesuffix(file.basename(filename),"lum")
- local lumfile = resolvers.find_file(lumname,"map") or ""
+ local lumfile = resolvers.findfile(lumname,"map") or ""
if lumfile ~= "" and lfs.isfile(lumfile) then
if trace_loading or trace_unimapping then
- logs.report("load otf","enhance: loading %s ",lumfile)
+ report_otf("enhance: loading %s ",lumfile)
end
lumunic = dofile(lumfile)
return lumunic, lumfile
end
end
-local hex = lpeg.R("AF","09")
+local P, R, S, C, Ct, Cc = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cc
+
+local hex = R("AF","09")
local hexfour = (hex*hex*hex*hex) / function(s) return tonumber(s,16) end
local hexsix = (hex^1) / function(s) return tonumber(s,16) end
-local dec = (lpeg.R("09")^1) / tonumber
-local period = lpeg.P(".")
-
-local unicode = lpeg.P("uni") * (hexfour * (period + lpeg.P(-1)) * lpeg.Cc(false) + lpeg.Ct(hexfour^1) * lpeg.Cc(true))
-local ucode = lpeg.P("u") * (hexsix * (period + lpeg.P(-1)) * lpeg.Cc(false) + lpeg.Ct(hexsix ^1) * lpeg.Cc(true))
-local index = lpeg.P("index") * dec * lpeg.Cc(false)
+local dec = (R("09")^1) / tonumber
+local period = P(".")
+local unicode = P("uni") * (hexfour * (period + P(-1)) * Cc(false) + Ct(hexfour^1) * Cc(true))
+local ucode = P("u") * (hexsix * (period + P(-1)) * Cc(false) + Ct(hexsix ^1) * Cc(true))
+local index = P("index") * dec * Cc(false)
local parser = unicode + ucode + index
local parsers = { }
-local function make_name_parser(str)
+local function makenameparser(str)
if not str or str == "" then
return parser
else
local p = parsers[str]
if not p then
- p = lpeg.P(str) * period * dec * lpeg.Cc(false)
+ p = P(str) * period * dec * Cc(false)
parsers[str] = p
end
return p
end
end
---~ local parser = fonts.map.make_name_parser("Japan1")
---~ local parser = fonts.map.make_name_parser()
+--~ local parser = fonts.map.makenameparser("Japan1")
+--~ local parser = fonts.map.makenameparser()
--~ local function test(str)
--~ local b, a = lpegmatch(parser,str)
--~ print((a and table.serialize(b)) or b)
@@ -119,14 +120,14 @@ end
--~ return s
--~ end
-fonts.map.load_lum_table = load_lum_table
-fonts.map.make_name_parser = make_name_parser
+fonts.map.loadlumtable = loadlumtable
+fonts.map.makenameparser = makenameparser
fonts.map.tounicode16 = tounicode16
fonts.map.tounicode16sequence = tounicode16sequence
-local separator = lpeg.S("_.")
-local other = lpeg.C((1 - separator)^1)
-local ligsplitter = lpeg.Ct(other * (separator * other)^0)
+local separator = S("_.")
+local other = C((1 - separator)^1)
+local ligsplitter = Ct(other * (separator * other)^0)
--~ print(table.serialize(lpegmatch(ligsplitter,"this")))
--~ print(table.serialize(lpegmatch(ligsplitter,"this.that")))
@@ -134,7 +135,7 @@ local ligsplitter = lpeg.Ct(other * (separator * other)^0)
--~ print(table.serialize(lpegmatch(ligsplitter,"such_so_more")))
--~ print(table.serialize(lpegmatch(ligsplitter,"such_so_more.that")))
-fonts.map.add_to_unicode = function(data,filename)
+fonts.map.addtounicode = function(data,filename)
local unicodes = data.luatex and data.luatex.unicodes
if not unicodes then
return
@@ -145,11 +146,11 @@ fonts.map.add_to_unicode = function(data,filename)
unicodes['zwj'] = unicodes['zwj'] or 0x200D
unicodes['zwnj'] = unicodes['zwnj'] or 0x200C
-- the tounicode mapping is sparse and only needed for alternatives
- local tounicode, originals, ns, nl, private, unknown = { }, { }, 0, 0, fonts.private, format("%04X",utfbyte("?"))
+ local tounicode, originals, ns, nl, private, unknown = { }, { }, 0, 0, fonts.privateoffset, format("%04X",utfbyte("?"))
data.luatex.tounicode, data.luatex.originals = tounicode, originals
local lumunic, uparser, oparser
if false then -- will become an option
- lumunic = load_lum_table(filename)
+ lumunic = loadlumtable(filename)
lumunic = lumunic and lumunic.tounicode
end
local cidinfo, cidnames, cidcodes = data.cidinfo
@@ -157,12 +158,12 @@ fonts.map.add_to_unicode = function(data,filename)
usedmap = usedmap and lower(usedmap)
usedmap = usedmap and fonts.cid.map[usedmap]
if usedmap then
- oparser = usedmap and make_name_parser(cidinfo.ordering)
+ oparser = usedmap and makenameparser(cidinfo.ordering)
cidnames = usedmap.names
cidcodes = usedmap.unicodes
end
- uparser = make_name_parser()
- local aglmap = fonts.map and fonts.map.agl_to_unicode
+ uparser = makenameparser()
+ local aglmap = fonts.enc and fonts.enc.agl -- to name
for index, glyph in next, data.glyphs do
local name, unic = glyph.name, glyph.unicode or -1 -- play safe
if unic == -1 or unic >= private or (unic >= 0xE000 and unic <= 0xF8FF) or unic == 0xFFFE or unic == 0xFFFF then
@@ -218,19 +219,25 @@ fonts.map.add_to_unicode = function(data,filename)
originals[index], tounicode[index], ns = unicode, tounicode16(unicode), ns + 1
end
else
- local t = { }
+ local t, n = { }, 0
for l=1,nplit do
local base = split[l]
local u = unicodes[base] or (aglmap and aglmap[base])
if not u then
break
elseif type(u) == "table" then
- t[#t+1] = u[1]
+ n = n + 1
+ t[n] = u[1]
else
- t[#t+1] = u
+ n = n + 1
+ t[n] = u
end
end
- if #t > 0 then -- done then
+ if n == 0 then -- done then
+ -- nothing
+ elseif n == 1 then
+ originals[index], tounicode[index], nl, unicode = t[1], tounicode16(t[1]), nl + 1, true
+ else
originals[index], tounicode[index], nl, unicode = t, tounicode16sequence(t), nl + 1, true
end
end
@@ -255,116 +262,13 @@ fonts.map.add_to_unicode = function(data,filename)
for index, glyph in table.sortedhash(data.glyphs) do
local toun, name, unic = tounicode[index], glyph.name, glyph.unicode or -1 -- play safe
if toun then
- logs.report("load otf","internal: 0x%05X, name: %s, unicode: 0x%05X, tounicode: %s",index,name,unic,toun)
+ report_otf("internal: 0x%05X, name: %s, unicode: 0x%05X, tounicode: %s",index,name,unic,toun)
else
- logs.report("load otf","internal: 0x%05X, name: %s, unicode: 0x%05X",index,name,unic)
+ report_otf("internal: 0x%05X, name: %s, unicode: 0x%05X",index,name,unic)
end
end
end
if trace_loading and (ns > 0 or nl > 0) then
- logs.report("load otf","enhance: %s tounicode entries added (%s ligatures)",nl+ns, ns)
+ report_otf("enhance: %s tounicode entries added (%s ligatures)",nl+ns, ns)
end
end
-
--- the following is sort of obsolete
---
--- fonts.map.data = fonts.map.data or { }
--- fonts.map.encodings = fonts.map.encodings or { }
--- fonts.map.loaded = fonts.map.loaded or { }
--- fonts.map.line = fonts.map.line or { }
---
--- function fonts.map.line.pdftex(e)
--- if e.name and e.fontfile then
--- local fullname = e.fullname or ""
--- if e.slant and e.slant ~= 0 then
--- if e.encoding then
--- pdf.mapline(format('= %s %s "%g SlantFont" <%s <%s',e.name,fullname,e.slant,e.encoding,e.fontfile)))
--- else
--- pdf.mapline(format('= %s %s "%g SlantFont" <%s',e.name,fullname,e.slant,e.fontfile)))
--- end
--- elseif e.extend and e.extend ~= 1 and e.extend ~= 0 then
--- if e.encoding then
--- pdf.mapline(format('= %s %s "%g ExtendFont" <%s <%s',e.name,fullname,e.extend,e.encoding,e.fontfile)))
--- else
--- pdf.mapline(format('= %s %s "%g ExtendFont" <%s',e.name,fullname,e.extend,e.fontfile)))
--- end
--- else
--- if e.encoding then
--- pdf.mapline(format('= %s %s <%s <%s',e.name,fullname,e.encoding,e.fontfile)))
--- else
--- pdf.mapline(format('= %s %s <%s',e.name,fullname,e.fontfile)))
--- end
--- end
--- else
--- return nil
--- end
--- end
---
--- function fonts.map.flush(backend) -- will also erase the accumulated data
--- local flushline = fonts.map.line[backend or "pdftex"] or fonts.map.line.pdftex
--- for _, e in next, fonts.map.data do
--- flushline(e)
--- end
--- fonts.map.data = { }
--- end
---
--- fonts.map.line.dvips = fonts.map.line.pdftex
--- fonts.map.line.dvipdfmx = function() end
---
--- function fonts.map.convert_entries(filename)
--- if not fonts.map.loaded[filename] then
--- fonts.map.data, fonts.map.encodings = fonts.map.load_file(filename,fonts.map.data, fonts.map.encodings)
--- fonts.map.loaded[filename] = true
--- end
--- end
---
--- function fonts.map.load_file(filename, entries, encodings)
--- entries = entries or { }
--- encodings = encodings or { }
--- local f = io.open(filename)
--- if f then
--- local data = f:read("*a")
--- if data then
--- for line in gmatch(data,"(.-)[\n\t]") do
--- if find(line,"^[%#%%%s]") then
--- -- print(line)
--- else
--- local extend, slant, name, fullname, fontfile, encoding
--- line = gsub(line,'"(.+)"', function(s)
--- extend = find(s,'"([^"]+) ExtendFont"')
--- slant = find(s,'"([^"]+) SlantFont"')
--- return ""
--- end)
--- if not name then
--- -- name fullname encoding fontfile
--- name, fullname, encoding, fontfile = match(line,"^(%S+)%s+(%S*)[%s<]+(%S*)[%s<]+(%S*)%s*$")
--- end
--- if not name then
--- -- name fullname (flag) fontfile encoding
--- name, fullname, fontfile, encoding = match(line,"^(%S+)%s+(%S*)[%d%s<]+(%S*)[%s<]+(%S*)%s*$")
--- end
--- if not name then
--- -- name fontfile
--- name, fontfile = match(line,"^(%S+)%s+[%d%s<]+(%S*)%s*$")
--- end
--- if name then
--- if encoding == "" then encoding = nil end
--- entries[name] = {
--- name = name, -- handy
--- fullname = fullname,
--- encoding = encoding,
--- fontfile = fontfile,
--- slant = tonumber(slant),
--- extend = tonumber(extend)
--- }
--- encodings[name] = encoding
--- elseif line ~= "" then
--- -- print(line)
--- end
--- end
--- end
--- end
--- f:close()
--- end
--- return entries, encodings
--- end
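
The commented test lines above still describe the parser well; spelled out, a sketch of what makenameparser yields for the common glyph-name shapes (the second return value says whether a multi-codepoint table was captured):

    local lpegmatch = lpeg.match
    local parser = fonts.map.makenameparser()          -- uniXXXX / uXXXXXX / indexN
    print(lpegmatch(parser, "uni0041"))                -- 65       false
    print(lpegmatch(parser, "u1F600"))                 -- 128512   false
    print(lpegmatch(parser, "index123"))               -- 123      false
    local japan1 = fonts.map.makenameparser("Japan1")  -- ordering-prefixed names
    print(lpegmatch(japan1, "Japan1.123"))             -- 123      false
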
diff --git a/otfl-font-ota.lua b/otfl-font-ota.lua
index 0e5b555..18b0bf2 100644
--- a/otfl-font-ota.lua
+++ b/otfl-font-ota.lua
@@ -17,46 +17,46 @@ local trace_cjk = false trackers.register("cjk.injections", function(v) t
trackers.register("cjk.analyzing","otf.analyzing")
-fonts = fonts or { }
-fonts.analyzers = fonts.analyzers or { }
-fonts.analyzers.initializers = fonts.analyzers.initializers or { node = { otf = { } } }
-fonts.analyzers.methods = fonts.analyzers.methods or { node = { otf = { } } }
+local fonts, nodes = fonts, nodes
+local node = node
local otf = fonts.otf
local tfm = fonts.tfm
-local initializers = fonts.analyzers.initializers
-local methods = fonts.analyzers.methods
+fonts.analyzers = fonts.analyzers or { }
+local analyzers = fonts.analyzers
-local glyph = node.id('glyph')
-local glue = node.id('glue')
-local penalty = node.id('penalty')
+analyzers.initializers = analyzers.initializers or { node = { otf = { } } }
+analyzers.methods = analyzers.methods or { node = { otf = { } } }
+
+local initializers = analyzers.initializers
+local methods = analyzers.methods
+
+local nodecodes = nodes.nodecodes
+local glyph_code = nodecodes.glyph
local set_attribute = node.set_attribute
local has_attribute = node.has_attribute
local traverse_id = node.traverse_id
local traverse_node_list = node.traverse
-local fontdata = fonts.ids
-local state = attributes.private('state')
+local fontdata = fonts.ids
+local state = attributes.private('state')
+local categories = characters and characters.categories or { } -- sorry, only in context
-local fcs = (fonts.color and fonts.color.set) or function() end
-local fcr = (fonts.color and fonts.color.reset) or function() end
+local fontscolors = fonts.colors
+local fcs = (fontscolors and fontscolors.set) or function() end
+local fcr = (fontscolors and fontscolors.reset) or function() end
-local a_to_script = otf.a_to_script
-local a_to_language = otf.a_to_language
-- in the future we will use language/script attributes instead of the
-- font related value, but then we also need dynamic features which is
-- somewhat slower; and .. we need a chain of them
+local scriptandlanguage = otf.scriptandlanguage
+
function fonts.initializers.node.otf.analyze(tfmdata,value,attr)
- local script, language
- if attr and attr > 0 then
- script, language = a_to_script[attr], a_to_language[attr]
- else
- script, language = tfmdata.script, tfmdata.language
- end
+ local script, language = otf.scriptandlanguage(tfmdata,attr)
local action = initializers[script]
if action then
if type(action) == "function" then
@@ -73,12 +73,7 @@ end
function fonts.methods.node.otf.analyze(head,font,attr)
local tfmdata = fontdata[font]
- local script, language
- if attr and attr > 0 then
- script, language = a_to_script[attr], a_to_language[attr]
- else
- script, language = tfmdata.script, tfmdata.language
- end
+ local script, language = otf.scriptandlanguage(tfmdata,attr)
local action = methods[script]
if action then
if type(action) == "function" then
@@ -98,7 +93,7 @@ table.insert(fonts.triggers,"analyze") -- we need a proper function for doing t
-- latin
-fonts.analyzers.methods.latn = fonts.analyzers.aux.setstate
+analyzers.methods.latn = analyzers.aux.setstate
-- this info eventually will go into char-def
@@ -180,8 +175,8 @@ local function warning(current,what)
end
end
-function fonts.analyzers.methods.nocolor(head,font,attr)
- for n in traverse_node_list(head,glyph) do
+function analyzers.methods.nocolor(head,font,attr)
+ for n in traverse_id(glyph_code,head) do
if not font or n.font == font then
fcr(n)
end
@@ -230,15 +225,16 @@ local function finish(first,last)
return first, last
end
-function fonts.analyzers.methods.arab(head,font,attr) -- maybe make a special version with no trace
+function analyzers.methods.arab(head,font,attr) -- maybe make a special version with no trace
+ local useunicodemarks = analyzers.useunicodemarks
local tfmdata = fontdata[font]
local marks = tfmdata.marks
local first, last, current, done = nil, nil, head, false
while current do
- if current.id == glyph and current.subtype<256 and current.font == font and not has_attribute(current,state) then
+ if current.id == glyph_code and current.subtype<256 and current.font == font and not has_attribute(current,state) then
done = true
local char = current.char
- if marks[char] then
+ if marks[char] or (useunicodemarks and categories[char] == "mn") then
set_attribute(current,state,5) -- mark
if trace_analyzing then fcs(current,"font:mark") end
elseif isol[char] then -- can be zwj or zwnj too
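
The new useunicodemarks switch hooks into the mark test just above; a one-line sketch of enabling it — note that the categories table it relies on is only populated in ConTeXt (see the comment near the top of this file), so elsewhere the switch is inert:

    fonts.analyzers.useunicodemarks = true
    -- combining characters (category "mn") are then treated as marks even
    -- when the font's own mark list does not include them
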
diff --git a/otfl-font-otb.lua b/otfl-font-otb.lua
index e0528a4..241845f 100644
--- a/otfl-font-otb.lua
+++ b/otfl-font-otb.lua
@@ -11,8 +11,9 @@ local format, gmatch, gsub, find, match, lower, strip = string.format, string.gm
local type, next, tonumber, tostring = type, next, tonumber, tostring
local lpegmatch = lpeg.match
-local otf = fonts.otf
-local tfm = fonts.tfm
+local fonts = fonts
+local otf = fonts.otf
+local tfm = fonts.tfm
local trace_baseinit = false trackers.register("otf.baseinit", function(v) trace_baseinit = v end)
local trace_singles = false trackers.register("otf.singles", function(v) trace_singles = v end)
@@ -22,6 +23,8 @@ local trace_ligatures = false trackers.register("otf.ligatures", function
local trace_kerns = false trackers.register("otf.kerns", function(v) trace_kerns = v end)
local trace_preparing = false trackers.register("otf.preparing", function(v) trace_preparing = v end)
+local report_prepare = logs.new("otf prepare")
+
local wildcard = "*"
local default = "dflt"
@@ -41,8 +44,20 @@ local function gref(descriptions,n)
local num, nam = { }, { }
for i=1,#n do
local ni = n[i]
- num[i] = format("U+%04X",ni)
- nam[i] = descriptions[ni].name or "?"
+ -- ! ! ! could be a helper ! ! !
+ if type(ni) == "table" then
+ local nnum, nnam = { }, { }
+ for j=1,#ni do
+ local nj = ni[j]
+ nnum[j] = format("U+%04X",nj)
+ nnam[j] = descriptions[nj].name or "?"
+ end
+ num[i] = concat(nnum,"|")
+ nam[i] = concat(nnam,"|")
+ else
+ num[i] = format("U+%04X",ni)
+ nam[i] = descriptions[ni].name or "?"
+ end
end
return format("%s (%s)",concat(num," "), concat(nam," "))
else
@@ -76,7 +91,7 @@ local function resolve_ligatures(tfmdata,ligatures,kind)
local c, f, s = characters[uc], ligs[1], ligs[2]
local uft, ust = unicodes[f] or 0, unicodes[s] or 0
if not uft or not ust then
- logs.report("define otf","%s: unicode problem with base ligature %s = %s + %s",cref(kind),gref(descriptions,uc),gref(descriptions,uft),gref(descriptions,ust))
+ report_prepare("%s: unicode problem with base ligature %s = %s + %s",cref(kind),gref(descriptions,uc),gref(descriptions,uft),gref(descriptions,ust))
-- some kind of error
else
if type(uft) == "number" then uft = { uft } end
@@ -87,7 +102,7 @@ local function resolve_ligatures(tfmdata,ligatures,kind)
local us = ust[usi]
if changed[uf] or changed[us] then
if trace_baseinit and trace_ligatures then
- logs.report("define otf","%s: base ligature %s + %s ignored",cref(kind),gref(descriptions,uf),gref(descriptions,us))
+ report_prepare("%s: base ligature %s + %s ignored",cref(kind),gref(descriptions,uf),gref(descriptions,us))
end
else
local first, second = characters[uf], us
@@ -103,7 +118,7 @@ local function resolve_ligatures(tfmdata,ligatures,kind)
t[second] = { type = 0, char = uc[1] } -- can this still happen?
end
if trace_baseinit and trace_ligatures then
- logs.report("define otf","%s: base ligature %s + %s => %s",cref(kind),gref(descriptions,uf),gref(descriptions,us),gref(descriptions,uc))
+ report_prepare("%s: base ligature %s + %s => %s",cref(kind),gref(descriptions,uf),gref(descriptions,us),gref(descriptions,uc))
end
end
end
@@ -139,7 +154,7 @@ local splitter = lpeg.splitat(" ")
local function prepare_base_substitutions(tfmdata,kind,value) -- we can share some code with the node features
if value then
local otfdata = tfmdata.shared.otfdata
- local validlookups, lookuplist = otf.collect_lookups(otfdata,kind,tfmdata.script,tfmdata.language)
+ local validlookups, lookuplist = otf.collectlookups(otfdata,kind,tfmdata.script,tfmdata.language)
if validlookups then
local ligatures = { }
local unicodes = tfmdata.unicodes -- names to unicodes
@@ -154,12 +169,12 @@ local function prepare_base_substitutions(tfmdata,kind,value) -- we can share so
if pv then
local upv = unicodes[pv]
if upv then
- if type(upv) == "table" then
+ if type(upv) == "table" then -- zero change that table
upv = upv[1]
end
if characters[upv] then
if trace_baseinit and trace_singles then
- logs.report("define otf","%s: base substitution %s => %s",cref(kind,lookup),gref(descriptions,k),gref(descriptions,upv))
+ report_prepare("%s: base substitution %s => %s",cref(kind,lookup),gref(descriptions,k),gref(descriptions,upv))
end
changed[k] = upv
end
@@ -182,12 +197,12 @@ local function prepare_base_substitutions(tfmdata,kind,value) -- we can share so
if pc then
local upc = unicodes[pc]
if upc then
- if type(upc) == "table" then
+ if type(upc) == "table" then -- zero change that table
upc = upc[1]
end
if characters[upc] then
if trace_baseinit and trace_alternatives then
- logs.report("define otf","%s: base alternate %s %s => %s",cref(kind,lookup),tostring(value),gref(descriptions,k),gref(descriptions,upc))
+ report_prepare("%s: base alternate %s %s => %s",cref(kind,lookup),tostring(value),gref(descriptions,k),gref(descriptions,upc))
end
changed[k] = upc
end
@@ -202,7 +217,7 @@ local function prepare_base_substitutions(tfmdata,kind,value) -- we can share so
local upc = { lpegmatch(splitter,pc) }
for i=1,#upc do upc[i] = unicodes[upc[i]] end
-- we assume that it's no table
- logs.report("define otf","%s: base ligature %s => %s",cref(kind,lookup),gref(descriptions,upc),gref(descriptions,k))
+ report_prepare("%s: base ligature %s => %s",cref(kind,lookup),gref(descriptions,upc),gref(descriptions,k))
end
ligatures[#ligatures+1] = { pc, k }
end
@@ -248,10 +263,10 @@ local function prepare_base_substitutions(tfmdata,kind,value) -- we can share so
end
end
-local function prepare_base_kerns(tfmdata,kind,value) -- todo what kind of kerns, currently all
+local function preparebasekerns(tfmdata,kind,value) -- todo what kind of kerns, currently all
if value then
local otfdata = tfmdata.shared.otfdata
- local validlookups, lookuplist = otf.collect_lookups(otfdata,kind,tfmdata.script,tfmdata.language)
+ local validlookups, lookuplist = otf.collectlookups(otfdata,kind,tfmdata.script,tfmdata.language)
if validlookups then
local unicodes = tfmdata.unicodes -- names to unicodes
local indices = tfmdata.indices
@@ -261,7 +276,7 @@ local function prepare_base_kerns(tfmdata,kind,value) -- todo what kind of kerns
for u, chr in next, characters do
local d = descriptions[u]
if d then
- local dk = d.mykerns -- shared
+ local dk = d.kerns -- shared
if dk then
local s = sharedkerns[dk]
if s == false then
@@ -278,7 +293,7 @@ local function prepare_base_kerns(tfmdata,kind,value) -- todo what kind of kerns
if v ~= 0 and not t[k] then -- maybe no 0 test here
t[k], done = v, true
if trace_baseinit and trace_kerns then
- logs.report("define otf","%s: base kern %s + %s => %s",cref(kind,lookup),gref(descriptions,u),gref(descriptions,k),v)
+ report_prepare("%s: base kern %s + %s => %s",cref(kind,lookup),gref(descriptions,u),gref(descriptions,k),v)
end
end
end
@@ -318,10 +333,10 @@ local supported_gpos = {
'kern'
}
-function otf.features.register_base_substitution(tag)
+function otf.features.registerbasesubstitution(tag)
supported_gsub[#supported_gsub+1] = tag
end
-function otf.features.register_base_kern(tag)
+function otf.features.registerbasekern(tag)
     supported_gpos[#supported_gpos+1] = tag
end
@@ -345,7 +360,7 @@ function fonts.initializers.base.otf.features(tfmdata,value)
for f=1,#supported_gpos do
local feature = supported_gpos[f]
local value = features[feature]
- prepare_base_kerns(tfmdata,feature,features[feature])
+ preparebasekerns(tfmdata,feature,features[feature])
if value then
h[#h+1] = feature .. "=" .. tostring(value)
end
@@ -364,10 +379,10 @@ function fonts.initializers.base.otf.features(tfmdata,value)
-- verbose name as long as we don't use <()<>[]{}/%> and the length
-- is < 128.
tfmdata.fullname = tfmdata.fullname .. "-" .. base -- tfmdata.psname is the original
- --~ logs.report("otf define","fullname base hash: '%s', featureset '%s'",tfmdata.fullname,hash)
+ --~ report_prepare("fullname base hash: '%s', featureset '%s'",tfmdata.fullname,hash)
end
if trace_preparing then
- logs.report("otf define","preparation time is %0.3f seconds for %s",os.clock()-t,tfmdata.fullname or "?")
+ report_prepare("preparation time is %0.3f seconds for %s",os.clock()-t,tfmdata.fullname or "?")
end
end
end
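
Extra GSUB features can be made available to base mode through the renamed registration helper; a minimal sketch ("ss01" is just an example tag):

    fonts.otf.features.registerbasesubstitution("ss01")
    -- the tag is appended to supported_gsub and from then on handled by
    -- prepare_base_substitutions like the built-in features
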
diff --git a/otfl-font-otc.lua b/otfl-font-otc.lua
index 35555ed..cc7f0ab 100644
--- a/otfl-font-otc.lua
+++ b/otfl-font-otc.lua
@@ -13,8 +13,10 @@ local type, next = type, next
local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
-local otf = fonts.otf
-local tfm = fonts.tfm
+local fonts = fonts
+local otf = fonts.otf
+
+local report_otf = logs.new("load otf")
-- instead of "script = "DFLT", langs = { 'dflt' }" we now use wildcards (we used to
-- have always); some day we can write a "force always when true" trick for other
@@ -22,6 +24,12 @@ local tfm = fonts.tfm
--
-- we could have a tnum variant as well
+-- In the userdata interface we can no longer tweak the loaded font as
+-- conveniently as before. For instance, instead of pushing extra data
+-- into the table using the original structure, we now have to operate on
+-- the mkiv representation. And as the fontloader interface is modelled
+-- after fontforge we cannot change that one too much either.
+
local extra_lists = {
tlig = {
{
@@ -76,142 +84,157 @@ local extra_lists = {
local extra_features = { -- maybe just 1..n so that we prescribe order
tlig = {
{
- features = { { scripts = { { script = "*", langs = { "*" }, } }, tag = "tlig", comment = "added bij mkiv" }, },
+ features = { ["*"] = { ["*"] = true } },
name = "ctx_tlig_1",
- subtables = { { name = "ctx_tlig_1_s" } },
+ subtables = { "ctx_tlig_1_s" },
type = "gsub_ligature",
flags = { },
},
},
trep = {
{
- features = { { scripts = { { script = "*", langs = { "*" }, } }, tag = "trep", comment = "added bij mkiv" }, },
+ features = { ["*"] = { ["*"] = true } },
name = "ctx_trep_1",
- subtables = { { name = "ctx_trep_1_s" } },
+ subtables = { "ctx_trep_1_s" },
type = "gsub_single",
flags = { },
},
},
anum = {
{
- features = { { scripts = { { script = "arab", langs = { "dflt", "ARA" }, } }, tag = "anum", comment = "added bij mkiv" }, },
+ features = { arab = { FAR = true, dflt = true } },
name = "ctx_anum_1",
- subtables = { { name = "ctx_anum_1_s" } },
+ subtables = { "ctx_anum_1_s" },
type = "gsub_single",
flags = { },
},
{
- features = { { scripts = { { script = "arab", langs = { "FAR" }, } }, tag = "anum", comment = "added bij mkiv" }, },
+ features = { arab = { FAR = true } },
name = "ctx_anum_2",
- subtables = { { name = "ctx_anum_2_s" } },
+ subtables = { "ctx_anum_2_s" },
type = "gsub_single",
flags = { },
},
},
}
-fonts.otf.enhancers["add some missing characters"] = function(data,filename)
- -- todo
-end
-
-fonts.otf.enhancers["enrich with features"] = function(data,filename)
- -- could be done elsewhere (true can be #)
- local used = { }
- for i=1,#otf.glists do
- local g = data[otf.glists[i]]
- if g then
- for i=1,#g do
- local f = g[i].features
- if f then
- for i=1,#f do
- local t = f[i].tag
- if t then used[t] = true end
- end
- end
- end
- end
- end
- --
+local function enhancedata(data,filename,raw)
+ local luatex = data.luatex
+ local lookups = luatex.lookups
+ local sequences = luatex.sequences
local glyphs = data.glyphs
- local indices = data.map.map
- data.gsub = data.gsub or { }
+ local indices = luatex.indices
+ local gsubfeatures = luatex.features.gsub
for kind, specifications in next, extra_features do
- if not used[kind] then
+        if gsubfeatures and gsubfeatures[kind] then
+ -- already present
+ else
local done = 0
for s=1,#specifications do
local added = false
local specification = specifications[s]
+ local features, subtables = specification.features, specification.subtables
+ local name, type, flags = specification.name, specification.type, specification.flags
+ local full = subtables[1]
local list = extra_lists[kind][s]
- local name = specification.name .. "_s"
- if specification.type == "gsub_ligature" then
+ if type == "gsub_ligature" then
+ -- inefficient loop
for unicode, index in next, indices do
local glyph = glyphs[index]
local ligature = list[glyph.name]
if ligature then
- local o = glyph.lookups or { }
- -- o[name] = { "ligature", ligature, glyph.name }
- o[name] = {
- {
- ["type"] = "ligature",
- ["specification"] = {
- char = glyph.name,
- components = ligature,
- }
- }
- }
- glyph.lookups, done, added = o, done+1, true
+ if glyph.slookups then
+ glyph.slookups [full] = { "ligature", ligature, glyph.name }
+ else
+ glyph.slookups = { [full] = { "ligature", ligature, glyph.name } }
+ end
+ done, added = done+1, true
end
end
- elseif specification.type == "gsub_single" then
+ elseif type == "gsub_single" then
+ -- inefficient loop
for unicode, index in next, indices do
local glyph = glyphs[index]
local r = list[unicode]
if r then
local replacement = indices[r]
if replacement and glyphs[replacement] then
- local o = glyph.lookups or { }
- -- o[name] = { { "substitution", glyphs[replacement].name } }
- o[name] = {
- {
- ["type"] = "substitution",
- ["specification"] = {
- variant = glyphs[replacement].name,
- }
- }
- }
- glyph.lookups, done, added = o, done+1, true
+ if glyph.slookups then
+ glyph.slookups [full] = { "substitution", glyphs[replacement].name }
+ else
+ glyph.slookups = { [full] = { "substitution", glyphs[replacement].name } }
+ end
+ done, added = done+1, true
end
end
end
end
if added then
- insert(data.gsub,s,table.fastcopy(specification)) -- right order
+ sequences[#sequences+1] = {
+ chain = 0,
+ features = { [kind] = features },
+ flags = flags,
+ name = name,
+ subtables = subtables,
+ type = type,
+ }
+ -- register in metadata (merge as there can be a few)
+ if not gsubfeatures then
+ gsubfeatures = { }
+ luatex.features.gsub = gsubfeatures
+ end
+ local k = gsubfeatures[kind]
+ if not k then
+ k = { }
+ gsubfeatures[kind] = k
+ end
+ for script, languages in next, features do
+ local kk = k[script]
+ if not kk then
+ kk = { }
+ k[script] = kk
+ end
+ for language, value in next, languages do
+ kk[language] = value
+ end
+ end
end
end
if done > 0 then
if trace_loading then
- logs.report("load otf","enhance: registering %s feature (%s glyphs affected)",kind,done)
+ report_otf("enhance: registering %s feature (%s glyphs affected)",kind,done)
end
end
end
end
end
-otf.tables.features['tlig'] = 'TeX Ligatures'
-otf.tables.features['trep'] = 'TeX Replacements'
-otf.tables.features['anum'] = 'Arabic Digits'
+otf.enhancers.register("check extra features",enhancedata)
-otf.features.register_base_substitution('tlig')
-otf.features.register_base_substitution('trep')
-otf.features.register_base_substitution('anum')
+local features = otf.tables.features
+
+features['tlig'] = 'TeX Ligatures'
+features['trep'] = 'TeX Replacements'
+features['anum'] = 'Arabic Digits'
+
+local registerbasesubstitution = otf.features.registerbasesubstitution
+
+registerbasesubstitution('tlig')
+registerbasesubstitution('trep')
+registerbasesubstitution('anum')
-- the functionality is defined elsewhere
-fonts.initializers.base.otf.equaldigits = fonts.initializers.common.equaldigits
-fonts.initializers.node.otf.equaldigits = fonts.initializers.common.equaldigits
+local initializers = fonts.initializers
+local common_initializers = initializers.common
+local base_initializers = initializers.base.otf
+local node_initializers = initializers.node.otf
+
+base_initializers.equaldigits = common_initializers.equaldigits
+node_initializers.equaldigits = common_initializers.equaldigits
-fonts.initializers.base.otf.lineheight = fonts.initializers.common.lineheight
-fonts.initializers.node.otf.lineheight = fonts.initializers.common.lineheight
+base_initializers.lineheight = common_initializers.lineheight
+node_initializers.lineheight = common_initializers.lineheight
-fonts.initializers.base.otf.compose = fonts.initializers.common.compose
-fonts.initializers.node.otf.compose = fonts.initializers.common.compose
+base_initializers.compose = common_initializers.compose
+node_initializers.compose = common_initializers.compose
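In the new layout a feature specification is a plain hash of scripts to sets of languages plus a flat list of subtable names, and the per-glyph work ends up in glyph.slookups keyed by the subtable name. A minimal sketch of one more entry; the "xlig" tag and the glyph names are hypothetical, only the field shapes are taken from the code above.

    -- hypothetical extra feature in the new format (same shape as the
    -- extra_features entries above)
    local xlig_specification = {
        features  = { ["*"] = { ["*"] = true } }, -- any script, any language
        name      = "ctx_xlig_1",
        subtables = { "ctx_xlig_1_s" },
        type      = "gsub_ligature",
        flags     = { },
    }

    -- per glyph the single-step lookup is stored under the subtable name,
    -- e.g. for a hypothetical f_f ligature built from "f f":
    -- glyph.slookups["ctx_xlig_1_s"] = { "ligature", "f f", "f_f" }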
diff --git a/otfl-font-otd.lua b/otfl-font-otd.lua
index 46899fd..910725a 100644
--- a/otfl-font-otd.lua
+++ b/otfl-font-otd.lua
@@ -6,25 +6,28 @@ if not modules then modules = { } end modules ['font-otd'] = {
license = "see context related readme files"
}
-local trace_dynamics = false trackers.register("otf.dynamics", function(v) trace_dynamics = v end)
+local trace_dynamics = false trackers.register("otf.dynamics", function(v) trace_dynamics = v end)
-fonts = fonts or { }
-fonts.otf = fonts.otf or { }
+local report_otf = logs.new("load otf")
-local otf = fonts.otf
-local fontdata = fonts.ids
+local fonts = fonts
+local otf = fonts.otf
+local fontdata = fonts.ids
otf.features = otf.features or { }
otf.features.default = otf.features.default or { }
-local context_setups = fonts.define.specify.context_setups
-local context_numbers = fonts.define.specify.context_numbers
+local definers = fonts.definers
+local contextsetups = definers.specifiers.contextsetups
+local contextnumbers = definers.specifiers.contextnumbers
-local a_to_script = { } otf.a_to_script = a_to_script
-local a_to_language = { } otf.a_to_language = a_to_language
+-- todo: dynamics namespace
-function otf.set_dynamics(font,dynamics,attribute)
- local features = context_setups[context_numbers[attribute]] -- can be moved to caller
+local a_to_script = { }
+local a_to_language = { }
+
+function otf.setdynamics(font,dynamics,attribute)
+ local features = contextsetups[contextnumbers[attribute]] -- can be moved to caller
if features then
local script = features.script or 'dflt'
local language = features.language or 'dflt'
@@ -41,7 +44,7 @@ function otf.set_dynamics(font,dynamics,attribute)
local dsla = dsl[attribute]
if dsla then
-- if trace_dynamics then
- -- logs.report("otf define","using dynamics %s: attribute %s, script %s, language %s",context_numbers[attribute],attribute,script,language)
+ -- report_otf("using dynamics %s: attribute %s, script %s, language %s",contextnumbers[attribute],attribute,script,language)
-- end
return dsla
else
@@ -56,14 +59,15 @@ function otf.set_dynamics(font,dynamics,attribute)
features = tfmdata.shared.features
}
tfmdata.mode = "node"
+ tfmdata.dynamics = true -- handy for tracing
tfmdata.language = language
tfmdata.script = script
tfmdata.shared.features = { }
-- end of save
- local set = fonts.define.check(features,otf.features.default)
- dsla = otf.set_features(tfmdata,set)
+ local set = definers.check(features,otf.features.default)
+ dsla = otf.setfeatures(tfmdata,set)
if trace_dynamics then
- logs.report("otf define","setting dynamics %s: attribute %s, script %s, language %s, set: %s",context_numbers[attribute],attribute,script,language,table.sequenced(set))
+ report_otf("setting dynamics %s: attribute %s, script %s, language %s, set: %s",contextnumbers[attribute],attribute,script,language,table.sequenced(set))
end
-- we need to restore some values
tfmdata.script = saved.script
@@ -77,3 +81,11 @@ function otf.set_dynamics(font,dynamics,attribute)
end
return nil -- { }
end
+
+function otf.scriptandlanguage(tfmdata,attr)
+ if attr and attr > 0 then
+ return a_to_script[attr] or tfmdata.script, a_to_language[attr] or tfmdata.language
+ else
+ return tfmdata.script, tfmdata.language
+ end
+end
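A minimal sketch of how a node-mode handler might consult the renamed dynamics interface; the font id, attribute value and dynamics cache are placeholders, only the two function names and their arguments come from the code above.

    local otf      = fonts.otf
    local fontdata = fonts.ids

    local function handle(head,font,attr)
        local tfmdata  = fontdata[font]
        local dynamics = { } -- placeholder for whatever per-font cache the caller keeps
        local features = attr and attr > 0 and otf.setdynamics(font,dynamics,attr) or nil
        local script, language = otf.scriptandlanguage(tfmdata,attr)
        -- ... walk head and apply the features for this script and language ...
        return head
    end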
diff --git a/otfl-font-otf.lua b/otfl-font-otf.lua
index fe9cd51..7656f13 100644
--- a/otfl-font-otf.lua
+++ b/otfl-font-otf.lua
@@ -6,97 +6,153 @@ if not modules then modules = { } end modules ['font-otf'] = {
license = "see context related readme files"
}
+-- langs -> languages etc.
+-- anchor_classes vs kernclasses
+-- modification/creationtime in subfont is runtime, so useless
+-- to_table -> totable
+
local utf = unicode.utf8
-local concat, utfbyte = table.concat, utf.byte
+local utfbyte = utf.byte
local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
local type, next, tonumber, tostring = type, next, tonumber, tostring
local abs = math.abs
local getn = table.getn
local lpegmatch = lpeg.match
+local reversed, concat = table.reversed, table.concat
+local ioflush = io.flush
-local trace_private = false trackers.register("otf.private", function(v) trace_private = v end)
-local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
-local trace_features = false trackers.register("otf.features", function(v) trace_features = v end)
-local trace_dynamics = false trackers.register("otf.dynamics", function(v) trace_dynamics = v end)
-local trace_sequences = false trackers.register("otf.sequences", function(v) trace_sequences = v end)
-local trace_math = false trackers.register("otf.math", function(v) trace_math = v end)
-local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end)
+local allocate = utilities.storage.allocate
---~ trackers.enable("otf.loading")
+local trace_private = false trackers.register("otf.private", function(v) trace_private = v end)
+local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
+local trace_features = false trackers.register("otf.features", function(v) trace_features = v end)
+local trace_dynamics = false trackers.register("otf.dynamics", function(v) trace_dynamics = v end)
+local trace_sequences = false trackers.register("otf.sequences", function(v) trace_sequences = v end)
+local trace_math = false trackers.register("otf.math", function(v) trace_math = v end)
+local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end)
---[[ldx--
-<p>The fontforge table has organized lookups in a certain way. A first implementation
-of this code was organized featurewise: information related to features was
-collected and processing boiled down to a run over the features. The current
-implementation honors the order in the main feature table. Since we can reorder this
-table as we want, we can eventually support several models of processing. We kept
-the static as well as dynamic feature processing, because it had proved to be
-rather useful. The formerly three loop variants have beem discarded but will
-reapear at some time.</p>
-
-<itemize>
-<item>we loop over all lookups</item>
-<item>for each lookup we do a run over the list of glyphs</item>
-<item>but we only process them for features that are enabled</item>
-<item>if we're dealing with a contextual lookup, we loop over all contexts</item>
-<item>in that loop we quit at a match and then process the list of sublookups</item>
-<item>we always continue after the match</item>
-</itemize>
-
-<p>In <l n='context'/> we do this for each font that is used in a list, so in
-practice we have quite some nested loops.</p>
-
-<p>We process the whole list and then consult the glyph nodes. An alternative approach
-is to collect strings of characters using the same font including spaces (because some
-lookups involve spaces). However, we then need to reconstruct the list which is no fun.
-Also, we need to carry quite some information, like attributes, so eventually we don't
-gain much (if we gain something at all).</p>
-
-<p>Another consideration has been to operate on sublists (subhead, subtail) but again
-this would complicate matters as we then neext to keep track of a changing subhead
-and subtail. On the other hand, this might save some runtime. The number of changes
-involved is not that large. This only makes sense when we have many fonts in a list
-and don't change to frequently.</p>
---ldx]]--
+local report_otf = logs.new("load otf")
-fonts = fonts or { }
-fonts.otf = fonts.otf or { }
-fonts.tfm = fonts.tfm or { }
+local starttiming, stoptiming, elapsedtime = statistics.starttiming, statistics.stoptiming, statistics.elapsedtime
+
+local fonts = fonts
+fonts.otf = fonts.otf or { }
local otf = fonts.otf
local tfm = fonts.tfm
local fontdata = fonts.ids
-
-otf.tables = otf.tables or { } -- defined in font-ott.lua
-otf.meanings = otf.meanings or { } -- defined in font-ott.lua
-otf.tables.features = otf.tables.features or { } -- defined in font-ott.lua
-otf.tables.languages = otf.tables.languages or { } -- defined in font-ott.lua
-otf.tables.scripts = otf.tables.scripts or { } -- defined in font-ott.lua
+local chardata = characters and characters.data -- not used
otf.features = otf.features or { }
otf.features.list = otf.features.list or { }
otf.features.default = otf.features.default or { }
-otf.enhancers = otf.enhancers or { }
+otf.enhancers = allocate()
+local enhancers = otf.enhancers
+enhancers.patches = { }
+
+local definers = fonts.definers
+
otf.glists = { "gsub", "gpos" }
-otf.version = 2.653 -- beware: also sync font-mis.lua
-otf.pack = true -- beware: also sync font-mis.lua
-otf.syncspace = true
-otf.notdef = false
+otf.version = 2.706 -- beware: also sync font-mis.lua
otf.cache = containers.define("fonts", "otf", otf.version, true)
-otf.cleanup_aat = false -- only context
-local wildcard = "*"
-local default = "dflt"
+local loadmethod = "table" -- table, mixed, sparse
+local forceload = false
+local cleanup = 0
+local usemetatables = false -- .4 slower on mk but 30 M less mem so we might change the default -- will be directive
+local packdata = true
+local syncspace = true
+local forcenotdef = false
+
+local wildcard = "*"
+local default = "dflt"
+
+local fontloaderfields = fontloader.fields
+local mainfields = nil
+local glyphfields = nil -- not used yet
+
+directives.register("fonts.otf.loader.method", function(v)
+ if v == "sparse" and fontloaderfields then
+ loadmethod = "sparse"
+ elseif v == "mixed" then
+ loadmethod = "mixed"
+ elseif v == "table" then
+ loadmethod = "table"
+ else
+ loadmethod = "table"
+ report_otf("no loader method '%s', using '%s' instead",v,loadmethod)
+ end
+end)
+
+directives.register("fonts.otf.loader.cleanup",function(v)
+ cleanup = tonumber(v) or (v and 1) or 0
+end)
+
+directives.register("fonts.otf.loader.force", function(v) forceload = v end)
+directives.register("fonts.otf.loader.usemetatables", function(v) usemetatables = v end)
+directives.register("fonts.otf.loader.pack", function(v) packdata = v end)
+directives.register("fonts.otf.loader.syncspace", function(v) syncspace = v end)
+directives.register("fonts.otf.loader.forcenotdef", function(v) forcenotdef = v end)
+
+local function load_featurefile(raw,featurefile)
+ if featurefile and featurefile ~= "" then
+ if trace_loading then
+ report_otf("featurefile: %s", featurefile)
+ end
+ fontloader.apply_featurefile(raw, featurefile)
+ end
+end
+
+local function showfeatureorder(otfdata,filename)
+ local sequences = otfdata.luatex.sequences
+ if sequences and #sequences > 0 then
+ if trace_loading then
+ report_otf("font %s has %s sequences",filename,#sequences)
+ report_otf(" ")
+ end
+ for nos=1,#sequences do
+ local sequence = sequences[nos]
+ local typ = sequence.type or "no-type"
+ local name = sequence.name or "no-name"
+ local subtables = sequence.subtables or { "no-subtables" }
+ local features = sequence.features
+ if trace_loading then
+ report_otf("%3i %-15s %-20s [%s]",nos,name,typ,concat(subtables,","))
+ end
+ if features then
+ for feature, scripts in next, features do
+ local tt = { }
+ for script, languages in next, scripts do
+ local ttt = { }
+ for language, _ in next, languages do
+ ttt[#ttt+1] = language
+ end
+ tt[#tt+1] = format("[%s: %s]",script,concat(ttt," "))
+ end
+ if trace_loading then
+ report_otf(" %s: %s",feature,concat(tt," "))
+ end
+ end
+ end
+ end
+ if trace_loading then
+ report_otf("\n")
+ end
+ elseif trace_loading then
+ report_otf("font %s has no sequences",filename)
+ end
+end
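The entries that showfeatureorder walks have the shape built later by the subtable reorganization (and by the font-otc enhancer): type, chain, name, subtables, flags and a features hash going from feature tag to script to language set. A sketch of one such record with hypothetical names:

    local sequence = {
        type      = "gsub_ligature",
        chain     = 0,                        -- 1 / -1 for (reverse) context chains
        name      = "some_liga_lookup",       -- hypothetical lookup name
        subtables = { "some_liga_lookup_s" },
        flags     = { false, false, false, false },
        features  = {
            liga = {                          -- feature tag
                latn = { dflt = true },       -- script -> set of languages
            },
        },
    }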
--[[ldx--
<p>We start with a lot of tables and related functions.</p>
--ldx]]--
-otf.tables.global_fields = table.tohash {
+local global_fields = table.tohash {
+ "metadata",
"lookups",
"glyphs",
"subfonts",
@@ -107,20 +163,20 @@ otf.tables.global_fields = table.tohash {
"names",
"unicodes",
"names",
---~ "math",
+ -- "math",
"anchor_classes",
"kern_classes",
"gpos",
"gsub"
}
-otf.tables.valid_fields = {
- "anchor_classes",
+local valid_fields = table.tohash {
+ -- "anchor_classes",
"ascent",
- "cache_version",
+ -- "cache_version",
"cidinfo",
"copyright",
- "creationtime",
+ -- "creationtime",
"descent",
"design_range_bottom",
"design_range_top",
@@ -132,23 +188,23 @@ otf.tables.valid_fields = {
"fontstyle_id",
"fontstyle_name",
"fullname",
- "glyphs",
+ -- "glyphs",
"hasvmetrics",
"head_optimized_for_cleartype",
"horiz_base",
"issans",
"isserif",
"italicangle",
- "kerns",
- "lookups",
+ -- "kerns",
+ -- "lookups",
-- "luatex",
"macstyle",
- "modificationtime",
+ -- "modificationtime",
"onlybitmaps",
"origname",
"os2_version",
- "pfminfo",
- "private",
+ -- "pfminfo",
+ -- "private",
"serifcheck",
"sfd_version",
-- "size",
@@ -165,65 +221,116 @@ otf.tables.valid_fields = {
"upos",
"use_typo_metrics",
"uwidth",
- "validation_state",
+ -- "validation_state",
"verbose",
"version",
"vert_base",
"weight",
"weight_width_slope_only",
- "xuid",
+ -- "xuid",
+}
+
+local ordered_enhancers = {
+ "prepare tables",
+ "prepare glyphs",
+ "prepare unicodes",
+ "prepare lookups",
+
+ "analyze glyphs",
+ "analyze math",
+
+ "prepare tounicode", -- maybe merge with prepare
+
+ "reorganize lookups",
+ "reorganize mark classes",
+ "reorganize anchor classes",
+
+ "reorganize glyph kerns",
+ "reorganize glyph lookups",
+ "reorganize glyph anchors",
+
+ "reorganize features",
+ "reorganize subtables",
+
+ "check glyphs",
+ "check metadata",
+ "check math parameters",
+ "check extra features", -- after metadata
}
--[[ldx--
<p>Here we go.</p>
--ldx]]--
-local function load_featurefile(ff,featurefile)
- if featurefile then
- featurefile = resolvers.find_file(file.addsuffix(featurefile,'fea'),'fea')
- if featurefile and featurefile ~= "" then
- if trace_loading then
- logs.report("load otf", "featurefile: %s", featurefile)
- end
- fontloader.apply_featurefile(ff, featurefile)
+local actions = { }
+
+enhancers.patches.before = allocate()
+enhancers.patches.after = allocate()
+
+local before = enhancers.patches.before
+local after = enhancers.patches.after
+
+local function enhance(name,data,filename,raw,verbose)
+ local enhancer = actions[name]
+ if enhancer then
+ if verbose then
+ report_otf("enhance: %s (%s)",name,filename)
+ ioflush()
end
+ enhancer(data,filename,raw)
+ else
+ report_otf("enhance: %s is undefined",name)
end
end
-function otf.enhance(name,data,filename,verbose)
- local enhancer = otf.enhancers[name]
- if enhancer then
- if (verbose ~= nil and verbose) or trace_loading then
- logs.report("load otf","enhance: %s (%s)",name,filename)
+function enhancers.apply(data,filename,raw,verbose)
+ local basename = file.basename(lower(filename))
+ report_otf("start enhancing: %s",filename)
+ ioflush() -- we want instant messages
+ for e=1,#ordered_enhancers do
+ local enhancer = ordered_enhancers[e]
+ local b = before[enhancer]
+ if b then
+ for pattern, action in next, b do
+ if find(basename,pattern) then
+ action(data,filename,raw)
+ end
+ end
+ end
+ enhance(enhancer,data,filename,raw,verbose)
+ local a = after[enhancer]
+ if a then
+ for pattern, action in next, a do
+ if find(basename,pattern) then
+ action(data,filename,raw)
+ end
+ end
end
- enhancer(data,filename)
+ ioflush() -- we want instant messages
end
+ report_otf("stop enhancing")
+ ioflush() -- we want instant messages
end
-local enhancers = {
- -- pack and unpack are handled separately; they might even be moved
- -- away from the enhancers namespace
- "patch bugs",
- "merge cid fonts", "prepare unicode", "cleanup ttf tables", "compact glyphs", "reverse coverage",
- "cleanup aat", "enrich with features", "add some missing characters",
- "reorganize mark classes",
- "reorganize kerns", -- moved here
- "flatten glyph lookups", "flatten anchor tables", "flatten feature tables",
- "simplify glyph lookups", -- some saving
- "prepare luatex tables",
- "analyse features", "rehash features",
- "analyse anchors", "analyse marks", "analyse unicodes", "analyse subtables",
- "check italic correction","check math",
- "share widths",
- "strip not needed data",
- "migrate metadata",
- "check math parameters",
-}
+-- enhancers.patches.register("before","migrate metadata","cambria",function() end)
+
+function enhancers.patches.register(what,where,pattern,action)
+    local pw = enhancers.patches[what] -- what is "before" or "after"
+    if pw then
+        local ww = pw[where]
+        if ww then
+            ww[pattern] = action
+        else
+            pw[where] = { [pattern] = action }
+        end
+    end
+end
+
+function enhancers.register(what,action) -- only already registered can be overloaded
+ actions[what] = action
+end
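Named actions are looked up by the strings in ordered_enhancers, so extending the loader boils down to registering a function under such a name (this is what otfl-font-otc.lua does for its "check extra features" step), while patches hook a file name pattern before or after a given step. A minimal sketch; the font name pattern is hypothetical.

    -- a named action receives the new table, the file name and the raw
    -- fontloader data
    otf.enhancers.register("check extra features", function(data,filename,raw)
        -- inspect or extend data.luatex here
    end)

    -- a patch tied to a hypothetical font, run before one of the steps
    -- listed in ordered_enhancers
    otf.enhancers.patches.register("before","check metadata","somefontname",
        function(data,filename,raw)
            -- tweak data for this particular font
        end
    )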
function otf.load(filename,format,sub,featurefile)
local name = file.basename(file.removesuffix(filename))
local attr = lfs.attributes(filename)
- local size, time = attr.size or 0, attr.modification or 0
+ local size, time = attr and attr.size or 0, attr and attr.modification or 0
if featurefile then
local fattr = lfs.attributes(featurefile)
local fsize, ftime = fattr and fattr.size or 0, fattr and fattr.modification or 0
@@ -235,151 +342,467 @@ function otf.load(filename,format,sub,featurefile)
hash = hash .. "-" .. sub
end
hash = containers.cleanname(hash)
+ local featurefiles
+ if featurefile then
+ featurefiles = { }
+ for s in gmatch(featurefile,"[^,]+") do
+ local name = resolvers.findfile(file.addsuffix(s,'fea'),'fea') or ""
+ if name == "" then
+ report_otf("loading: no featurefile '%s'",s)
+ else
+ local attr = lfs.attributes(name)
+ featurefiles[#featurefiles+1] = {
+ name = name,
+ size = attr.size or 0,
+ time = attr.modification or 0,
+ }
+ end
+ end
+ if #featurefiles == 0 then
+ featurefiles = nil
+ end
+ end
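otf.load now takes a comma separated list of feature files; each one is resolved with resolvers.findfile and fingerprinted by size and modification time, so a change in any of them invalidates the cache. A minimal usage sketch with hypothetical file names:

    local data = fonts.otf.load("somefont.otf","otf",nil,"ligatures.fea,oldstyle.fea")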
local data = containers.read(otf.cache,hash)
- if not data or data.verbose ~= fonts.verbose or data.size ~= size or data.time ~= time then
- logs.report("load otf","loading: %s (hash: %s)",filename,hash)
- local ff, messages
+ local reload = not data or data.verbose ~= fonts.verbose or data.size ~= size or data.time ~= time
+ if forceload then
+ report_otf("loading: forced reload due to hard coded flag")
+ reload = true
+ end
+ if not reload then
+ local featuredata = data.featuredata
+ if featurefiles then
+ if not featuredata or #featuredata ~= #featurefiles then
+ reload = true
+ else
+ for i=1,#featurefiles do
+ local fi, fd = featurefiles[i], featuredata[i]
+ if fi.name ~= fd.name or fi.size ~= fd.size or fi.time ~= fd.time then
+ reload = true
+ break
+ end
+ end
+ end
+ elseif featuredata then
+ reload = true
+ end
+ if reload then
+ report_otf("loading: forced reload due to changed featurefile specification: %s",featurefile or "--")
+ end
+ end
+ if reload then
+ report_otf("loading: %s (hash: %s)",filename,hash)
+ local fontdata, messages, rawdata
if sub then
- ff, messages = fontloader.open(filename,sub)
+ fontdata, messages = fontloader.open(filename,sub)
else
- ff, messages = fontloader.open(filename)
+ fontdata, messages = fontloader.open(filename)
+ end
+ if fontdata then
+ mainfields = mainfields or (fontloaderfields and fontloaderfields(fontdata))
end
if trace_loading and messages and #messages > 0 then
if type(messages) == "string" then
- logs.report("load otf","warning: %s",messages)
+ report_otf("warning: %s",messages)
else
for m=1,#messages do
- logs.report("load otf","warning: %s",tostring(messages[m]))
+ report_otf("warning: %s",tostring(messages[m]))
end
end
else
- logs.report("load otf","font loaded okay")
- end
- if ff then
- load_featurefile(ff,featurefile)
- data = fontloader.to_table(ff)
- fontloader.close(ff)
- if data then
- logs.report("load otf","file size: %s", size)
- logs.report("load otf","enhancing ...")
- for e=1,#enhancers do
- otf.enhance(enhancers[e],data,filename)
- io.flush() -- we want instant messages
+ report_otf("font loaded okay")
+ end
+ if fontdata then
+ if featurefiles then
+ for i=1,#featurefiles do
+ load_featurefile(fontdata,featurefiles[i].name)
end
- if otf.pack and not fonts.verbose then
- otf.enhance("pack",data,filename)
+ end
+ report_otf("loading method: %s",loadmethod)
+ if loadmethod == "sparse" then
+ rawdata = fontdata
+ else
+ rawdata = fontloader.to_table(fontdata)
+ fontloader.close(fontdata)
+ end
+ if rawdata then
+ data = { }
+ starttiming(data)
+ local verboseindeed = verbose ~= nil and verbose or trace_loading
+ report_otf("file size: %s", size)
+ enhancers.apply(data,filename,rawdata,verboseindeed)
+ if packdata and not fonts.verbose then
+ enhance("pack",data,filename,nil,verboseindeed)
end
data.size = size
data.time = time
+ if featurefiles then
+ data.featuredata = featurefiles
+ end
data.verbose = fonts.verbose
- logs.report("load otf","saving in cache: %s",filename)
+ report_otf("saving in cache: %s",filename)
data = containers.write(otf.cache, hash, data)
- collectgarbage("collect")
+ if cleanup > 0 then
+ collectgarbage("collect")
+ end
+ stoptiming(data)
+ if elapsedtime then -- not in generic
+ report_otf("preprocessing and caching took %s seconds",elapsedtime(data))
+ end
data = containers.read(otf.cache, hash) -- this frees the old table and load the sparse one
- collectgarbage("collect")
+ if cleanup > 1 then
+ collectgarbage("collect")
+ end
else
- logs.report("load otf","loading failed (table conversion error)")
+ data = nil
+ report_otf("loading failed (table conversion error)")
+ end
+ if loadmethod == "sparse" then
+ fontloader.close(fontdata)
+ if cleanup > 2 then
+ -- collectgarbage("collect")
+ end
end
else
- logs.report("load otf","loading failed (file read error)")
+ data = nil
+ report_otf("loading failed (file read error)")
end
end
if data then
if trace_defining then
- logs.report("define font","loading from cache: %s",hash)
+ report_otf("loading from cache: %s",hash)
end
- otf.enhance("unpack",data,filename,false) -- no message here
- otf.add_dimensions(data)
+ enhance("unpack",data,filename,nil,false)
+ enhance("add dimensions",data,filename,nil,false)
if trace_sequences then
- otf.show_feature_order(data,filename)
+ showfeatureorder(data,filename)
end
end
return data
end
-function otf.add_dimensions(data)
+local mt = {
+ __index = function(t,k) -- maybe set it
+ if k == "height" then
+ local ht = t.boundingbox[4]
+ return ht < 0 and 0 or ht
+ elseif k == "depth" then
+ local dp = -t.boundingbox[2]
+ return dp < 0 and 0 or dp
+ elseif k == "width" then
+ return 0
+ elseif k == "name" then -- or maybe uni*
+ return forcenotdef and ".notdef"
+ end
+ end
+}
+
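When the usemetatables directive is set, per-glyph height and depth are not stored but derived on access from the boundingbox through the __index handler above. A small sketch with a made-up glyph:

    local glyph = { width = 600, boundingbox = { 20, -150, 580, 640 } }
    setmetatable(glyph,mt)
    print(glyph.height, glyph.depth) -- 640 and 150, both taken from the boundingbox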
+actions["add dimensions"] = function(data,filename)
-- todo: forget about the width if it's the defaultwidth (saves mem)
-- we could also build the marks hash here (instead of storing it)
if data then
- local force = otf.notdef
local luatex = data.luatex
local defaultwidth = luatex.defaultwidth or 0
local defaultheight = luatex.defaultheight or 0
local defaultdepth = luatex.defaultdepth or 0
- for _, d in next, data.glyphs do
- local bb, wd = d.boundingbox, d.width
- if not wd then
- d.width = defaultwidth
- elseif wd ~= 0 and d.class == "mark" then
- d.width = -wd
+ if usemetatables then
+ for _, d in next, data.glyphs do
+ local wd = d.width
+ if not wd then
+ d.width = defaultwidth
+ elseif wd ~= 0 and d.class == "mark" then
+ d.width = -wd
+ end
+ setmetatable(d,mt)
end
- if force and not d.name then
- d.name = ".notdef"
+ else
+ for _, d in next, data.glyphs do
+ local bb, wd = d.boundingbox, d.width
+ if not wd then
+ d.width = defaultwidth
+ elseif wd ~= 0 and d.class == "mark" then
+ d.width = -wd
+ end
+ if forcenotdef and not d.name then
+ d.name = ".notdef"
+ end
+ if bb then
+ local ht, dp = bb[4], -bb[2]
+ if ht == 0 or ht < 0 then
+ -- not set
+ else
+ d.height = ht
+ end
+ if dp == 0 or dp < 0 then
+ -- not set
+ else
+ d.depth = dp
+ end
+ end
end
- if bb then
- local ht, dp = bb[4], -bb[2]
- if ht == 0 or ht < 0 then
- -- no need to set it and no negative heights, nil == 0
+ end
+ end
+end
+
+actions["prepare tables"] = function(data,filename,raw)
+ local luatex = {
+ filename = filename,
+ version = otf.version,
+ creator = "context mkiv",
+ }
+ data.luatex = luatex
+ data.metadata = { }
+end
+
+local function somecopy(old) -- fast one
+ if old then
+ local new = { }
+ if type(old) == "table" then
+ for k, v in next, old do
+ if k == "glyphs" then
+ -- skip
+ elseif type(v) == "table" then
+ new[k] = somecopy(v)
else
- d.height = ht
+ new[k] = v
end
- if dp == 0 or dp < 0 then
- -- no negative depths and no negative depths, nil == 0
+ end
+ else
+ for i=1,#mainfields do
+ local k = mainfields[i]
+ local v = old[k]
+ if k == "glyphs" then
+ -- skip
+ elseif type(v) == "table" then
+ new[k] = somecopy(v)
else
- d.depth = dp
+ new[k] = v
end
end
end
+ return new
+ else
+ return { }
end
end
-function otf.show_feature_order(otfdata,filename)
- local sequences = otfdata.luatex.sequences
- if sequences and #sequences > 0 then
- if trace_loading then
- logs.report("otf check","font %s has %s sequences",filename,#sequences)
- logs.report("otf check"," ")
+-- not setting italic_correction and class (when nil) during
+-- table construction can save some memory
+
+actions["prepare glyphs"] = function(data,filename,raw)
+ -- we can also move the names to data.luatex.names which might
+ -- save us some more memory (at the cost of harder tracing)
+ local rawglyphs = raw.glyphs
+ local glyphs, udglyphs
+ if loadmethod == "sparse" then
+ glyphs, udglyphs = { }, { }
+ elseif loadmethod == "mixed" then
+ glyphs, udglyphs = { }, rawglyphs
+ else
+ glyphs, udglyphs = rawglyphs, rawglyphs
+ end
+ data.glyphs, data.udglyphs = glyphs, udglyphs
+ local subfonts = raw.subfonts
+ if subfonts then
+ if data.glyphs and next(data.glyphs) then
+ report_otf("replacing existing glyph table due to subfonts")
end
- for nos=1,#sequences do
- local sequence = sequences[nos]
- local typ = sequence.type or "no-type"
- local name = sequence.name or "no-name"
- local subtables = sequence.subtables or { "no-subtables" }
- local features = sequence.features
- if trace_loading then
- logs.report("otf check","%3i %-15s %-20s [%s]",nos,name,typ,concat(subtables,","))
- end
- if features then
- for feature, scripts in next, features do
- local tt = { }
- for script, languages in next, scripts do
- local ttt = { }
- for language, _ in next, languages do
- ttt[#ttt+1] = language
+ local cidinfo = raw.cidinfo
+ if cidinfo.registry then
+ local cidmap, cidname = fonts.cid.getmap(cidinfo.registry,cidinfo.ordering,cidinfo.supplement)
+ if cidmap then
+ cidinfo.usedname = cidmap.usedname
+ local uni_to_int, int_to_uni, nofnames, nofunicodes = { }, { }, 0, 0
+ local unicodes, names = cidmap.unicodes, cidmap.names
+ for cidindex=1,#subfonts do
+ local subfont = subfonts[cidindex]
+ if loadmethod == "sparse" then
+ local rawglyphs = subfont.glyphs
+ for index=0,subfont.glyphmax - 1 do
+ local g = rawglyphs[index]
+ if g then
+ local unicode, name = unicodes[index], names[index]
+ if unicode then
+ uni_to_int[unicode] = index
+ int_to_uni[index] = unicode
+ nofunicodes = nofunicodes + 1
+ elseif name then
+ nofnames = nofnames + 1
+ end
+ udglyphs[index] = g
+ glyphs[index] = {
+ width = g.width,
+ italic = g.italic_correction,
+ boundingbox = g.boundingbox,
+ class = g.class,
+ name = g.name or name or "unknown", -- uniXXXX
+ cidindex = cidindex,
+ unicode = unicode,
+ }
+ end
end
- tt[#tt+1] = format("[%s: %s]",script,concat(ttt," "))
- end
- if trace_loading then
- logs.report("otf check"," %s: %s",feature,concat(tt," "))
+ -- If we had more userdata, we would need more of this
+ -- and it would start working against us in terms of
+ -- convenience and speed.
+ subfont = somecopy(subfont)
+ subfont.glyphs = nil
+ subfont[cidindex] = subfont
+ elseif loadmethod == "mixed" then
+ for index, g in next, subfont.glyphs do
+ local unicode, name = unicodes[index], names[index]
+ if unicode then
+ uni_to_int[unicode] = index
+ int_to_uni[index] = unicode
+ nofunicodes = nofunicodes + 1
+ elseif name then
+ nofnames = nofnames + 1
+ end
+ udglyphs[index] = g
+ glyphs[index] = {
+ width = g.width,
+ italic = g.italic_correction,
+ boundingbox = g.boundingbox,
+ class = g.class,
+ name = g.name or name or "unknown", -- uniXXXX
+ cidindex = cidindex,
+ unicode = unicode,
+ }
+ end
+ subfont.glyphs = nil
+ else
+ for index, g in next, subfont.glyphs do
+ local unicode, name = unicodes[index], names[index]
+ if unicode then
+ uni_to_int[unicode] = index
+ int_to_uni[index] = unicode
+ nofunicodes = nofunicodes + 1
+ g.unicode = unicode
+ elseif name then
+ nofnames = nofnames + 1
+ end
+ g.cidindex = cidindex
+ glyphs[index] = g
+ end
+ subfont.glyphs = nil
end
end
+ if trace_loading then
+ report_otf("cid font remapped, %s unicode points, %s symbolic names, %s glyphs",nofunicodes, nofnames, nofunicodes+nofnames)
+ end
+ data.map = data.map or { }
+ data.map.map = uni_to_int
+ data.map.backmap = int_to_uni
+ elseif trace_loading then
+ report_otf("unable to remap cid font, missing cid file for %s",filename)
end
+ data.subfonts = subfonts
+ elseif trace_loading then
+ report_otf("font %s has no glyphs",filename)
end
- if trace_loading then
- logs.report("otf check","\n")
+ else
+ if loadmethod == "sparse" then
+ -- we get fields from the userdata glyph table and create
+ -- a minimal entry first
+ for index=0,raw.glyphmax - 1 do
+ local g = rawglyphs[index]
+ if g then
+ udglyphs[index] = g
+ glyphs[index] = {
+ width = g.width,
+ italic = g.italic_correction,
+ boundingbox = g.boundingbox,
+ class = g.class,
+ name = g.name,
+ unicode = g.unicode,
+ }
+ end
+ end
+ elseif loadmethod == "mixed" then
+ -- we get fields from the totable glyph table and copy to the
+ -- final glyph table so first we create a minimal entry
+ for index, g in next, rawglyphs do
+ udglyphs[index] = g
+ glyphs[index] = {
+ width = g.width,
+ italic = g.italic_correction,
+ boundingbox = g.boundingbox,
+ class = g.class,
+ name = g.name,
+ unicode = g.unicode,
+ }
+ end
+ else
+ -- we use the totable glyph table directly and manipulate the
+ -- entries in this (also final) table
end
- elseif trace_loading then
- logs.report("otf check","font %s has no sequences",filename)
+ data.map = raw.map
end
+ data.cidinfo = raw.cidinfo -- hack
end
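Whatever the load method, the result is a data.glyphs table of plain Lua records (plus data.udglyphs holding the raw entries); in sparse and mixed mode each record starts out minimal and is extended by later actions. A sketch of that minimal record with made-up values, matching the fields copied in the loops above:

    local glyph = {
        width       = 520,
        italic      = 0,                     -- copied from italic_correction
        boundingbox = { 30, -10, 490, 700 },
        class       = "base",
        name        = "a",
        unicode     = 0x0061,
        -- cidindex is added when the font has cid subfonts
    }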
--- todo: normalize, design_size => designsize
+-- watch the copy of cidinfo: we had best make some more copies into data
-otf.enhancers["reorganize mark classes"] = function(data,filename)
- if data.mark_classes then
- local unicodes = data.luatex.unicodes
+actions["analyze glyphs"] = function(data,filename,raw) -- maybe integrate this in the previous
+ local glyphs = data.glyphs
+ -- collect info
+ local has_italic, widths, marks = false, { }, { }
+ for index, glyph in next, glyphs do
+ local italic = glyph.italic_correction
+ if not italic then
+ -- skip
+ elseif italic == 0 then
+ glyph.italic_correction = nil
+ glyph.italic = nil
+ else
+ glyph.italic_correction = nil
+ glyph.italic = italic
+ has_italic = true
+ end
+ local width = glyph.width
+ widths[width] = (widths[width] or 0) + 1
+ local class = glyph.class
+ local unicode = glyph.unicode
+ if class == "mark" then
+ marks[unicode] = true
+ -- elseif chardata[unicode].category == "mn" then
+ -- marks[unicode] = true
+ -- glyph.class = "mark"
+ end
+ local a = glyph.altuni if a then glyph.altuni = nil end
+ local d = glyph.dependents if d then glyph.dependents = nil end
+ local v = glyph.vwidth if v then glyph.vwidth = nil end
+ end
+ -- flag italic
+ data.metadata.has_italic = has_italic
+ -- flag marks
+ data.luatex.marks = marks
+ -- share most common width for cjk fonts
+ local wd, most = 0, 1
+ for k,v in next, widths do
+ if v > most then
+ wd, most = k, v
+ end
+ end
+ if most > 1000 then -- maybe 500
+ if trace_loading then
+ report_otf("most common width: %s (%s times), sharing (cjk font)",wd,most)
+ end
+ for index, glyph in next, glyphs do
+ if glyph.width == wd then
+ glyph.width = nil
+ end
+ end
+ data.luatex.defaultwidth = wd
+ end
+end
+
+actions["reorganize mark classes"] = function(data,filename,raw)
+ local mark_classes = raw.mark_classes
+ if mark_classes then
+ local luatex = data.luatex
+ local unicodes = luatex.unicodes
local reverse = { }
- for name, class in next, data.mark_classes do
+ luatex.markclasses = reverse
+ for name, class in next, mark_classes do
local t = { }
for s in gmatch(class,"[^ ]+") do
local us = unicodes[s]
@@ -393,58 +816,15 @@ otf.enhancers["reorganize mark classes"] = function(data,filename)
end
reverse[name] = t
end
- data.luatex.markclasses = reverse
- data.mark_classes = nil
+ data.mark_classes = nil -- when using table
end
end
-otf.enhancers["prepare luatex tables"] = function(data,filename)
- data.luatex = data.luatex or { }
- local luatex = data.luatex
- luatex.filename = filename
- luatex.version = otf.version
- luatex.creator = "context mkiv"
-end
-
-otf.enhancers["cleanup aat"] = function(data,filename)
- if otf.cleanup_aat then
- end
-end
-
-local function analyze_features(g, features)
- if g then
- local t, done = { }, { }
- for k=1,#g do
- local f = features or g[k].features
- if f then
- for k=1,#f do
- -- scripts and tag
- local tag = f[k].tag
- if not done[tag] then
- t[#t+1] = tag
- done[tag] = true
- end
- end
- end
- end
- if #t > 0 then
- return t
- end
- end
- return nil
-end
-
-otf.enhancers["analyse features"] = function(data,filename)
- -- local luatex = data.luatex
- -- luatex.gposfeatures = analyze_features(data.gpos)
- -- luatex.gsubfeatures = analyze_features(data.gsub)
-end
-
-otf.enhancers["rehash features"] = function(data,filename)
+actions["reorganize features"] = function(data,filename,raw) -- combine with other
local features = { }
data.luatex.features = features
for k, what in next, otf.glists do
- local dw = data[what]
+ local dw = raw[what]
if dw then
local f = { }
features[what] = f
@@ -457,8 +837,10 @@ otf.enhancers["rehash features"] = function(data,filename)
local tag = strip(lower(df.tag))
local ft = f[tag] if not ft then ft = {} f[tag] = ft end
local dscripts = df.scripts
- for script, languages in next, dscripts do
- script = strip(lower(script))
+ for i=1,#dscripts do
+ local d = dscripts[i]
+ local languages = d.langs
+ local script = strip(lower(d.script))
local fts = ft[script] if not fts then fts = {} ft[script] = fts end
for i=1,#languages do
fts[strip(lower(languages[i]))] = true
@@ -471,8 +853,8 @@ otf.enhancers["rehash features"] = function(data,filename)
end
end
-otf.enhancers["analyse anchors"] = function(data,filename)
- local classes = data.anchor_classes
+actions["reorganize anchor classes"] = function(data,filename,raw)
+ local classes = raw.anchor_classes -- anchor classes not in final table
local luatex = data.luatex
local anchor_to_lookup, lookup_to_anchor = { }, { }
luatex.anchor_to_lookup, luatex.lookup_to_anchor = anchor_to_lookup, lookup_to_anchor
@@ -497,218 +879,200 @@ otf.enhancers["analyse anchors"] = function(data,filename)
end
end
-otf.enhancers["analyse marks"] = function(data,filename)
- local glyphs = data.glyphs
- local marks = { }
- data.luatex.marks = marks
- for unicode, index in next, data.luatex.indices do
- local glyph = glyphs[index]
- if glyph.class == "mark" then
- marks[unicode] = true
- end
- end
+actions["prepare tounicode"] = function(data,filename,raw)
+ fonts.map.addtounicode(data,filename)
end
-otf.enhancers["analyse unicodes"] = fonts.map.add_to_unicode
-
-otf.enhancers["analyse subtables"] = function(data,filename)
- data.luatex = data.luatex or { }
+actions["reorganize subtables"] = function(data,filename,raw)
local luatex = data.luatex
- local sequences = { }
- local lookups = { }
- luatex.sequences = sequences
- luatex.lookups = lookups
- for _, g in next, { data.gsub, data.gpos } do
- for k=1,#g do
- local gk = g[k]
- local typ = gk.type
- if typ == "gsub_contextchain" or typ == "gpos_contextchain" then
- gk.chain = 1
- elseif typ == "gsub_reversecontextchain" or typ == "gpos_reversecontextchain" then
- gk.chain = -1
- else
- gk.chain = 0
- end
- local features = gk.features
- if features then
- sequences[#sequences+1] = gk
- -- scripts, tag, ismac
- local t = { }
- for f=1,#features do
- local feature = features[f]
- local hash = { }
- -- only script and langs matter
- for s, languages in next, feature.scripts do
- s = lower(s)
- local h = hash[s]
- if not h then h = { } hash[s] = h end
- for l=1,#languages do
- h[strip(lower(languages[l]))] = true
- end
+ local sequences, lookups = { }, { }
+ luatex.sequences, luatex.lookups = sequences, lookups
+ for _, what in next, otf.glists do
+ local dw = raw[what]
+ if dw then
+ for k=1,#dw do
+ local gk = dw[k]
+ local typ = gk.type
+ local chain =
+ (typ == "gsub_contextchain" or typ == "gpos_contextchain") and 1 or
+ (typ == "gsub_reversecontextchain" or typ == "gpos_reversecontextchain") and -1 or 0
+ --
+ local subtables = gk.subtables
+ if subtables then
+ local t = { }
+ for s=1,#subtables do
+ local subtable = subtables[s]
+ local name = subtable.name
+ t[#t+1] = name
end
- t[feature.tag] = hash
- end
- gk.features = t
- else
- lookups[gk.name] = gk
- gk.name = nil
- end
- local subtables = gk.subtables
- if subtables then
- local t = { }
- for s=1,#subtables do
- local subtable = subtables[s]
- local name = subtable.name
- t[#t+1] = name
+ subtables = t
end
- gk.subtables = t
- end
- local flags = gk.flags
- if flags then
- gk.flags = { -- forcing false packs nicer
- (flags.ignorecombiningmarks and "mark") or false,
- (flags.ignoreligatures and "ligature") or false,
- (flags.ignorebaseglyphs and "base") or false,
- flags.r2l or false,
- }
- if flags.mark_class then
- gk.markclass = luatex.markclasses[flags.mark_class]
+ local flags, markclass = gk.flags, nil
+ if flags then
+ local t = { -- forcing false packs nicer
+ (flags.ignorecombiningmarks and "mark") or false,
+ (flags.ignoreligatures and "ligature") or false,
+ (flags.ignorebaseglyphs and "base") or false,
+ flags.r2l or false,
+ }
+ markclass = flags.mark_class
+ if markclass then
+ markclass = luatex.markclasses[markclass]
+ end
+ flags = t
end
- end
- end
- end
-end
-
-otf.enhancers["merge cid fonts"] = function(data,filename)
- -- we can also move the names to data.luatex.names which might
- -- save us some more memory (at the cost of harder tracing)
- if data.subfonts then
- if data.glyphs and next(data.glyphs) then
- logs.report("load otf","replacing existing glyph table due to subfonts")
- end
- local cidinfo = data.cidinfo
- local verbose = fonts.verbose
- if cidinfo.registry then
- local cidmap, cidname = fonts.cid.getmap(cidinfo.registry,cidinfo.ordering,cidinfo.supplement)
- if cidmap then
- cidinfo.usedname = cidmap.usedname
- local glyphs, uni_to_int, int_to_uni, nofnames, nofunicodes = { }, { }, { }, 0, 0
- local unicodes, names = cidmap.unicodes, cidmap.names
- for n, subfont in next, data.subfonts do
- for index, g in next, subfont.glyphs do
- if not next(g) then
- -- dummy entry
- else
- local unicode, name = unicodes[index], names[index]
- g.cidindex = n
- g.boundingbox = g.boundingbox -- or zerobox
- g.name = g.name or name or "unknown"
- if unicode then
- uni_to_int[unicode] = index
- int_to_uni[index] = unicode
- nofunicodes = nofunicodes + 1
- g.unicode = unicode
- elseif name then
- nofnames = nofnames + 1
- g.unicode = -1
+ --
+ local name = gk.name
+ --
+ local features = gk.features
+ if features then
+ -- scripts, tag, ismac
+ local f = { }
+ for i=1,#features do
+ local df = features[i]
+ local tag = strip(lower(df.tag))
+ local ft = f[tag] if not ft then ft = {} f[tag] = ft end
+ local dscripts = df.scripts
+ for i=1,#dscripts do
+ local d = dscripts[i]
+ local languages = d.langs
+ local script = strip(lower(d.script))
+ local fts = ft[script] if not fts then fts = {} ft[script] = fts end
+ for i=1,#languages do
+ fts[strip(lower(languages[i]))] = true
end
- glyphs[index] = g
end
end
- subfont.glyphs = nil
- end
- if trace_loading then
- logs.report("load otf","cid font remapped, %s unicode points, %s symbolic names, %s glyphs",nofunicodes, nofnames, nofunicodes+nofnames)
+ sequences[#sequences+1] = {
+ type = typ,
+ chain = chain,
+ flags = flags,
+ name = name,
+ subtables = subtables,
+ markclass = markclass,
+ features = f,
+ }
+ else
+ lookups[name] = {
+ type = typ,
+ chain = chain,
+ flags = flags,
+ subtables = subtables,
+ markclass = markclass,
+ }
end
- data.glyphs = glyphs
- data.map = data.map or { }
- data.map.map = uni_to_int
- data.map.backmap = int_to_uni
- elseif trace_loading then
- logs.report("load otf","unable to remap cid font, missing cid file for %s",filename)
end
- elseif trace_loading then
- logs.report("load otf","font %s has no glyphs",filename)
end
end
end
-otf.enhancers["prepare unicode"] = function(data,filename)
+actions["prepare unicodes"] = function(data,filename,raw)
local luatex = data.luatex
- if not luatex then luatex = { } data.luatex = luatex end
local indices, unicodes, multiples, internals = { }, { }, { }, { }
- local glyphs = data.glyphs
- local mapmap = data.map
+ local mapmap = data.map or raw.map
+ local mapenc = nil -- will go away
if not mapmap then
- logs.report("load otf","no map in %s",filename)
+ report_otf("no map in %s",filename)
mapmap = { }
data.map = { map = mapmap }
elseif not mapmap.map then
- logs.report("load otf","no unicode map in %s",filename)
+ report_otf("no unicode map in %s",filename)
mapmap = { }
data.map.map = mapmap
else
+ mapenc = mapmap.enc -- will go away
mapmap = mapmap.map
end
- local criterium = fonts.private
- local private = fonts.private
+ local criterium = fonts.privateoffset
+ local private = criterium
+ local glyphs = data.glyphs
+ -- todo: nofmultiples
for index, glyph in next, glyphs do
if index > 0 then
- local name = glyph.name
+ local name = glyph.name -- really needed ?
if name then
local unicode = glyph.unicode
- if unicode == -1 or unicode >= criterium then
+ if not unicode or unicode == -1 or unicode >= criterium then
glyph.unicode = private
indices[private] = index
unicodes[name] = private
internals[index] = true
if trace_private then
- logs.report("load otf","enhance: glyph %s at index U+%04X is moved to private unicode slot U+%04X",name,index,private)
+ report_otf("enhance: glyph %s at index U+%04X is moved to private unicode slot U+%04X",name,index,private)
end
private = private + 1
else
indices[unicode] = index
unicodes[name] = unicode
end
+ -- maybe deal with altuni here in the future but first we need
+            -- to encounter a proper font that sets them; we have to wait for
+            -- the next luatex binary, as currently the unicode numbers can be out
+ -- of bounds
+ if false then
+ local altuni = glyph.altuni
+ if altuni then
+ local un = { unicodes[name] }
+ for i=1,#altuni do
+ local unicode = altuni[i].unicode
+ multiples[#multiples+1] = name
+ un[i+1] = unicode
+ indices[unicode] = index -- maybe check for duplicates
+ end
+ unicodes[name] = un
+ end
+ end
+ else
+ -- message that something is wrong
end
end
end
-- beware: the indices table is used to initialize the tfm table
- for unicode, index in next, mapmap do
- if not internals[index] then
- local name = glyphs[index].name
- if name then
- local un = unicodes[name]
- if not un then
- unicodes[name] = unicode -- or 0
- elseif type(un) == "number" then
- if un ~= unicode then
- multiples[#multiples+1] = name
- unicodes[name] = { un, unicode }
- indices[unicode] = index
- end
- else
- local ok = false
- for u=1,#un do
- if un[u] == unicode then
- ok = true
- break
+ local encname = lower(data.enc_name or (mapenc and mapenc[1] and mapenc[1].enc_name) or "") -- mapenc will go away
+ -- will become: local encname = lower(data.enc_name or "")
+ if encname == "" or encname == "unicodebmp" or encname == "unicodefull" then -- maybe find(encname,"unicode")
+ if trace_loading then
+ report_otf("using extra unicode map")
+ end
+ -- ok -- we can also consider using the altuni
+ for unicode, index in next, mapmap do
+ if not internals[index] then
+ local name = glyphs[index].name
+ if name then
+ local un = unicodes[name]
+ if not un then
+ unicodes[name] = unicode -- or 0
+ elseif type(un) == "number" then -- tonumber(un)
+ if un ~= unicode then
+ multiples[#multiples+1] = name
+ unicodes[name] = { un, unicode }
+ indices[unicode] = index
+ end
+ else
+ local ok = false
+ for u=1,#un do
+ if un[u] == unicode then
+ ok = true
+ break
+ end
+ end
+ if not ok then
+ multiples[#multiples+1] = name
+ un[#un+1] = unicode
+ indices[unicode] = index
end
- end
- if not ok then
- multiples[#multiples+1] = name
- un[#un+1] = unicode
- indices[unicode] = index
end
end
end
end
+ else
+ report_otf("warning: non unicode map '%s', only using glyph unicode data",encname or "whatever")
end
if trace_loading then
if #multiples > 0 then
- logs.report("load otf","%s glyph are reused: %s",#multiples, concat(multiples," "))
+ report_otf("%s glyphs are reused: %s",#multiples, concat(multiples," "))
else
- logs.report("load otf","no glyph are reused")
+ report_otf("no glyphs are reused")
end
end
luatex.indices = indices
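Glyphs whose unicode is missing, -1 or at or above fonts.privateoffset are reassigned consecutive private slots, and names reachable from several unicodes end up with a list. A sketch of the resulting tables with made-up values, assuming the usual 0xF0000 private offset:

    local luatex = {
        indices  = { [0x0041] = 36, [0xF0000] = 512, [0x03A9] = 75, [0x2126] = 75 },
        unicodes = { A = 0x0041, ["orn.001"] = 0xF0000, Omega = { 0x03A9, 0x2126 } },
        private  = 0xF0001, -- next free private slot
    }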
@@ -716,26 +1080,14 @@ otf.enhancers["prepare unicode"] = function(data,filename)
luatex.private = private
end
-otf.enhancers["cleanup ttf tables"] = function(data,filename)
- local ttf_tables = data.ttf_tables
- if ttf_tables then
- for k=1,#ttf_tables do
- if ttf_tables[k].data then ttf_tables[k].data = "deleted" end
- end
- end
- data.ttf_tab_saved = nil
-end
-
-otf.enhancers["compact glyphs"] = function(data,filename)
- table.compact(data.glyphs) -- needed?
- if data.subfonts then
- for _, subfont in next, data.subfonts do
- table.compact(subfont.glyphs) -- needed?
- end
+actions["prepare lookups"] = function(data,filename,raw)
+ local lookups = raw.lookups
+ if lookups then
+ data.lookups = lookups
end
end
-otf.enhancers["reverse coverage"] = function(data,filename)
+actions["reorganize lookups"] = function(data,filename,raw)
-- we prefer the before lookups in a normal order
if data.lookups then
for _, v in next, data.lookups do
@@ -743,7 +1095,7 @@ otf.enhancers["reverse coverage"] = function(data,filename)
for _, vv in next, v.rules do
local c = vv.coverage
if c and c.before then
- c.before = table.reverse(c.before)
+ c.before = reversed(c.before)
end
end
end
@@ -751,35 +1103,19 @@ otf.enhancers["reverse coverage"] = function(data,filename)
end
end
-otf.enhancers["check italic correction"] = function(data,filename)
- local glyphs = data.glyphs
- local ok = false
- for index, glyph in next, glyphs do
- local ic = glyph.italic_correction
- if ic then
- if ic ~= 0 then
- glyph.italic = ic
- end
- glyph.italic_correction = nil
- ok = true
- end
- end
- -- we can use this to avoid calculations
- otf.tables.valid_fields[#otf.tables.valid_fields+1] = "has_italic"
- data.has_italic = true
-end
-
-otf.enhancers["check math"] = function(data,filename)
- if data.math then
+actions["analyze math"] = function(data,filename,raw)
+ if raw.math then
+        data.metadata.math = raw.math
-- we move the math stuff into a math subtable because we then can
-- test faster in the tfm copy
- local glyphs = data.glyphs
+ local glyphs, udglyphs = data.glyphs, data.udglyphs
local unicodes = data.luatex.unicodes
- for index, glyph in next, glyphs do
- local mk = glyph.mathkern
- local hv = glyph.horiz_variants
- local vv = glyph.vert_variants
+ for index, udglyph in next, udglyphs do
+ local mk = udglyph.mathkern
+ local hv = udglyph.horiz_variants
+ local vv = udglyph.vert_variants
if mk or hv or vv then
+ local glyph = glyphs[index]
local math = { }
glyph.math = math
if mk then
@@ -789,7 +1125,6 @@ otf.enhancers["check math"] = function(data,filename)
end
end
math.kerns = mk
- glyph.mathkern = nil
end
if hv then
math.horiz_variants = hv.variants
@@ -805,7 +1140,6 @@ otf.enhancers["check math"] = function(data,filename)
if ic and ic ~= 0 then
math.horiz_italic_correction = ic
end
- glyph.horiz_variants = nil
end
if vv then
local uc = unicodes[index]
@@ -822,227 +1156,54 @@ otf.enhancers["check math"] = function(data,filename)
if ic and ic ~= 0 then
math.vert_italic_correction = ic
end
- glyph.vert_variants = nil
end
local ic = glyph.italic_correction
if ic then
if ic ~= 0 then
math.italic_correction = ic
end
- glyph.italic_correction = nil
end
end
end
end
end
-otf.enhancers["share widths"] = function(data,filename)
- local glyphs = data.glyphs
- local widths = { }
- for index, glyph in next, glyphs do
- local width = glyph.width
- widths[width] = (widths[width] or 0) + 1
- end
- -- share width for cjk fonts
- local wd, most = 0, 1
- for k,v in next, widths do
- if v > most then
- wd, most = k, v
- end
- end
- if most > 1000 then
- if trace_loading then
- logs.report("load otf", "most common width: %s (%s times), sharing (cjk font)",wd,most)
- end
- for k, v in next, glyphs do
- if v.width == wd then
- v.width = nil
- end
- end
- data.luatex.defaultwidth = wd
- end
-end
-
--- kern: ttf has a table with kerns
-
--- Weird, as maxfirst and maxseconds can have holes, first seems to be indexed, but
--- seconds can start at 2 .. this need to be fixed as getn as well as # are sort of
--- unpredictable alternatively we could force an [1] if not set (maybe I will do that
--- anyway).
-
---~ otf.enhancers["reorganize kerns"] = function(data,filename)
---~ local glyphs, mapmap, unicodes = data.glyphs, data.luatex.indices, data.luatex.unicodes
---~ local mkdone = false
---~ for index, glyph in next, glyphs do
---~ if glyph.kerns then
---~ local mykerns = { }
---~ for k,v in next, glyph.kerns do
---~ local vc, vo, vl = v.char, v.off, v.lookup
---~ if vc and vo and vl then -- brrr, wrong! we miss the non unicode ones
---~ local uvc = unicodes[vc]
---~ if not uvc then
---~ if trace_loading then
---~ logs.report("load otf","problems with unicode %s of kern %s at glyph %s",vc,k,index)
---~ end
---~ else
---~ if type(vl) ~= "table" then
---~ vl = { vl }
---~ end
---~ for l=1,#vl do
---~ local vll = vl[l]
---~ local mkl = mykerns[vll]
---~ if not mkl then
---~ mkl = { }
---~ mykerns[vll] = mkl
---~ end
---~ if type(uvc) == "table" then
---~ for u=1,#uvc do
---~ mkl[uvc[u]] = vo
---~ end
---~ else
---~ mkl[uvc] = vo
---~ end
---~ end
---~ end
---~ end
---~ end
---~ glyph.mykerns = mykerns
---~ glyph.kerns = nil -- saves space and time
---~ mkdone = true
---~ end
---~ end
---~ if trace_loading and mkdone then
---~ logs.report("load otf", "replacing 'kerns' tables by 'mykerns' tables")
---~ end
---~ if data.kerns then
---~ if trace_loading then
---~ logs.report("load otf", "removing global 'kern' table")
---~ end
---~ data.kerns = nil
---~ end
---~ local dgpos = data.gpos
---~ if dgpos then
---~ local separator = lpeg.P(" ")
---~ local other = ((1 - separator)^0) / unicodes
---~ local splitter = lpeg.Ct(other * (separator * other)^0)
---~ for gp=1,#dgpos do
---~ local gpos = dgpos[gp]
---~ local subtables = gpos.subtables
---~ if subtables then
---~ for s=1,#subtables do
---~ local subtable = subtables[s]
---~ local kernclass = subtable.kernclass -- name is inconsistent with anchor_classes
---~ if kernclass then -- the next one is quite slow
---~ local split = { } -- saves time
---~ for k=1,#kernclass do
---~ local kcl = kernclass[k]
---~ local firsts, seconds, offsets, lookups = kcl.firsts, kcl.seconds, kcl.offsets, kcl.lookup -- singular
---~ if type(lookups) ~= "table" then
---~ lookups = { lookups }
---~ end
---~ local maxfirsts, maxseconds = getn(firsts), getn(seconds)
---~ for _, s in next, firsts do
---~ split[s] = split[s] or lpegmatch(splitter,s)
---~ end
---~ for _, s in next, seconds do
---~ split[s] = split[s] or lpegmatch(splitter,s)
---~ end
---~ for l=1,#lookups do
---~ local lookup = lookups[l]
---~ local function do_it(fk,first_unicode)
---~ local glyph = glyphs[mapmap[first_unicode]]
---~ if glyph then
---~ local mykerns = glyph.mykerns
---~ if not mykerns then
---~ mykerns = { } -- unicode indexed !
---~ glyph.mykerns = mykerns
---~ end
---~ local lookupkerns = mykerns[lookup]
---~ if not lookupkerns then
---~ lookupkerns = { }
---~ mykerns[lookup] = lookupkerns
---~ end
---~ local baseoffset = (fk-1) * maxseconds
---~ for sk=2,maxseconds do -- we can avoid this loop with a table
---~ local sv = seconds[sk]
---~ local splt = split[sv]
---~ if splt then
---~ local offset = offsets[baseoffset + sk]
---~ --~ local offset = offsets[sk] -- (fk-1) * maxseconds + sk]
---~ if offset then
---~ for i=1,#splt do
---~ local second_unicode = splt[i]
---~ if tonumber(second_unicode) then
---~ lookupkerns[second_unicode] = offset
---~ else for s=1,#second_unicode do
---~ lookupkerns[second_unicode[s]] = offset
---~ end end
---~ end
---~ end
---~ end
---~ end
---~ elseif trace_loading then
---~ logs.report("load otf", "no glyph data for U+%04X", first_unicode)
---~ end
---~ end
---~ for fk=1,#firsts do
---~ local fv = firsts[fk]
---~ local splt = split[fv]
---~ if splt then
---~ for i=1,#splt do
---~ local first_unicode = splt[i]
---~ if tonumber(first_unicode) then
---~ do_it(fk,first_unicode)
---~ else for f=1,#first_unicode do
---~ do_it(fk,first_unicode[f])
---~ end end
---~ end
---~ end
---~ end
---~ end
---~ end
---~ subtable.comment = "The kernclass table is merged into mykerns in the indexed glyph tables."
---~ subtable.kernclass = { }
---~ end
---~ end
---~ end
---~ end
---~ end
---~ end
-
-otf.enhancers["reorganize kerns"] = function(data,filename)
- local glyphs, mapmap, unicodes = data.glyphs, data.luatex.indices, data.luatex.unicodes
+actions["reorganize glyph kerns"] = function(data,filename,raw)
+ local luatex = data.luatex
+ local udglyphs, glyphs, mapmap, unicodes = data.udglyphs, data.glyphs, luatex.indices, luatex.unicodes
local mkdone = false
- local function do_it(lookup,first_unicode,kerns)
+ local function do_it(lookup,first_unicode,extrakerns) -- can be moved inline but seldom used
local glyph = glyphs[mapmap[first_unicode]]
if glyph then
- local mykerns = glyph.mykerns
- if not mykerns then
- mykerns = { } -- unicode indexed !
- glyph.mykerns = mykerns
+ local kerns = glyph.kerns
+ if not kerns then
+ kerns = { } -- unicode indexed !
+ glyph.kerns = kerns
end
- local lookupkerns = mykerns[lookup]
+ local lookupkerns = kerns[lookup]
if not lookupkerns then
lookupkerns = { }
- mykerns[lookup] = lookupkerns
+ kerns[lookup] = lookupkerns
end
- for second_unicode, kern in next, kerns do
+ for second_unicode, kern in next, extrakerns do
lookupkerns[second_unicode] = kern
end
elseif trace_loading then
- logs.report("load otf", "no glyph data for U+%04X", first_unicode)
+ report_otf("no glyph data for U+%04X", first_unicode)
end
end
- for index, glyph in next, glyphs do
- if glyph.kerns then
- local mykerns = { }
- for k,v in next, glyph.kerns do
+ for index, udglyph in next, data.udglyphs do
+ local kerns = udglyph.kerns
+ if kerns then
+ local glyph = glyphs[index]
+ local newkerns = { }
+ for k,v in next, kerns do
local vc, vo, vl = v.char, v.off, v.lookup
if vc and vo and vl then -- brrr, wrong! we miss the non unicode ones
local uvc = unicodes[vc]
if not uvc then
if trace_loading then
- logs.report("load otf","problems with unicode %s of kern %s at glyph %s",vc,k,index)
+ report_otf("problems with unicode %s of kern %s at glyph %s",vc,k,index)
end
else
if type(vl) ~= "table" then
@@ -1050,10 +1211,10 @@ otf.enhancers["reorganize kerns"] = function(data,filename)
end
for l=1,#vl do
local vll = vl[l]
- local mkl = mykerns[vll]
+ local mkl = newkerns[vll]
if not mkl then
mkl = { }
- mykerns[vll] = mkl
+ newkerns[vll] = mkl
end
if type(uvc) == "table" then
for u=1,#uvc do
@@ -1066,21 +1227,14 @@ otf.enhancers["reorganize kerns"] = function(data,filename)
end
end
end
- glyph.mykerns = mykerns
- glyph.kerns = nil -- saves space and time
+ glyph.kerns = newkerns -- udglyph.kerns = nil when in mixed mode
mkdone = true
end
end
if trace_loading and mkdone then
- logs.report("load otf", "replacing 'kerns' tables by 'mykerns' tables")
- end
- if data.kerns then
- if trace_loading then
- logs.report("load otf", "removing global 'kern' table")
- end
- data.kerns = nil
+            report_otf("replacing 'kerns' tables by new 'kerns' tables")
end
- local dgpos = data.gpos
+ local dgpos = raw.gpos
if dgpos then
local separator = lpeg.P(" ")
local other = ((1 - separator)^0) / unicodes
@@ -1145,7 +1299,7 @@ otf.enhancers["reorganize kerns"] = function(data,filename)
end
end
end
- subtable.comment = "The kernclass table is merged into mykerns in the indexed glyph tables."
+ subtable.comment = "The kernclass table is merged into kerns in the indexed glyph tables."
subtable.kernclass = { }
end
end
@@ -1154,22 +1308,10 @@ otf.enhancers["reorganize kerns"] = function(data,filename)
end
end
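
After this pass the kerning data lives directly on the affected glyphs, keyed first by lookup name and then by the unicode of the second glyph; the kernclass subtables of the gpos lookups are folded into the same structure. A minimal sketch of the resulting shape (the lookup name and values are hypothetical):

    -- hypothetical result of "reorganize glyph kerns" for one glyph
    local glyph = {
        kerns = {
            ["pp_l_0_s"] = { [0x0041] = -120, [0x0056] = -95 },   -- lookup -> second unicode -> kern
        },
    }
    print(glyph.kerns["pp_l_0_s"][0x0041])   --> -120
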
-
-
-
-
-
-
-
-
-otf.enhancers["strip not needed data"] = function(data,filename)
+actions["check glyphs"] = function(data,filename,raw)
local verbose = fonts.verbose
local int_to_uni = data.luatex.unicodes
for k, v in next, data.glyphs do
- local d = v.dependents
- if d then v.dependents = nil end
- local a = v.altuni
- if a then v.altuni = nil end
if verbose then
local code = int_to_uni[k]
-- looks like this is done twice ... bug?
@@ -1191,33 +1333,65 @@ otf.enhancers["strip not needed data"] = function(data,filename)
v.unicode = nil
v.index = nil
end
+ -- only needed on non sparse/mixed mode
+ if v.math then
+ if v.mathkern then v.mathkern = nil end
+        if v.horiz_variants then v.horiz_variants = nil end
+ if v.vert_variants then v.vert_variants = nil end
+ end
+ --
end
- data.luatex.comment = "Glyph tables have their original index. When present, mykern tables are indexed by unicode."
- data.map = nil
- data.names = nil -- funny names for editors
- data.glyphcnt = nil
- data.glyphmax = nil
- if true then
- data.gpos = nil
- data.gsub = nil
- data.anchor_classes = nil
- end
+ data.luatex.comment = "Glyph tables have their original index. When present, kern tables are indexed by unicode."
end
-otf.enhancers["migrate metadata"] = function(data,filename)
- local global_fields = otf.tables.global_fields
- local metadata = { }
- for k,v in next, data do
- if not global_fields[k] then
- metadata[k] = v
- data[k] = nil
+actions["check metadata"] = function(data,filename,raw)
+ local metadata = data.metadata
+ metadata.method = loadmethod
+ if loadmethod == "sparse" then
+ for _, k in next, mainfields do
+ if valid_fields[k] then
+ local v = raw[k]
+ if global_fields[k] then
+ if not data[k] then
+ data[k] = v
+ end
+ else
+ if not metadata[k] then
+ metadata[k] = v
+ end
+ end
+ end
+ end
+ else
+ for k, v in next, raw do
+ if valid_fields[k] then
+ if global_fields[k] then
+ if not data[k] then
+                        data[k] = v
+ end
+ else
+ if not metadata[k] then
+ metadata[k] = v
+ end
+ end
+ end
end
end
- data.metadata = metadata
- -- goodies
- local pfminfo = data.pfminfo
- metadata.isfixedpitch = metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose["proportion"] == "Monospaced")
- metadata.charwidth = pfminfo and pfminfo.avgwidth
+ local pfminfo = raw.pfminfo
+ if pfminfo then
+ data.pfminfo = pfminfo
+ metadata.isfixedpitch = metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose.proportion == "Monospaced")
+ metadata.charwidth = pfminfo and pfminfo.avgwidth
+ end
+ local ttftables = metadata.ttf_tables
+ if ttftables then
+ for i=1,#ttftables do
+ ttftables[i].data = "deleted"
+ end
+ end
+ metadata.xuid = nil
+ data.udglyphs = nil
+ data.map = nil
end
local private_math_parameters = {
@@ -1225,14 +1399,14 @@ local private_math_parameters = {
"FractionDelimiterDisplayStyleSize",
}
-otf.enhancers["check math parameters"] = function(data,filename)
+actions["check math parameters"] = function(data,filename,raw)
local mathdata = data.metadata.math
if mathdata then
for m=1,#private_math_parameters do
local pmp = private_math_parameters[m]
if not mathdata[pmp] then
if trace_loading then
- logs.report("load otf", "setting math parameter '%s' to 0", pmp)
+ report_otf("setting math parameter '%s' to 0", pmp)
end
mathdata[pmp] = 0
end
@@ -1240,96 +1414,100 @@ otf.enhancers["check math parameters"] = function(data,filename)
end
end
-otf.enhancers["flatten glyph lookups"] = function(data,filename)
- for k, v in next, data.glyphs do
- local lookups = v.lookups
+
+-- kern: ttf has a table with kerns
+--
+-- Weird, as maxfirst and maxseconds can have holes, first seems to be indexed, but
+-- seconds can start at 2 .. this needs to be fixed, as getn as well as # are sort of
+-- unpredictable; alternatively we could force a [1] if not set (maybe I will do that
+-- anyway).
+
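
The unreliability mentioned in the comment above is easy to demonstrate: an array whose first slot is missing can report length 0 through # (and getn), so entries past the hole are silently skipped. A standalone sketch of the problem and of the defensive alternative of scanning the numeric keys:

    local function sparse_length(t)
        local n = 0
        for k in next, t do
            if type(k) == "number" and k > n then
                n = k
            end
        end
        return n
    end

    local seconds = { [2] = "A V", [3] = "T" }   -- hole at [1]
    print(#seconds, sparse_length(seconds))      -- # is unreliable here (often 0), sparse_length gives 3
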
+actions["reorganize glyph lookups"] = function(data,filename,raw)
+ local glyphs = data.glyphs
+ for index, udglyph in next, data.udglyphs do
+ local lookups = udglyph.lookups
if lookups then
+ local glyph = glyphs[index]
+ local l = { }
for kk, vv in next, lookups do
+ local aa = { }
+ l[kk] = aa
for kkk=1,#vv do
local vvv = vv[kkk]
local s = vvv.specification
- if s then
- local t = vvv.type
- if t == "ligature" then
- vv[kkk] = { "ligature", s.components, s.char }
- elseif t == "alternate" then
- vv[kkk] = { "alternate", s.components }
- elseif t == "substitution" then
- vv[kkk] = { "substitution", s.variant }
- elseif t == "multiple" then
- vv[kkk] = { "multiple", s.components }
- elseif t == "position" then
- vv[kkk] = { "position", { s.x or 0, s.y or 0, s.h or 0, s.v or 0 } }
- elseif t == "pair" then
- local one, two, paired = s.offsets[1], s.offsets[2], s.paired or ""
- if one then
- if two then
- vv[kkk] = { "pair", paired, { one.x or 0, one.y or 0, one.h or 0, one.v or 0 }, { two.x or 0, two.y or 0, two.h or 0, two.v or 0 } }
- else
- vv[kkk] = { "pair", paired, { one.x or 0, one.y or 0, one.h or 0, one.v or 0 } }
- end
+ local t = vvv.type
+ -- #aa+1
+ if t == "ligature" then
+ aa[kkk] = { "ligature", s.components, s.char }
+ elseif t == "alternate" then
+ aa[kkk] = { "alternate", s.components }
+ elseif t == "substitution" then
+ aa[kkk] = { "substitution", s.variant }
+ elseif t == "multiple" then
+ aa[kkk] = { "multiple", s.components }
+ elseif t == "position" then
+ aa[kkk] = { "position", { s.x or 0, s.y or 0, s.h or 0, s.v or 0 } }
+ elseif t == "pair" then
+ -- maybe flatten this one
+ local one, two, paired = s.offsets[1], s.offsets[2], s.paired or ""
+ if one then
+ if two then
+ aa[kkk] = { "pair", paired, { one.x or 0, one.y or 0, one.h or 0, one.v or 0 }, { two.x or 0, two.y or 0, two.h or 0, two.v or 0 } }
else
- if two then
- vv[kkk] = { "pair", paired, { }, { two.x or 0, two.y or 0, two.h or 0, two.v or 0} } -- maybe nil instead of { }
- else
- vv[kkk] = { "pair", paired }
- end
+ aa[kkk] = { "pair", paired, { one.x or 0, one.y or 0, one.h or 0, one.v or 0 } }
end
else
- if trace_loading then
- logs.report("load otf", "flattening needed, report to context list")
- end
- for a, b in next, s do
- if trace_loading and vvv[a] then
- logs.report("load otf", "flattening conflict, report to context list")
- end
- vvv[a] = b
+ if two then
+ aa[kkk] = { "pair", paired, { }, { two.x or 0, two.y or 0, two.h or 0, two.v or 0} } -- maybe nil instead of { }
+ else
+ aa[kkk] = { "pair", paired }
end
- vvv.specification = nil
end
end
end
end
- end
- end
-end
-
-otf.enhancers["simplify glyph lookups"] = function(data,filename)
- for k, v in next, data.glyphs do
- local lookups = v.lookups
- if lookups then
+ -- we could combine this
local slookups, mlookups
- for kk, vv in next, lookups do
+ for kk, vv in next, l do
if #vv == 1 then
if not slookups then
slookups = { }
- v.slookups = slookups
+ glyph.slookups = slookups
end
slookups[kk] = vv[1]
else
if not mlookups then
mlookups = { }
- v.mlookups = mlookups
+ glyph.mlookups = mlookups
end
mlookups[kk] = vv
end
end
- v.lookups = nil
+ glyph.lookups = nil -- when using table
end
end
end
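
The split at the end of this function leaves each glyph with two optional tables: slookups for lookups contributing a single entry (stored unwrapped) and mlookups for lookups with several entries (kept as a list). A hypothetical glyph after the pass, with invented lookup and glyph names but entry shapes as built above:

    -- purely illustrative reorganized glyph
    local glyph = {
        slookups = {
            ["ls_l_1"] = { "ligature", "f i", "f_i" },      -- single entry, unwrapped
        },
        mlookups = {
            ["ss_l_2"] = {                                  -- several entries, kept as a list
                { "substitution", "a.sc" },
                { "alternate",    "a.alt1 a.alt2" },
            },
        },
    }
    print(glyph.slookups["ls_l_1"][1])   --> ligature
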
-otf.enhancers["flatten anchor tables"] = function(data,filename)
- for k, v in next, data.glyphs do
- if v.anchors then
- for kk, vv in next, v.anchors do
+actions["reorganize glyph anchors"] = function(data,filename,raw)
+ local glyphs = data.glyphs
+ for index, udglyph in next, data.udglyphs do
+ local anchors = udglyph.anchors
+ if anchors then
+ local glyph = glyphs[index]
+ local a = { }
+ glyph.anchors = a
+ for kk, vv in next, anchors do
+ local aa = { }
+ a[kk] = aa
for kkk, vvv in next, vv do
if vvv.x or vvv.y then
- vv[kkk] = { vvv.x or 0, vvv.y or 0 }
+ aa[kkk] = { vvv.x , vvv.y }
else
+ local aaa = { }
+ aa[kkk] = aaa
for kkkk=1,#vvv do
local vvvv = vvv[kkkk]
- vvv[kkkk] = { vvvv.x or 0, vvvv.y or 0 }
+ aaa[kkkk] = { vvvv.x, vvvv.y }
end
end
end
@@ -1338,48 +1516,12 @@ otf.enhancers["flatten anchor tables"] = function(data,filename)
end
end
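
This pass rewrites the verbose { x = ..., y = ... } anchor records of the raw glyph into plain coordinate pairs (or lists of pairs for indexed base-ligature anchors). Roughly, with invented anchor type, class name and coordinates:

    -- hypothetical before/after for one anchor
    local udglyph_anchors = { basechar = { ["Anchor-1"] = { x = 215, y = 480 } } }
    local glyph_anchors   = { basechar = { ["Anchor-1"] = { 215, 480 } } }   -- shape produced above
    print(glyph_anchors.basechar["Anchor-1"][1], glyph_anchors.basechar["Anchor-1"][2])
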
-otf.enhancers["flatten feature tables"] = function(data,filename)
- -- is this needed? do we still use them at all?
- for _, tag in next, otf.glists do
- if data[tag] then
- if trace_loading then
- logs.report("load otf", "flattening %s table", tag)
- end
- for k, v in next, data[tag] do
- local features = v.features
- if features then
- for kk=1,#features do
- local vv = features[kk]
- local t = { }
- local scripts = vv.scripts
- for kkk=1,#scripts do
- local vvv = scripts[kkk]
- t[vvv.script] = vvv.langs
- end
- vv.scripts = t
- end
- end
- end
- end
- end
-end
-
-otf.enhancers.patches = otf.enhancers.patches or { }
-
-otf.enhancers["patch bugs"] = function(data,filename)
- local basename = file.basename(lower(filename))
- for pattern, action in next, otf.enhancers.patches do
- if find(basename,pattern) then
- action(data,filename)
- end
- end
-end
-
--- tex features
+--~ actions["check extra features"] = function(data,filename,raw)
+--~ -- later, ctx only
+--~ end
-fonts.otf.enhancers["enrich with features"] = function(data,filename)
- -- later, ctx only
-end
+-- -- -- -- -- --
+-- -- -- -- -- --
function otf.features.register(name,default)
otf.features.list[#otf.features.list+1] = name
@@ -1388,22 +1530,23 @@ end
-- for context this will become a task handler
-function otf.set_features(tfmdata,features)
+local lists = { -- why local
+ fonts.triggers,
+ fonts.processors,
+ fonts.manipulators,
+}
+
+function otf.setfeatures(tfmdata,features)
local processes = { }
if features and next(features) then
- local lists = { -- why local
- fonts.triggers,
- fonts.processors,
- fonts.manipulators,
- }
- local mode = tfmdata.mode or fonts.mode -- or features.mode
+ local mode = tfmdata.mode or features.mode or "base"
local initializers = fonts.initializers
local fi = initializers[mode]
if fi then
local fiotf = fi.otf
if fiotf then
local done = { }
- for l=1,4 do
+ for l=1,#lists do
local list = lists[l]
if list then
for i=1,#list do
@@ -1412,10 +1555,10 @@ function otf.set_features(tfmdata,features)
if value and fiotf[f] then -- brr
if not done[f] then -- so, we can move some to triggers
if trace_features then
- logs.report("define otf","initializing feature %s to %s for mode %s for font %s",f,tostring(value),mode or 'unknown', tfmdata.fullname or 'unknown')
+ report_otf("initializing feature %s to %s for mode %s for font %s",f,tostring(value),mode or 'unknown', tfmdata.fullname or 'unknown')
end
fiotf[f](tfmdata,value) -- can set mode (no need to pass otf)
- mode = tfmdata.mode or fonts.mode -- keep this, mode can be set local !
+ mode = tfmdata.mode or features.mode or "base"
local im = initializers[mode]
if im then
fiotf = initializers[mode].otf
@@ -1428,18 +1571,19 @@ function otf.set_features(tfmdata,features)
end
end
end
+tfmdata.mode = mode
local fm = fonts.methods[mode] -- todo: zonder node/mode otf/...
if fm then
local fmotf = fm.otf
if fmotf then
- for l=1,4 do
+ for l=1,#lists do
local list = lists[l]
if list then
for i=1,#list do
local f = list[i]
if fmotf[f] then -- brr
if trace_features then
- logs.report("define otf","installing feature handler %s for mode %s for font %s",f,mode or 'unknown', tfmdata.fullname or 'unknown')
+ report_otf("installing feature handler %s for mode %s for font %s",f,mode or 'unknown', tfmdata.fullname or 'unknown')
end
processes[#processes+1] = fmotf[f]
end
@@ -1454,71 +1598,6 @@ function otf.set_features(tfmdata,features)
return processes, features
end
-function otf.otf_to_tfm(specification)
- local name = specification.name
- local sub = specification.sub
- local filename = specification.filename
- local format = specification.format
- local features = specification.features.normal
- local cache_id = specification.hash
- local tfmdata = containers.read(tfm.cache,cache_id)
---~ print(cache_id)
- if not tfmdata then
- local otfdata = otf.load(filename,format,sub,features and features.featurefile)
- if otfdata and next(otfdata) then
- otfdata.shared = otfdata.shared or {
- featuredata = { },
- anchorhash = { },
- initialized = false,
- }
- tfmdata = otf.copy_to_tfm(otfdata,cache_id)
- if tfmdata and next(tfmdata) then
- tfmdata.unique = tfmdata.unique or { }
- tfmdata.shared = tfmdata.shared or { } -- combine
- local shared = tfmdata.shared
- shared.otfdata = otfdata
- shared.features = features -- default
- shared.dynamics = { }
- shared.processes = { }
- shared.set_dynamics = otf.set_dynamics -- fast access and makes other modules independent
- -- this will be done later anyway, but it's convenient to have
- -- them already for fast access
- tfmdata.luatex = otfdata.luatex
- tfmdata.indices = otfdata.luatex.indices
- tfmdata.unicodes = otfdata.luatex.unicodes
- tfmdata.marks = otfdata.luatex.marks
- tfmdata.originals = otfdata.luatex.originals
- tfmdata.changed = { }
- tfmdata.has_italic = otfdata.metadata.has_italic
- if not tfmdata.language then tfmdata.language = 'dflt' end
- if not tfmdata.script then tfmdata.script = 'dflt' end
- shared.processes, shared.features = otf.set_features(tfmdata,fonts.define.check(features,otf.features.default))
- end
- end
- containers.write(tfm.cache,cache_id,tfmdata)
- end
- return tfmdata
-end
-
---~ {
---~ ['boundingbox']={ 95, -458, 733, 1449 },
---~ ['class']="base",
---~ ['name']="braceleft",
---~ ['unicode']=123,
---~ ['vert_variants']={
---~ ['italic_correction']=0,
---~ ['parts']={
---~ { ['component']="uni23A9", ['endConnectorLength']=1000, ['fullAdvance']=2546, ['is_extender']=0, ['startConnectorLength']=0, }, -- bot
---~ { ['component']="uni23AA", ['endConnectorLength']=2500, ['fullAdvance']=2501, ['is_extender']=1, ['startConnectorLength']=2500, }, -- rep
---~ { ['component']="uni23A8", ['endConnectorLength']=1000, ['fullAdvance']=4688, ['is_extender']=0, ['startConnectorLength']=1000, }, -- mid
---~ { ['component']="uni23AA", ['endConnectorLength']=2500, ['fullAdvance']=2501, ['is_extender']=1, ['startConnectorLength']=2500, }, -- rep
---~ { ['component']="uni23A7", ['endConnectorLength']=0, ['fullAdvance']=2546, ['is_extender']=0, ['startConnectorLength']=1000, }, -- top
---~ },
---~ ['variants']="braceleft braceleft.vsize1 braceleft.vsize2 braceleft.vsize3 braceleft.vsize4 braceleft.vsize5 braceleft.vsize6 braceleft.vsize7",
---~ },
---~ ['width']=793,
---~ },
-
-- the first version made a top/mid/not extensible table, now we just pass on the variants data
-- and deal with it in the tfm scaler (there is no longer an extensible table anyway)
@@ -1530,12 +1609,13 @@ fonts.formats.ttc = "truetype"
fonts.formats.ttf = "truetype"
fonts.formats.otf = "opentype"
-function otf.copy_to_tfm(data,cache_id) -- we can save a copy when we reorder the tma to unicode (nasty due to one->many)
+local function copytotfm(data,cache_id) -- we can save a copy when we reorder the tma to unicode (nasty due to one->many)
if data then
local glyphs, pfminfo, metadata = data.glyphs or { }, data.pfminfo or { }, data.metadata or { }
local luatex = data.luatex
local unicodes = luatex.unicodes -- names to unicodes
local indices = luatex.indices
+ local mode = data.mode or "base"
local characters, parameters, math_parameters, descriptions = { }, { }, { }, { }
local designsize = metadata.designsize or metadata.design_size or 100
if designsize == 0 then
@@ -1622,10 +1702,10 @@ function otf.copy_to_tfm(data,cache_id) -- we can save a copy when we reorder th
end
spaceunits = tonumber(spaceunits) or tfm.units/2 -- 500 -- brrr
-- we need a runtime lookup because of running from cdrom or zip, brrr (shouldn't we use the basename then?)
- local filename = fonts.tfm.checked_filename(luatex)
+ local filename = fonts.tfm.checkedfilename(luatex)
local fontname = metadata.fontname
local fullname = metadata.fullname or fontname
- local cidinfo = data.cidinfo
+ local cidinfo = data.cidinfo -- or { }
local units = metadata.units_per_em or 1000
--
cidinfo.registry = cidinfo and cidinfo.registry or "" -- weird here, fix upstream
@@ -1646,7 +1726,7 @@ function otf.copy_to_tfm(data,cache_id) -- we can save a copy when we reorder th
if metadata.isfixedpitch then
parameters.space_stretch = 0
parameters.space_shrink = 0
- elseif otf.syncspace then --
+ elseif syncspace then --
parameters.space_stretch = spaceunits/2
parameters.space_shrink = spaceunits/3
end
@@ -1677,6 +1757,7 @@ function otf.copy_to_tfm(data,cache_id) -- we can save a copy when we reorder th
designsize = (designsize/10)*65536,
spacer = "500 units",
encodingbytes = 2,
+ mode = mode,
filename = filename,
fontname = fontname,
fullname = fullname,
@@ -1695,10 +1776,56 @@ function otf.copy_to_tfm(data,cache_id) -- we can save a copy when we reorder th
end
end
+local function otftotfm(specification)
+ local name = specification.name
+ local sub = specification.sub
+ local filename = specification.filename
+ local format = specification.format
+ local features = specification.features.normal
+ local cache_id = specification.hash
+ local tfmdata = containers.read(tfm.cache,cache_id)
+--~ print(cache_id)
+ if not tfmdata then
+ local otfdata = otf.load(filename,format,sub,features and features.featurefile)
+ if otfdata and next(otfdata) then
+ otfdata.shared = otfdata.shared or {
+ featuredata = { },
+ anchorhash = { },
+ initialized = false,
+ }
+ tfmdata = copytotfm(otfdata,cache_id)
+ if tfmdata and next(tfmdata) then
+ tfmdata.unique = tfmdata.unique or { }
+ tfmdata.shared = tfmdata.shared or { } -- combine
+ local shared = tfmdata.shared
+ shared.otfdata = otfdata
+ shared.features = features -- default
+ shared.dynamics = { }
+ shared.processes = { }
+ shared.setdynamics = otf.setdynamics -- fast access and makes other modules independent
+ -- this will be done later anyway, but it's convenient to have
+ -- them already for fast access
+ tfmdata.luatex = otfdata.luatex
+ tfmdata.indices = otfdata.luatex.indices
+ tfmdata.unicodes = otfdata.luatex.unicodes
+ tfmdata.marks = otfdata.luatex.marks
+ tfmdata.originals = otfdata.luatex.originals
+ tfmdata.changed = { }
+ tfmdata.has_italic = otfdata.metadata.has_italic
+ if not tfmdata.language then tfmdata.language = 'dflt' end
+ if not tfmdata.script then tfmdata.script = 'dflt' end
+ shared.processes, shared.features = otf.setfeatures(tfmdata,definers.check(features,otf.features.default))
+ end
+ end
+ containers.write(tfm.cache,cache_id,tfmdata)
+ end
+ return tfmdata
+end
+
otf.features.register('mathsize')
-function tfm.read_from_open_type(specification)
- local tfmtable = otf.otf_to_tfm(specification)
+function tfm.read_from_otf(specification) -- wrong namespace
+ local tfmtable = otftotfm(specification)
if tfmtable then
local otfdata = tfmtable.shared.otfdata
tfmtable.name = specification.name
@@ -1717,7 +1844,7 @@ function tfm.read_from_open_type(specification)
if p then
local ps = p * specification.textsize / 100
if trace_math then
- logs.report("define font","asked script size: %s, used: %s (%2.2f %%)",s,ps,(ps/s)*100)
+ report_otf("asked script size: %s, used: %s (%2.2f %%)",s,ps,(ps/s)*100)
end
s = ps
end
@@ -1726,7 +1853,7 @@ function tfm.read_from_open_type(specification)
if p then
local ps = p * specification.textsize / 100
if trace_math then
- logs.report("define font","asked scriptscript size: %s, used: %s (%2.2f %%)",s,ps,(ps/s)*100)
+ report_otf("asked scriptscript size: %s, used: %s (%2.2f %%)",s,ps,(ps/s)*100)
end
s = ps
end
@@ -1735,13 +1862,13 @@ function tfm.read_from_open_type(specification)
end
end
tfmtable = tfm.scale(tfmtable,s,specification.relativeid)
- if tfm.fontname_mode == "specification" then
+ if tfm.fontnamemode == "specification" then
-- not to be used in context !
local specname = specification.specification
if specname then
tfmtable.name = specname
if trace_defining then
- logs.report("define font","overloaded fontname: '%s'",specname)
+ report_otf("overloaded fontname: '%s'",specname)
end
end
end
@@ -1753,7 +1880,7 @@ end
-- helpers
-function otf.collect_lookups(otfdata,kind,script,language)
+function otf.collectlookups(otfdata,kind,script,language)
-- maybe store this in the font
local sequences = otfdata.luatex.sequences
if sequences then
diff --git a/otfl-font-oti.lua b/otfl-font-oti.lua
index 4cb2706..e531ba8 100644
--- a/otfl-font-oti.lua
+++ b/otfl-font-oti.lua
@@ -6,19 +6,17 @@ if not modules then modules = { } end modules ['font-oti'] = {
license = "see context related readme files"
}
--- i need to check features=yes|no also in relation to hashing
-
local lower = string.lower
-local otf = fonts.otf
+local fonts = fonts
-otf.default_language = 'latn'
-otf.default_script = 'dflt'
+local otf = fonts.otf
+local initializers = fonts.initializers
-local languages = otf.tables.languages
-local scripts = otf.tables.scripts
+local languages = otf.tables.languages
+local scripts = otf.tables.scripts
-function otf.features.language(tfmdata,value)
+local function set_language(tfmdata,value)
if value then
value = lower(value)
if languages[value] then
@@ -27,7 +25,7 @@ function otf.features.language(tfmdata,value)
end
end
-function otf.features.script(tfmdata,value)
+local function set_script(tfmdata,value)
if value then
value = lower(value)
if scripts[value] then
@@ -36,21 +34,24 @@ function otf.features.script(tfmdata,value)
end
end
-function otf.features.mode(tfmdata,value)
+local function set_mode(tfmdata,value)
if value then
tfmdata.mode = lower(value)
end
end
-fonts.initializers.base.otf.language = otf.features.language
-fonts.initializers.base.otf.script = otf.features.script
-fonts.initializers.base.otf.mode = otf.features.mode
-fonts.initializers.base.otf.method = otf.features.mode
+local base_initializers = initializers.base.otf
+local node_initializers = initializers.node.otf
+
+base_initializers.language = set_language
+base_initializers.script = set_script
+base_initializers.mode = set_mode
+base_initializers.method = set_mode
-fonts.initializers.node.otf.language = otf.features.language
-fonts.initializers.node.otf.script = otf.features.script
-fonts.initializers.node.otf.mode = otf.features.mode
-fonts.initializers.node.otf.method = otf.features.mode
+node_initializers.language = set_language
+node_initializers.script = set_script
+node_initializers.mode = set_mode
+node_initializers.method = set_mode
otf.features.register("features",true) -- we always do features
table.insert(fonts.processors,"features") -- we need a proper function for doing this
diff --git a/otfl-font-otn.lua b/otfl-font-otn.lua
index 6a6a046..ec246d2 100644
--- a/otfl-font-otn.lua
+++ b/otfl-font-otn.lua
@@ -124,6 +124,9 @@ local concat, insert, remove = table.concat, table.insert, table.remove
local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
local type, next, tonumber, tostring = type, next, tonumber, tostring
local lpegmatch = lpeg.match
+local random = math.random
+
+local logs, trackers, fonts, nodes, attributes = logs, trackers, fonts, nodes, attributes
local otf = fonts.otf
local tfm = fonts.tfm
@@ -145,6 +148,12 @@ local trace_steps = false trackers.register("otf.steps", function
local trace_skips = false trackers.register("otf.skips", function(v) trace_skips = v end)
local trace_directions = false trackers.register("otf.directions", function(v) trace_directions = v end)
+local report_direct = logs.new("otf direct")
+local report_subchain = logs.new("otf subchain")
+local report_chain = logs.new("otf chain")
+local report_process = logs.new("otf process")
+local report_prepare = logs.new("otf prepare")
+
trackers.register("otf.verbose_chain", function(v) otf.setcontextchain(v and "verbose") end)
trackers.register("otf.normal_chain", function(v) otf.setcontextchain(v and "normal") end)
@@ -167,13 +176,21 @@ local zwj = 0x200D
local wildcard = "*"
local default = "dflt"
-local split_at_space = lpeg.splitters[" "] or lpeg.Ct(lpeg.splitat(" ")) -- no trailing or multiple spaces anyway
+local split_at_space = lpeg.Ct(lpeg.splitat(" ")) -- no trailing or multiple spaces anyway
+
+local nodecodes = nodes.nodecodes
+local whatcodes = nodes.whatcodes
+local glyphcodes = nodes.glyphcodes
+
+local glyph_code = nodecodes.glyph
+local glue_code = nodecodes.glue
+local disc_code = nodecodes.disc
+local whatsit_code = nodecodes.whatsit
-local glyph = node.id('glyph')
-local glue = node.id('glue')
-local kern = node.id('kern')
-local disc = node.id('disc')
-local whatsit = node.id('whatsit')
+local dir_code = whatcodes.dir
+local localpar_code = whatcodes.localpar
+
+local ligature_code = glyphcodes.ligature
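
These named codes stand in for the magic numbers used by the old code further down in this file: whatsit subtype 7 for direction nodes, subtype 6 for local paragraphs, and glyph subtype 2 for ligatures. A quick sanity sketch, assuming the nodes.*codes tables provided by the node-dum shim:

    print(nodes.nodecodes.glyph == node.id("glyph"))        -- true
    print(nodes.whatcodes.dir, nodes.whatcodes.localpar)    -- 7    6
    print(nodes.glyphcodes.ligature)                        -- 2
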
local state = attributes.private('state')
local markbase = attributes.private('markbase')
@@ -184,10 +201,11 @@ local curscurs = attributes.private('curscurs')
local cursdone = attributes.private('cursdone')
local kernpair = attributes.private('kernpair')
-local set_mark = nodes.set_mark
-local set_cursive = nodes.set_cursive
-local set_kern = nodes.set_kern
-local set_pair = nodes.set_pair
+local injections = nodes.injections
+local setmark = injections.setmark
+local setcursive = injections.setcursive
+local setkern = injections.setkern
+local setpair = injections.setpair
local markonce = true
local cursonce = true
@@ -216,9 +234,10 @@ local featurevalue = false
-- we cheat a bit and assume that a font,attr combination are kind of ranged
-local context_setups = fonts.define.specify.context_setups
-local context_numbers = fonts.define.specify.context_numbers
-local context_merged = fonts.define.specify.context_merged
+local specifiers = fonts.definers.specifiers
+local contextsetups = specifiers.contextsetups
+local contextnumbers = specifiers.contextnumbers
+local contextmerged = specifiers.contextmerged
-- we cannot optimize with "start = first_character(head)" because then we don't
-- know which rlmode we're in which messes up cursive handling later on
@@ -242,10 +261,10 @@ local function logprocess(...)
if trace_steps then
registermessage(...)
end
- logs.report("otf direct",...)
+ report_direct(...)
end
local function logwarning(...)
- logs.report("otf direct",...)
+ report_direct(...)
end
local function gref(n)
@@ -263,9 +282,9 @@ local function gref(n)
local num, nam = { }, { }
for i=1,#n do
local ni = n[i]
- num[#num+1] = format("U+%04X",ni)
- local dni = descriptions[ni]
- nam[#num] = (dni and dni.name) or "?"
+ local di = descriptions[ni]
+ num[i] = format("U+%04X",ni)
+ nam[i] = di and di.name or "?"
end
return format("%s (%s)",concat(num," "), concat(nam," "))
end
@@ -303,7 +322,7 @@ local function markstoligature(kind,lookupname,start,stop,char)
snext.prev = current
end
start.prev, stop.next = nil, nil
- current.char, current.subtype, current.components = char, 2, start
+ current.char, current.subtype, current.components = char, ligature_code, start
return keep
end
@@ -313,16 +332,16 @@ local function toligature(kind,lookupname,start,stop,char,markflag,discfound) --
--~ local lignode = copy_node(start)
--~ lignode.font = start.font
--~ lignode.char = char
---~ lignode.subtype = 2
+--~ lignode.subtype = ligature_code
--~ start = node.do_ligature_n(start, stop, lignode)
---~ if start.id == disc then
+--~ if start.id == disc_code then
--~ local prev = start.prev
--~ start = start.next
--~ end
if discfound then
-- print("start->stop",nodes.tosequence(start,stop))
local lignode = copy_node(start)
- lignode.font, lignode.char, lignode.subtype = start.font, char, 2
+ lignode.font, lignode.char, lignode.subtype = start.font, char, ligature_code
local next, prev = stop.next, start.prev
stop.next = nil
lignode = node.do_ligature_n(start, stop, lignode)
@@ -344,7 +363,7 @@ local function toligature(kind,lookupname,start,stop,char,markflag,discfound) --
snext.prev = current
end
start.prev, stop.next = nil, nil
- current.char, current.subtype, current.components = char, 2, start
+ current.char, current.subtype, current.components = char, ligature_code, start
local head = current
if deletemarks then
if trace_marks then
@@ -370,7 +389,7 @@ local function toligature(kind,lookupname,start,stop,char,markflag,discfound) --
start = start.next
end
start = current.next
- while start and start.id == glyph do
+ while start and start.id == glyph_code do
if marks[start.char] then
set_attribute(start,markdone,i)
if trace_marks then
@@ -401,7 +420,7 @@ end
local function alternative_glyph(start,alternatives,kind,chainname,chainlookupname,lookupname) -- chainname and chainlookupname optional
local value, choice, n = featurevalue or tfmdata.shared.features[kind], nil, #alternatives -- global value, brrr
if value == "random" then
- local r = math.random(1,n)
+ local r = random(1,n)
value, choice = format("random, choice %s",r), alternatives[r]
elseif value == "first" then
value, choice = format("first, choice %s",1), alternatives[1]
@@ -465,7 +484,7 @@ function handlers.gsub_ligature(start,kind,lookupname,ligature,sequence) --or ma
if marks[startchar] then
while s do
local id = s.id
- if id == glyph and s.subtype<256 then
+ if id == glyph_code and s.subtype<256 then
if s.font == currentfont then
local char = s.char
local lg = ligature[1][char]
@@ -497,7 +516,7 @@ function handlers.gsub_ligature(start,kind,lookupname,ligature,sequence) --or ma
local skipmark = sequence.flags[1]
while s do
local id = s.id
- if id == glyph and s.subtype<256 then
+ if id == glyph_code and s.subtype<256 then
if s.font == currentfont then
local char = s.char
if skipmark and marks[char] then
@@ -515,7 +534,7 @@ function handlers.gsub_ligature(start,kind,lookupname,ligature,sequence) --or ma
else
break
end
- elseif id == disc then
+ elseif id == disc_code then
discfound = true
s = s.next
else
@@ -545,12 +564,12 @@ function handlers.gpos_mark2base(start,kind,lookupname,markanchors,sequence)
local markchar = start.char
if marks[markchar] then
local base = start.prev -- [glyph] [start=mark]
- if base and base.id == glyph and base.subtype<256 and base.font == currentfont then
+ if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then
local basechar = base.char
if marks[basechar] then
while true do
base = base.prev
- if base and base.id == glyph and base.subtype<256 and base.font == currentfont then
+ if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then
basechar = base.char
if not marks[basechar] then
break
@@ -575,7 +594,7 @@ function handlers.gpos_mark2base(start,kind,lookupname,markanchors,sequence)
if al[anchor] then
local ma = markanchors[anchor]
if ma then
- local dx, dy, bound = set_mark(start,base,tfmdata.factor,rlmode,ba,ma)
+ local dx, dy, bound = setmark(start,base,tfmdata.factor,rlmode,ba,ma)
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%s,%s)",
pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
@@ -590,7 +609,7 @@ function handlers.gpos_mark2base(start,kind,lookupname,markanchors,sequence)
end
else -- if trace_bugs then
-- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
- fonts.register_message(currentfont,basechar,"no base anchors")
+ fonts.registermessage(currentfont,basechar,"no base anchors")
end
elseif trace_bugs then
logwarning("%s: prev node is no char",pref(kind,lookupname))
@@ -607,13 +626,13 @@ function handlers.gpos_mark2ligature(start,kind,lookupname,markanchors,sequence)
if marks[markchar] then
local base = start.prev -- [glyph] [optional marks] [start=mark]
local index = 1
- if base and base.id == glyph and base.subtype<256 and base.font == currentfont then
+ if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then
local basechar = base.char
if marks[basechar] then
index = index + 1
while true do
base = base.prev
- if base and base.id == glyph and base.subtype<256 and base.font == currentfont then
+ if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then
basechar = base.char
if marks[basechar] then
index = index + 1
@@ -643,7 +662,7 @@ function handlers.gpos_mark2ligature(start,kind,lookupname,markanchors,sequence)
if ma then
ba = ba[index]
if ba then
- local dx, dy, bound = set_mark(start,base,tfmdata.factor,rlmode,ba,ma,index)
+ local dx, dy, bound = setmark(start,base,tfmdata.factor,rlmode,ba,ma,index)
if trace_marks then
logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%s,%s)",
pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy)
@@ -660,7 +679,7 @@ function handlers.gpos_mark2ligature(start,kind,lookupname,markanchors,sequence)
end
else -- if trace_bugs then
-- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
- fonts.register_message(currentfont,basechar,"no base anchors")
+ fonts.registermessage(currentfont,basechar,"no base anchors")
end
elseif trace_bugs then
logwarning("%s: prev node is no char",pref(kind,lookupname))
@@ -677,7 +696,7 @@ function handlers.gpos_mark2mark(start,kind,lookupname,markanchors,sequence)
--~ local alreadydone = markonce and has_attribute(start,markmark)
--~ if not alreadydone then
local base = start.prev -- [glyph] [basemark] [start=mark]
- if base and base.id == glyph and base.subtype<256 and base.font == currentfont then -- subtype test can go
+ if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then -- subtype test can go
local basechar = base.char
local baseanchors = descriptions[basechar]
if baseanchors then
@@ -690,7 +709,7 @@ function handlers.gpos_mark2mark(start,kind,lookupname,markanchors,sequence)
if al[anchor] then
local ma = markanchors[anchor]
if ma then
- local dx, dy, bound = set_mark(start,base,tfmdata.factor,rlmode,ba,ma)
+ local dx, dy, bound = setmark(start,base,tfmdata.factor,rlmode,ba,ma)
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%s,%s)",
pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
@@ -706,7 +725,7 @@ function handlers.gpos_mark2mark(start,kind,lookupname,markanchors,sequence)
end
else -- if trace_bugs then
-- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
- fonts.register_message(currentfont,basechar,"no base anchors")
+ fonts.registermessage(currentfont,basechar,"no base anchors")
end
elseif trace_bugs then
logwarning("%s: prev node is no mark",pref(kind,lookupname))
@@ -731,7 +750,7 @@ function handlers.gpos_cursive(start,kind,lookupname,exitanchors,sequence) -- to
end
else
local nxt = start.next
- while not done and nxt and nxt.id == glyph and nxt.subtype<256 and nxt.font == currentfont do
+ while not done and nxt and nxt.id == glyph_code and nxt.subtype<256 and nxt.font == currentfont do
local nextchar = nxt.char
if marks[nextchar] then
-- should not happen (maybe warning)
@@ -748,7 +767,7 @@ function handlers.gpos_cursive(start,kind,lookupname,exitanchors,sequence) -- to
if al[anchor] then
local exit = exitanchors[anchor]
if exit then
- local dx, dy, bound = set_cursive(start,nxt,tfmdata.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ local dx, dy, bound = setcursive(start,nxt,tfmdata.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
if trace_cursive then
logprocess("%s: moving %s to %s cursive (%s,%s) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
end
@@ -761,7 +780,7 @@ function handlers.gpos_cursive(start,kind,lookupname,exitanchors,sequence) -- to
end
else -- if trace_bugs then
-- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar))
- fonts.register_message(currentfont,startchar,"no entry anchors")
+ fonts.registermessage(currentfont,startchar,"no entry anchors")
end
break
end
@@ -778,7 +797,7 @@ end
function handlers.gpos_single(start,kind,lookupname,kerns,sequence)
local startchar = start.char
- local dx, dy, w, h = set_pair(start,tfmdata.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
+ local dx, dy, w, h = setpair(start,tfmdata.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
if trace_kerns then
logprocess("%s: shifting single %s by (%s,%s) and correction (%s,%s)",pref(kind,lookupname),gref(startchar),dx,dy,w,h)
end
@@ -794,9 +813,9 @@ function handlers.gpos_pair(start,kind,lookupname,kerns,sequence)
else
local prev, done = start, false
local factor = tfmdata.factor
- while snext and snext.id == glyph and snext.subtype<256 and snext.font == currentfont do
+ while snext and snext.id == glyph_code and snext.subtype<256 and snext.font == currentfont do
local nextchar = snext.char
-local krn = kerns[nextchar]
+ local krn = kerns[nextchar]
if not krn and marks[nextchar] then
prev = snext
snext = snext.next
@@ -809,23 +828,23 @@ local krn = kerns[nextchar]
local a, b = krn[3], krn[4]
if a and #a > 0 then
local startchar = start.char
- local x, y, w, h = set_pair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
+ local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
if trace_kerns then
logprocess("%s: shifting first of pair %s and %s by (%s,%s) and correction (%s,%s)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
end
end
if b and #b > 0 then
local startchar = start.char
- local x, y, w, h = set_pair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
+ local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
if trace_kerns then
logprocess("%s: shifting second of pair %s and %s by (%s,%s) and correction (%s,%s)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
end
end
else
- logs.report("%s: check this out (old kern stuff)",pref(kind,lookupname))
+ report_process("%s: check this out (old kern stuff)",pref(kind,lookupname))
local a, b = krn[3], krn[7]
if a and a ~= 0 then
- local k = set_kern(snext,factor,rlmode,a)
+ local k = setkern(snext,factor,rlmode,a)
if trace_kerns then
logprocess("%s: inserting first kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
end
@@ -836,7 +855,7 @@ local krn = kerns[nextchar]
end
done = true
elseif krn ~= 0 then
- local k = set_kern(snext,factor,rlmode,krn)
+ local k = setkern(snext,factor,rlmode,krn)
if trace_kerns then
logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
end
@@ -861,12 +880,11 @@ local function logprocess(...)
if trace_steps then
registermessage(...)
end
- logs.report("otf subchain",...)
-end
-local function logwarning(...)
- logs.report("otf subchain",...)
+ report_subchain(...)
end
+local logwarning = report_subchain
+
-- ['coverage']={
-- ['after']={ "r" },
-- ['before']={ "q" },
@@ -904,12 +922,11 @@ local function logprocess(...)
if trace_steps then
registermessage(...)
end
- logs.report("otf chain",...)
-end
-local function logwarning(...)
- logs.report("otf chain",...)
+ report_chain(...)
end
+local logwarning = report_chain
+
-- We could share functions but that would lead to extra function calls with many
-- arguments, redundant tests and confusing messages.
@@ -976,7 +993,7 @@ function chainprocs.gsub_single(start,stop,kind,chainname,currentcontext,cache,c
local current = start
local subtables = currentlookup.subtables
while current do
- if current.id == glyph then
+ if current.id == glyph_code then
local currentchar = current.char
local lookupname = subtables[1]
local replacement = cache.gsub_single[lookupname]
@@ -1064,7 +1081,7 @@ function chainprocs.gsub_alternate(start,stop,kind,chainname,currentcontext,cach
local current = start
local subtables = currentlookup.subtables
while current do
- if current.id == glyph then
+ if current.id == glyph_code then
local currentchar = current.char
local lookupname = subtables[1]
local alternatives = cache.gsub_alternate[lookupname]
@@ -1121,7 +1138,7 @@ function chainprocs.gsub_ligature(start,stop,kind,chainname,currentcontext,cache
local s, discfound, last, nofreplacements = start.next, false, stop, 0
while s do
local id = s.id
- if id == disc then
+ if id == disc_code then
s = s.next
discfound = true
else
@@ -1182,12 +1199,12 @@ function chainprocs.gpos_mark2base(start,stop,kind,chainname,currentcontext,cach
end
if markanchors then
local base = start.prev -- [glyph] [start=mark]
- if base and base.id == glyph and base.subtype<256 and base.font == currentfont then
+ if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then
local basechar = base.char
if marks[basechar] then
while true do
base = base.prev
- if base and base.id == glyph and base.subtype<256 and base.font == currentfont then
+ if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then
basechar = base.char
if not marks[basechar] then
break
@@ -1209,7 +1226,7 @@ function chainprocs.gpos_mark2base(start,stop,kind,chainname,currentcontext,cach
if al[anchor] then
local ma = markanchors[anchor]
if ma then
- local dx, dy, bound = set_mark(start,base,tfmdata.factor,rlmode,ba,ma)
+ local dx, dy, bound = setmark(start,base,tfmdata.factor,rlmode,ba,ma)
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%s,%s)",
cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
@@ -1247,13 +1264,13 @@ function chainprocs.gpos_mark2ligature(start,stop,kind,chainname,currentcontext,
if markanchors then
local base = start.prev -- [glyph] [optional marks] [start=mark]
local index = 1
- if base and base.id == glyph and base.subtype<256 and base.font == currentfont then
+ if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then
local basechar = base.char
if marks[basechar] then
index = index + 1
while true do
base = base.prev
- if base and base.id == glyph and base.subtype<256 and base.font == currentfont then
+ if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then
basechar = base.char
if marks[basechar] then
index = index + 1
@@ -1282,7 +1299,7 @@ function chainprocs.gpos_mark2ligature(start,stop,kind,chainname,currentcontext,
if ma then
ba = ba[index]
if ba then
- local dx, dy, bound = set_mark(start,base,tfmdata.factor,rlmode,ba,ma,index)
+ local dx, dy, bound = setmark(start,base,tfmdata.factor,rlmode,ba,ma,index)
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%s,%s)",
cref(kind,chainname,chainlookupname,lookupname),anchor,a or bound,gref(markchar),gref(basechar),index,dx,dy)
@@ -1323,7 +1340,7 @@ function chainprocs.gpos_mark2mark(start,stop,kind,chainname,currentcontext,cach
end
if markanchors then
local base = start.prev -- [glyph] [basemark] [start=mark]
- if base and base.id == glyph and base.subtype<256 and base.font == currentfont then -- subtype test can go
+ if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then -- subtype test can go
local basechar = base.char
local baseanchors = descriptions[basechar].anchors
if baseanchors then
@@ -1334,7 +1351,7 @@ function chainprocs.gpos_mark2mark(start,stop,kind,chainname,currentcontext,cach
if al[anchor] then
local ma = markanchors[anchor]
if ma then
- local dx, dy, bound = set_mark(start,base,tfmdata.factor,rlmode,ba,ma)
+ local dx, dy, bound = setmark(start,base,tfmdata.factor,rlmode,ba,ma)
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%s,%s)",
cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
@@ -1383,7 +1400,7 @@ function chainprocs.gpos_cursive(start,stop,kind,chainname,currentcontext,cache,
end
else
local nxt = start.next
- while not done and nxt and nxt.id == glyph and nxt.subtype<256 and nxt.font == currentfont do
+ while not done and nxt and nxt.id == glyph_code and nxt.subtype<256 and nxt.font == currentfont do
local nextchar = nxt.char
if marks[nextchar] then
-- should not happen (maybe warning)
@@ -1400,7 +1417,7 @@ function chainprocs.gpos_cursive(start,stop,kind,chainname,currentcontext,cache,
if al[anchor] then
local exit = exitanchors[anchor]
if exit then
- local dx, dy, bound = set_cursive(start,nxt,tfmdata.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ local dx, dy, bound = setcursive(start,nxt,tfmdata.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
if trace_cursive then
logprocess("%s: moving %s to %s cursive (%s,%s) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
end
@@ -1413,7 +1430,7 @@ function chainprocs.gpos_cursive(start,stop,kind,chainname,currentcontext,cache,
end
else -- if trace_bugs then
-- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar))
- fonts.register_message(currentfont,startchar,"no entry anchors")
+ fonts.registermessage(currentfont,startchar,"no entry anchors")
end
break
end
@@ -1439,7 +1456,7 @@ function chainprocs.gpos_single(start,stop,kind,chainname,currentcontext,cache,c
if kerns then
kerns = kerns[startchar]
if kerns then
- local dx, dy, w, h = set_pair(start,tfmdata.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
+ local dx, dy, w, h = setpair(start,tfmdata.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
if trace_kerns then
logprocess("%s: shifting single %s by (%s,%s) and correction (%s,%s)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h)
end
@@ -1463,7 +1480,7 @@ function chainprocs.gpos_pair(start,stop,kind,chainname,currentcontext,cache,cur
if kerns then
local prev, done = start, false
local factor = tfmdata.factor
- while snext and snext.id == glyph and snext.subtype<256 and snext.font == currentfont do
+ while snext and snext.id == glyph_code and snext.subtype<256 and snext.font == currentfont do
local nextchar = snext.char
local krn = kerns[nextchar]
if not krn and marks[nextchar] then
@@ -1477,23 +1494,23 @@ function chainprocs.gpos_pair(start,stop,kind,chainname,currentcontext,cache,cur
local a, b = krn[3], krn[4]
if a and #a > 0 then
local startchar = start.char
- local x, y, w, h = set_pair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
+ local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
if trace_kerns then
logprocess("%s: shifting first of pair %s and %s by (%s,%s) and correction (%s,%s)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
end
end
if b and #b > 0 then
local startchar = start.char
- local x, y, w, h = set_pair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
+ local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
if trace_kerns then
logprocess("%s: shifting second of pair %s and %s by (%s,%s) and correction (%s,%s)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
end
end
else
- logs.report("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname))
+ report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname))
local a, b = krn[3], krn[7]
if a and a ~= 0 then
- local k = set_kern(snext,factor,rlmode,a)
+ local k = setkern(snext,factor,rlmode,a)
if trace_kerns then
logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
end
@@ -1504,7 +1521,7 @@ function chainprocs.gpos_pair(start,stop,kind,chainname,currentcontext,cache,cur
end
done = true
elseif krn ~= 0 then
- local k = set_kern(snext,factor,rlmode,krn)
+ local k = setkern(snext,factor,rlmode,krn)
if trace_kerns then
logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
end
@@ -1551,7 +1568,7 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
-- f..l = mid string
if s == 1 then
-- never happens
- match = current.id == glyph and current.subtype<256 and current.font == currentfont and seq[1][current.char]
+ match = current.id == glyph_code and current.subtype<256 and current.font == currentfont and seq[1][current.char]
else
-- todo: better space check (maybe check for glue)
local f, l = ck[4], ck[5]
@@ -1565,12 +1582,12 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
-- we cannot optimize for n=2 because there can be disc nodes
-- if not someskip and n == l then
-- -- n=2 and no skips then faster loop
- -- match = last and last.id == glyph and last.subtype<256 and last.font == currentfont and seq[n][last.char]
+ -- match = last and last.id == glyph_code and last.subtype<256 and last.font == currentfont and seq[n][last.char]
-- else
while n <= l do
if last then
local id = last.id
- if id == glyph then
+ if id == glyph_code then
if last.subtype<256 and last.font == currentfont then
local char = last.char
local ccd = descriptions[char]
@@ -1596,7 +1613,7 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
else
match = false break
end
- elseif id == disc then -- what to do with kerns?
+ elseif id == disc_code then -- what to do with kerns?
last = last.next
else
match = false break
@@ -1615,7 +1632,7 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
while n >= 1 do
if prev then
local id = prev.id
- if id == glyph then
+ if id == glyph_code then
if prev.subtype<256 and prev.font == currentfont then -- normal char
local char = prev.char
local ccd = descriptions[char]
@@ -1637,7 +1654,7 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
else
match = false break
end
- elseif id == disc then
+ elseif id == disc_code then
-- skip 'm
elseif seq[n][32] then
n = n -1
@@ -1670,7 +1687,7 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
while n <= s do
if current then
local id = current.id
- if id == glyph then
+ if id == glyph_code then
if current.subtype<256 and current.font == currentfont then -- normal char
local char = current.char
local ccd = descriptions[char]
@@ -1692,7 +1709,7 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
else
match = false break
end
- elseif id == disc then
+ elseif id == disc_code then
-- skip 'm
elseif seq[n][32] then -- brrr
n = n + 1
@@ -1768,22 +1785,22 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
local i = 1
repeat
-if skipped then
- while true do
- local char = start.char
- local ccd = descriptions[char]
- if ccd then
- local class = ccd.class
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- start = start.next
- else
- break
- end
- else
- break
- end
- end
-end
+ if skipped then
+ while true do
+ local char = start.char
+ local ccd = descriptions[char]
+ if ccd then
+ local class = ccd.class
+ if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
+ start = start.next
+ else
+ break
+ end
+ else
+ break
+ end
+ end
+ end
local chainlookupname = chainlookups[i]
local chainlookup = lookuptable[chainlookupname]
local cp = chainmores[chainlookup.type]
@@ -1864,12 +1881,11 @@ local function logprocess(...)
if trace_steps then
registermessage(...)
end
- logs.report("otf process",...)
-end
-local function logwarning(...)
- logs.report("otf process",...)
+ report_process(...)
end
+local logwarning = report_process
+
local function report_missing_cache(typ,lookup)
local f = missing[currentfont] if not f then f = { } missing[currentfont] = f end
local t = f[typ] if not t then t = { } f[typ] = t end
@@ -1909,8 +1925,8 @@ function fonts.methods.node.otf.features(head,font,attr)
local script, language, s_enabled, a_enabled, dyn
local attribute_driven = attr and attr ~= 0
if attribute_driven then
- local features = context_setups[context_numbers[attr]] -- could be a direct list
- dyn = context_merged[attr] or 0
+ local features = contextsetups[contextnumbers[attr]] -- could be a direct list
+ dyn = contextmerged[attr] or 0
language, script = features.language or "dflt", features.script or "dflt"
a_enabled = features -- shared.features -- can be made local to the resolver
if dyn == 2 or dyn == -2 then
@@ -1967,7 +1983,7 @@ function fonts.methods.node.otf.features(head,font,attr)
end
if trace_applied then
local typ, action = match(sequence.type,"(.*)_(.*)")
- logs.report("otf node mode",
+ report_process(
"%s font: %03i, dynamic: %03i, kind: %s, lookup: %3i, script: %-4s, language: %-4s (%-4s), type: %s, action: %s, name: %s",
(valid and "+") or "-",font,attr or 0,kind,s,script,language,what,typ,action,sequence.name)
end
@@ -1995,7 +2011,7 @@ function fonts.methods.node.otf.features(head,font,attr)
local start = find_node_tail(head) -- slow (we can store tail because there's always a skip at the end): todo
while start do
local id = start.id
- if id == glyph then
+ if id == glyph_code then
if start.subtype<256 and start.font == font then
local a = has_attribute(start,0)
if a then
@@ -2044,7 +2060,7 @@ function fonts.methods.node.otf.features(head,font,attr)
else
while start do
local id = start.id
- if id == glyph then
+ if id == glyph_code then
if start.subtype<256 and start.font == font then
local a = has_attribute(start,0)
if a then
@@ -2069,7 +2085,7 @@ function fonts.methods.node.otf.features(head,font,attr)
else
start = start.next
end
- -- elseif id == glue then
+ -- elseif id == glue_code then
-- if p[5] then -- chain
-- local pc = pp[32]
-- if pc then
@@ -2084,9 +2100,9 @@ function fonts.methods.node.otf.features(head,font,attr)
-- else
-- start = start.next
-- end
- elseif id == whatsit then
+ elseif id == whatsit_code then
local subtype = start.subtype
- if subtype == 7 then
+ if subtype == dir_code then
local dir = start.dir
if dir == "+TRT" or dir == "+TLT" then
insert(txtdir,dir)
@@ -2102,9 +2118,9 @@ function fonts.methods.node.otf.features(head,font,attr)
rlmode = pardir
end
if trace_directions then
- logs.report("fonts","directions after textdir %s: pardir=%s, txtdir=%s:%s, rlmode=%s",dir,pardir,#txtdir,txtdir[#txtdir] or "unset",rlmode)
+ report_process("directions after textdir %s: pardir=%s, txtdir=%s:%s, rlmode=%s",dir,pardir,#txtdir,txtdir[#txtdir] or "unset",rlmode)
end
- elseif subtype == 6 then
+ elseif subtype == localpar_code then
local dir = start.dir
if dir == "TRT" then
pardir = -1
@@ -2116,7 +2132,7 @@ function fonts.methods.node.otf.features(head,font,attr)
rlmode = pardir
--~ txtdir = { }
if trace_directions then
- logs.report("fonts","directions after pardir %s: pardir=%s, txtdir=%s:%s, rlmode=%s",dir,pardir,#txtdir,txtdir[#txtdir] or "unset",rlmode)
+ report_process("directions after pardir %s: pardir=%s, txtdir=%s:%s, rlmode=%s",dir,pardir,#txtdir,txtdir[#txtdir] or "unset",rlmode)
end
end
start = start.next
@@ -2128,7 +2144,7 @@ function fonts.methods.node.otf.features(head,font,attr)
else
while start do
local id = start.id
- if id == glyph then
+ if id == glyph_code then
if start.subtype<256 and start.font == font then
local a = has_attribute(start,0)
if a then
@@ -2162,7 +2178,7 @@ function fonts.methods.node.otf.features(head,font,attr)
else
start = start.next
end
- -- elseif id == glue then
+ -- elseif id == glue_code then
-- if p[5] then -- chain
-- local pc = pp[32]
-- if pc then
@@ -2177,9 +2193,9 @@ function fonts.methods.node.otf.features(head,font,attr)
-- else
-- start = start.next
-- end
- elseif id == whatsit then
+ elseif id == whatsit_code then
local subtype = start.subtype
- if subtype == 7 then
+ if subtype == dir_code then
local dir = start.dir
if dir == "+TRT" or dir == "+TLT" then
insert(txtdir,dir)
@@ -2195,9 +2211,9 @@ function fonts.methods.node.otf.features(head,font,attr)
rlmode = pardir
end
if trace_directions then
- logs.report("fonts","directions after textdir %s: pardir=%s, txtdir=%s:%s, rlmode=%s",dir,pardir,#txtdir,txtdir[#txtdir] or "unset",rlmode)
+ report_process("directions after textdir %s: pardir=%s, txtdir=%s:%s, rlmode=%s",dir,pardir,#txtdir,txtdir[#txtdir] or "unset",rlmode)
end
- elseif subtype == 6 then
+ elseif subtype == localpar_code then
local dir = start.dir
if dir == "TRT" then
pardir = -1
@@ -2209,7 +2225,7 @@ function fonts.methods.node.otf.features(head,font,attr)
rlmode = pardir
--~ txtdir = { }
if trace_directions then
- logs.report("fonts","directions after pardir %s: pardir=%s, txtdir=%s:%s, rlmode=%s",dir,pardir,#txtdir,txtdir[#txtdir] or "unset",rlmode)
+ report_process("directions after pardir %s: pardir=%s, txtdir=%s:%s, rlmode=%s",dir,pardir,#txtdir,txtdir[#txtdir] or "unset",rlmode)
end
end
start = start.next
@@ -2238,13 +2254,14 @@ otf.features.prepare = { }
local function split(replacement,original,cache,unicodes)
-- we can cache this too, but not the same (although unicode is a unique enough hash)
- local o, t, n = { }, { }, 0
+ local o, t, n, no = { }, { }, 0, 0
for s in gmatch(original,"[^ ]+") do
local us = unicodes[s]
+ no = no + 1
if type(us) == "number" then -- tonumber(us)
- o[#o+1] = us
+ o[no] = us
else
- o[#o+1] = us[1]
+ o[no] = us[1]
end
end
for s in gmatch(replacement,"[^ ]+") do
@@ -2261,9 +2278,11 @@ end
local function uncover(covers,result,cache,unicodes)
-- lpeg hardly faster (.005 sec on mk)
+ local nofresults = #result
for n=1,#covers do
local c = covers[n]
local cc = cache[c]
+ nofresults = nofresults + 1
if not cc then
local t = { }
for s in gmatch(c,"[^ ]+") do
@@ -2277,9 +2296,9 @@ local function uncover(covers,result,cache,unicodes)
end
end
cache[c] = t
- result[#result+1] = t
+ result[nofresults] = t
else
- result[#result+1] = cc
+ result[nofresults] = cc
end
end
end
@@ -2317,46 +2336,48 @@ local function prepare_lookups(tfmdata)
if not s then s = { } single[lookup] = s end
s[old] = new
--~ if trace_lookups then
- --~ logs.report("define otf","lookup %s: substitution %s => %s",lookup,old,new)
+ --~ report_prepare("lookup %s: substitution %s => %s",lookup,old,new)
--~ end
end,
multiple = function (p,lookup,glyph,unicode)
- local old, new = unicode, { }
+ local old, new, nnew = unicode, { }, 0
local m = multiple[lookup]
if not m then m = { } multiple[lookup] = m end
m[old] = new
for pc in gmatch(p[2],"[^ ]+") do
local upc = unicodes[pc]
+ nnew = nnew + 1
if type(upc) == "number" then
- new[#new+1] = upc
+ new[nnew] = upc
else
- new[#new+1] = upc[1]
+ new[nnew] = upc[1]
end
end
--~ if trace_lookups then
- --~ logs.report("define otf","lookup %s: multiple %s => %s",lookup,old,concat(new," "))
+ --~ report_prepare("lookup %s: multiple %s => %s",lookup,old,concat(new," "))
--~ end
end,
alternate = function(p,lookup,glyph,unicode)
- local old, new = unicode, { }
+ local old, new, nnew = unicode, { }, 0
local a = alternate[lookup]
if not a then a = { } alternate[lookup] = a end
a[old] = new
for pc in gmatch(p[2],"[^ ]+") do
local upc = unicodes[pc]
+ nnew = nnew + 1
if type(upc) == "number" then
- new[#new+1] = upc
+ new[nnew] = upc
else
- new[#new+1] = upc[1]
+ new[nnew] = upc[1]
end
end
--~ if trace_lookups then
- --~ logs.report("define otf","lookup %s: alternate %s => %s",lookup,old,concat(new,"|"))
+ --~ report_prepare("lookup %s: alternate %s => %s",lookup,old,concat(new,"|"))
--~ end
end,
ligature = function (p,lookup,glyph,unicode)
--~ if trace_lookups then
- --~ logs.report("define otf","lookup %s: ligature %s => %s",lookup,p[2],glyph.name)
+ --~ report_prepare("lookup %s: ligature %s => %s",lookup,p[2],glyph.name)
--~ end
local first = true
local t = ligature[lookup]
@@ -2365,7 +2386,7 @@ local function prepare_lookups(tfmdata)
if first then
local u = unicodes[s]
if not u then
- logs.report("define otf","lookup %s: ligature %s => %s ignored due to invalid unicode",lookup,p[2],glyph.name)
+ report_prepare("lookup %s: ligature %s => %s ignored due to invalid unicode",lookup,p[2],glyph.name)
break
elseif type(u) == "number" then
if not t[u] then
@@ -2435,7 +2456,7 @@ local function prepare_lookups(tfmdata)
end
end
--~ if trace_lookups then
- --~ logs.report("define otf","lookup %s: pair for U+%04X",lookup,unicode)
+ --~ report_prepare("lookup %s: pair for U+%04X",lookup,unicode)
--~ end
end,
}
@@ -2456,14 +2477,14 @@ local function prepare_lookups(tfmdata)
end
end
end
- local list = glyph.mykerns
+ local list = glyph.kerns
if list then
for lookup, krn in next, list do
local k = kerns[lookup]
if not k then k = { } kerns[lookup] = k end
k[unicode] = krn -- ref to glyph, saves lookup
--~ if trace_lookups then
- --~ logs.report("define otf","lookup %s: kern for U+%04X",lookup,unicode)
+ --~ report_prepare("lookup %s: kern for U+%04X",lookup,unicode)
--~ end
end
end
@@ -2479,7 +2500,7 @@ local function prepare_lookups(tfmdata)
if not f then f = { } mark[lookup] = f end
f[unicode] = anchors -- ref to glyph, saves lookup
--~ if trace_lookups then
- --~ logs.report("define otf","lookup %s: mark anchor %s for U+%04X",lookup,name,unicode)
+ --~ report_prepare("lookup %s: mark anchor %s for U+%04X",lookup,name,unicode)
--~ end
end
end
@@ -2493,7 +2514,7 @@ local function prepare_lookups(tfmdata)
if not f then f = { } cursive[lookup] = f end
f[unicode] = anchors -- ref to glyph, saves lookup
--~ if trace_lookups then
- --~ logs.report("define otf","lookup %s: exit anchor %s for U+%04X",lookup,name,unicode)
+ --~ report_prepare("lookup %s: exit anchor %s for U+%04X",lookup,name,unicode)
--~ end
end
end
@@ -2526,7 +2547,7 @@ local function prepare_contextchains(tfmdata)
for lookupname, lookupdata in next, otfdata.lookups do
local lookuptype = lookupdata.type
if not lookuptype then
- logs.report("otf process","missing lookuptype for %s",lookupname)
+ report_prepare("missing lookuptype for %s",lookupname)
else
local rules = lookupdata.rules
if rules then
@@ -2534,14 +2555,14 @@ local function prepare_contextchains(tfmdata)
-- contextchain[lookupname][unicode]
if fmt == "coverage" then
if lookuptype ~= "chainsub" and lookuptype ~= "chainpos" then
- logs.report("otf process","unsupported coverage %s for %s",lookuptype,lookupname)
+ report_prepare("unsupported coverage %s for %s",lookuptype,lookupname)
else
local contexts = contextchain[lookupname]
if not contexts then
contexts = { }
contextchain[lookupname] = contexts
end
- local t = { }
+ local t, nt = { }, 0
for nofrules=1,#rules do -- does #rules>1 happen often?
local rule = rules[nofrules]
local coverage = rule.coverage
@@ -2557,7 +2578,8 @@ local function prepare_contextchains(tfmdata)
uncover(after,sequence,cache,unicodes)
end
if sequence[1] then
- t[#t+1] = { nofrules, lookuptype, sequence, start, stop, rule.lookups }
+ nt = nt + 1
+ t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups }
for unic, _ in next, sequence[start] do
local cu = contexts[unic]
if not cu then
@@ -2570,14 +2592,14 @@ local function prepare_contextchains(tfmdata)
end
elseif fmt == "reversecoverage" then
if lookuptype ~= "reversesub" then
- logs.report("otf process","unsupported reverse coverage %s for %s",lookuptype,lookupname)
+ report_prepare("unsupported reverse coverage %s for %s",lookuptype,lookupname)
else
local contexts = reversecontextchain[lookupname]
if not contexts then
contexts = { }
reversecontextchain[lookupname] = contexts
end
- local t = { }
+ local t, nt = { }, 0
for nofrules=1,#rules do
local rule = rules[nofrules]
local reversecoverage = rule.reversecoverage
@@ -2597,7 +2619,8 @@ local function prepare_contextchains(tfmdata)
end
if sequence[1] then
-- this is different from normal coverage, we assume only replacements
- t[#t+1] = { nofrules, lookuptype, sequence, start, stop, rule.lookups, replacements }
+ nt = nt + 1
+ t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups, replacements }
for unic, _ in next, sequence[start] do
local cu = contexts[unic]
if not cu then
@@ -2610,14 +2633,14 @@ local function prepare_contextchains(tfmdata)
end
elseif fmt == "glyphs" then
if lookuptype ~= "chainsub" and lookuptype ~= "chainpos" then
- logs.report("otf process","unsupported coverage %s for %s",lookuptype,lookupname)
+ report_prepare("unsupported coverage %s for %s",lookuptype,lookupname)
else
local contexts = contextchain[lookupname]
if not contexts then
contexts = { }
contextchain[lookupname] = contexts
end
- local t = { }
+ local t, nt = { }, 0
for nofrules=1,#rules do
-- nearly the same as coverage so we could as well rename it
local rule = rules[nofrules]
@@ -2637,7 +2660,8 @@ local function prepare_contextchains(tfmdata)
uncover(back,sequence,cache,unicodes)
end
if sequence[1] then
- t[#t+1] = { nofrules, lookuptype, sequence, start, stop, rule.lookups }
+ nt = nt + 1
+ t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups }
for unic, _ in next, sequence[start] do
local cu = contexts[unic]
if not cu then
@@ -2681,7 +2705,7 @@ function fonts.initializers.node.otf.features(tfmdata,value)
prepare_lookups(tfmdata)
otfdata.shared.initialized = true
if trace_preparing then
- logs.report("otf process","preparation time is %0.3f seconds for %s",os.clock()-t,tfmdata.fullname or "?")
+ report_prepare("preparation time is %0.3f seconds for %s",os.clock()-t,tfmdata.fullname or "?")
end
end
end
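
Two mechanical changes recur throughout the otfl-font-otn.lua hunks above: hard-coded node ids (glyph, disc, whatsit) are replaced by the symbolic *_code values taken from nodes.nodecodes, and t[#t+1] appends inside per-glyph loops are replaced by an explicit counter. A minimal standalone sketch of the counter idiom (illustrative only, not part of the patch; collect is a made-up name):

-- illustrative only: the counter avoids re-evaluating #t on every append,
-- which adds up in loops that run once per glyph
local function collect(words)
    local t, n = { }, 0
    for s in string.gmatch(words, "[^ ]+") do
        n = n + 1
        t[n] = s            -- instead of t[#t+1] = s
    end
    return t, n
end

local list, count = collect("f f i")
assert(count == 3 and list[3] == "i")
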
diff --git a/otfl-font-ott.lua b/otfl-font-ott.lua
index c56e984..ec915b8 100644
--- a/otfl-font-ott.lua
+++ b/otfl-font-ott.lua
@@ -7,17 +7,23 @@ if not modules then modules = { } end modules ['font-otf'] = {
}
local type, next, tonumber, tostring = type, next, tonumber, tostring
-local gsub, lower = string.gsub, string.lower
+local gsub, lower, format = string.gsub, string.lower, string.format
+local is_boolean = string.is_boolean
-fonts = fonts or { }
-fonts.otf = fonts.otf or { }
+local allocate = utilities.storage.allocate
-local otf = fonts.otf
+fonts = fonts or { } -- needed for font server
+local fonts = fonts
+fonts.otf = fonts.otf or { }
+local otf = fonts.otf
-otf.tables = otf.tables or { }
-otf.meanings = otf.meanings or { }
+otf.tables = otf.tables or { }
+local tables = otf.tables
-otf.tables.scripts = {
+otf.meanings = otf.meanings or { }
+local meanings = otf.meanings
+
+local scripts = allocate {
['dflt'] = 'Default',
['arab'] = 'Arabic',
@@ -90,7 +96,7 @@ otf.tables.scripts = {
['yi' ] = 'Yi',
}
-otf.tables.languages = {
+local languages = allocate {
['dflt'] = 'Default',
['aba'] = 'Abaza',
@@ -484,7 +490,7 @@ otf.tables.languages = {
['zul'] = 'Zulu'
}
-otf.tables.features = {
+local features = allocate {
['aalt'] = 'Access All Alternates',
['abvf'] = 'Above-Base Forms',
['abvm'] = 'Above-Base Mark Positioning',
@@ -622,7 +628,7 @@ otf.tables.features = {
['tlig'] = 'Traditional TeX Ligatures',
}
-otf.tables.baselines = {
+local baselines = allocate {
['hang'] = 'Hanging baseline',
['icfb'] = 'Ideographic character face bottom edge baseline',
     ['icft'] = 'Ideographic character face top edge baseline',
@@ -632,10 +638,36 @@ otf.tables.baselines = {
['romn'] = 'Roman baseline'
}
--- can be sped up by local tables
-function otf.tables.to_tag(id)
- return stringformat("%4s",lower(id))
+local function swap(h) -- can be a tables.swap when we get a better name
+ local r = { }
+ for k, v in next, h do
+ r[v] = lower(gsub(k," ",""))
+ end
+ return r
+end
+
+local verbosescripts = allocate(swap(scripts ))
+local verboselanguages = allocate(swap(languages))
+local verbosefeatures = allocate(swap(features ))
+
+tables.scripts = scripts
+tables.languages = languages
+tables.features = features
+tables.baselines = baselines
+
+tables.verbosescripts = verbosescripts
+tables.verboselanguages = verboselanguages
+tables.verbosefeatures = verbosefeatures
+
+for k, v in next, verbosefeatures do
+ local stripped = gsub(k,"%-"," ")
+ verbosefeatures[stripped] = v
+ local stripped = gsub(k,"[^a-zA-Z0-9]","")
+ verbosefeatures[stripped] = v
+end
+for k, v in next, verbosefeatures do
+ verbosefeatures[lower(k)] = v
end
local function resolve(tab,id)
@@ -647,87 +679,59 @@ local function resolve(tab,id)
end
end
-function otf.meanings.script(id)
- return resolve(otf.tables.scripts,id)
-end
-function otf.meanings.language(id)
- return resolve(otf.tables.languages,id)
-end
-function otf.meanings.feature(id)
- return resolve(otf.tables.features,id)
-end
-function otf.meanings.baseline(id)
- return resolve(otf.tables.baselines,id)
-end
-
-otf.tables.to_scripts = table.reverse_hash(otf.tables.scripts )
-otf.tables.to_languages = table.reverse_hash(otf.tables.languages)
-otf.tables.to_features = table.reverse_hash(otf.tables.features )
+function meanings.script (id) return resolve(scripts, id) end
+function meanings.language(id) return resolve(languages,id) end
+function meanings.feature (id) return resolve(features, id) end
+function meanings.baseline(id) return resolve(baselines,id) end
-local scripts = otf.tables.scripts
-local languages = otf.tables.languages
-local features = otf.tables.features
-
-local to_scripts = otf.tables.to_scripts
-local to_languages = otf.tables.to_languages
-local to_features = otf.tables.to_features
-
-for k, v in next, to_features do
- local stripped = gsub(k,"%-"," ")
- to_features[stripped] = v
- local stripped = gsub(k,"[^a-zA-Z0-9]","")
- to_features[stripped] = v
-end
-for k, v in next, to_features do
- to_features[lower(k)] = v
-end
-
-otf.meanings.checkers = {
+local checkers = {
rand = function(v)
return v and "random"
end
}
-local checkers = otf.meanings.checkers
+meanings.checkers = checkers
-function otf.meanings.normalize(features)
- local h = { }
- for k,v in next, features do
- k = lower(k)
- if k == "language" or k == "lang" then
- v = gsub(lower(v),"[^a-z0-9%-]","")
- if not languages[v] then
- h.language = to_languages[v] or "dflt"
- else
- h.language = v
- end
- elseif k == "script" then
- v = gsub(lower(v),"[^a-z0-9%-]","")
- if not scripts[v] then
- h.script = to_scripts[v] or "dflt"
- else
- h.script = v
- end
- else
- if type(v) == "string" then
- local b = v:is_boolean()
- if type(b) == "nil" then
- v = tonumber(v) or lower(v)
+function meanings.normalize(features)
+ if features then
+ local h = { }
+ for k,v in next, features do
+ k = lower(k)
+ if k == "language" or k == "lang" then
+ v = gsub(lower(v),"[^a-z0-9%-]","")
+ if not languages[v] then
+ h.language = verboselanguages[v] or "dflt"
else
- v = b
+ h.language = v
+ end
+ elseif k == "script" then
+ v = gsub(lower(v),"[^a-z0-9%-]","")
+ if not scripts[v] then
+ h.script = verbosescripts[v] or "dflt"
+ else
+ h.script = v
+ end
+ else
+ if type(v) == "string" then
+ local b = is_boolean(v)
+ if type(b) == "nil" then
+ v = tonumber(v) or lower(v)
+ else
+ v = b
+ end
end
+ k = verbosefeatures[k] or k
+ local c = checkers[k]
+ h[k] = c and c(v) or v
end
- k = to_features[k] or k
- local c = checkers[k]
- h[k] = c and c(v) or v
end
+ return h
end
- return h
end
-- When I feel the need ...
---~ otf.tables.aat = {
+--~ tables.aat = {
--~ [ 0] = {
--~ name = "allTypographicFeaturesType",
--~ [ 0] = "allTypeFeaturesOnSelector",
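
The rewritten otfl-font-ott.lua derives the verbose* lookup tables by swapping the tag tables, so that meanings.normalize can map spelled-out script, language and feature names back to their four-letter tags. A self-contained sketch of that swap step (the data here is illustrative and the ConTeXt allocate helper is omitted):

-- illustrative only: verbose names are lowercased, spaces stripped, and used
-- as keys pointing back at the tags
local scripts = { ["latn"] = "Latin", ["arab"] = "Arabic" }

local function swap(h)
    local r = { }
    for tag, name in next, h do
        r[string.lower(string.gsub(name, " ", ""))] = tag
    end
    return r
end

local verbosescripts = swap(scripts)
assert(verbosescripts["latin"] == "latn" and verbosescripts["arabic"] == "arab")
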
diff --git a/otfl-font-tfm.lua b/otfl-font-tfm.lua
index 560ba1c..e8b2427 100644
--- a/otfl-font-tfm.lua
+++ b/otfl-font-tfm.lua
@@ -11,9 +11,13 @@ local utf = unicode.utf8
local next, format, match, lower, gsub = next, string.format, string.match, string.lower, string.gsub
local concat, sortedkeys, utfbyte, serialize = table.concat, table.sortedkeys, utf.byte, table.serialize
+local allocate = utilities.storage.allocate
+
local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end)
local trace_scaling = false trackers.register("fonts.scaling" , function(v) trace_scaling = v end)
+local report_define = logs.new("define fonts")
+
-- tfmdata has also fast access to indices and unicodes
-- to be checked: otf -> tfm -> tfmscaled
--
@@ -23,32 +27,32 @@ local trace_scaling = false trackers.register("fonts.scaling" , function(v) tr
<p>Here we only implement a few helper functions.</p>
--ldx]]--
-fonts = fonts or { }
-fonts.tfm = fonts.tfm or { }
-fonts.ids = fonts.ids or { }
-
-local tfm = fonts.tfm
+local fonts = fonts
+local tfm = fonts.tfm
-fonts.loaded = fonts.loaded or { }
-fonts.dontembed = fonts.dontembed or { }
-fonts.triggers = fonts.triggers or { } -- brrr
-fonts.initializers = fonts.initializers or { }
+fonts.loaded = allocate()
+fonts.dontembed = allocate()
+fonts.triggers = fonts.triggers or { } -- brrr
+fonts.initializers = fonts.initializers or { }
fonts.initializers.common = fonts.initializers.common or { }
-local fontdata = fonts.ids
-local disc = node.id('disc')
-local glyph = node.id('glyph')
local set_attribute = node.set_attribute
+local fontdata = fonts.ids
+local nodecodes = nodes.nodecodes
+
+local disc_code = nodecodes.disc
+local glyph_code = nodecodes.glyph
+
--[[ldx--
<p>The next function encapsulates the standard <l n='tfm'/> loader as
supplied by <l n='luatex'/>.</p>
--ldx]]--
-tfm.resolve_vf = true -- false
-tfm.share_base_kerns = false -- true (.5 sec slower on mk but brings down mem from 410M to 310M, beware: then script/lang share too)
-tfm.mathactions = { }
-tfm.fontname_mode = "fullpath"
+tfm.resolvevirtualtoo = true -- false
+tfm.sharebasekerns = false -- true (.5 sec slower on mk but brings down mem from 410M to 310M, beware: then script/lang share too)
+tfm.mathactions = { }
+tfm.fontnamemode = "fullpath"
tfm.enhance = tfm.enhance or function() end
@@ -58,12 +62,12 @@ function tfm.read_from_tfm(specification)
local fname, tfmdata = specification.filename or "", nil
if fname ~= "" then
if trace_defining then
- logs.report("define font","loading tfm file %s at size %s",fname,specification.size)
+ report_define("loading tfm file %s at size %s",fname,specification.size)
end
tfmdata = font.read_tfm(fname,specification.size) -- not cached, fast enough
if tfmdata then
tfmdata.descriptions = tfmdata.descriptions or { }
- if tfm.resolve_vf then
+ if tfm.resolvevirtualtoo then
fonts.logger.save(tfmdata,file.extname(fname),specification) -- strange, why here
fname = resolvers.findbinfile(specification.name, 'ovf')
if fname and fname ~= "" then
@@ -81,7 +85,7 @@ function tfm.read_from_tfm(specification)
tfm.enhance(tfmdata,specification)
end
elseif trace_defining then
- logs.report("define font","loading tfm with name %s fails",specification.name)
+ report_define("loading tfm with name %s fails",specification.name)
end
return tfmdata
end
@@ -124,19 +128,34 @@ end
to scale virtual characters.</p>
--ldx]]--
-function tfm.get_virtual_id(tfmdata)
+--~ function tfm.getvirtualid(tfmdata)
+--~ -- since we don't know the id yet, we use 0 as signal
+--~ local tf = tfmdata.fonts
+--~ if not tf then
+--~ tfmdata.type = "virtual"
+--~ tfmdata.fonts = { { id = 0 } }
+--~ return 1
+--~ else
+--~ local ntf = #tf + 1
+--~ tf[ntf] = { id = 0 }
+--~ return ntf
+--~ end
+--~ end
+
+function tfm.getvirtualid(tfmdata)
-- since we don't know the id yet, we use 0 as signal
- if not tfmdata.fonts then
+ local tf = tfmdata.fonts
+ if not tf then
+ tf = { }
tfmdata.type = "virtual"
- tfmdata.fonts = { { id = 0 } }
- return 1
- else
- tfmdata.fonts[#tfmdata.fonts+1] = { id = 0 }
- return #tfmdata.fonts
+ tfmdata.fonts = tf
end
+ local ntf = #tf + 1
+ tf[ntf] = { id = 0 }
+ return ntf
end
-function tfm.check_virtual_id(tfmdata, id)
+function tfm.checkvirtualid(tfmdata, id)
if tfmdata and tfmdata.type == "virtual" then
if not tfmdata.fonts or #tfmdata.fonts == 0 then
tfmdata.type, tfmdata.fonts = "real", nil
@@ -166,7 +185,7 @@ fonts.trace_scaling = false
-- sharedkerns are unscaled and are be hashed by concatenated indexes
--~ function tfm.check_base_kerns(tfmdata)
---~ if tfm.share_base_kerns then
+--~ if tfm.sharebasekerns then
--~ local sharedkerns = tfmdata.sharedkerns
--~ if sharedkerns then
--~ local basekerns = { }
@@ -178,7 +197,7 @@ fonts.trace_scaling = false
--~ end
--~ function tfm.prepare_base_kerns(tfmdata)
---~ if tfm.share_base_kerns and not tfmdata.sharedkerns then
+--~ if tfm.sharebasekerns and not tfmdata.sharedkerns then
--~ local sharedkerns = { }
--~ tfmdata.sharedkerns = sharedkerns
--~ for u, chr in next, tfmdata.characters do
@@ -207,7 +226,47 @@ local charactercache = { }
-- a virtual font has italic correction make sure to set the
-- has_italic flag. Some more flags will be added in the future.
-function tfm.calculate_scale(tfmtable, scaledpoints, relativeid)
+--[[ldx--
+<p>The reason why the scaler was originally split, is that for a while we experimented
+with a helper function. However, in practice the <l n='api'/> calls are too slow to
+make this profitable and the <l n='lua'/> based variant was just faster. A
+wasted day, but an experience richer.</p>
+--ldx]]--
+
+tfm.autocleanup = true
+
+local lastfont = nil
+
+-- we can get rid of the tfm instance when we have fast access to the
+-- scaled character dimensions at the tex end, e.g. a fontobject.width
+--
+-- flushing the kern and ligature tables from memory saves a lot (only
+-- base mode) but it complicates vf building where the new characters
+-- demand this data .. solution: functions that access them
+
+-- we don't need the glyph data as we can use the description .. but we will
+-- have to wait till we can access the internal tfm table efficiently in which
+-- case characters will become a metatable afterwards
+
+function tfm.cleanuptable(tfmdata) -- we need a cleanup callback, now we miss the last one
+ if tfm.autocleanup then -- ok, we can hook this into everyshipout or so ... todo
+ if tfmdata.type == 'virtual' or tfmdata.virtualized then
+ for k, v in next, tfmdata.characters do
+ if v.commands then v.commands = nil end
+ -- if v.kerns then v.kerns = nil end
+ end
+ else
+ -- for k, v in next, tfmdata.characters do
+ -- if v.kerns then v.kerns = nil end
+ -- end
+ end
+ end
+end
+
+function tfm.cleanup(tfmdata) -- we need a cleanup callback, now we miss the last one
+end
+
+function tfm.calculatescale(tfmtable, scaledpoints)
if scaledpoints < 0 then
scaledpoints = (- scaledpoints/1000) * tfmtable.designsize -- already in sp
end
@@ -216,10 +275,10 @@ function tfm.calculate_scale(tfmtable, scaledpoints, relativeid)
return scaledpoints, delta, units
end
-function tfm.do_scale(tfmtable, scaledpoints, relativeid)
+function tfm.scale(tfmtable, scaledpoints, relativeid)
-- tfm.prepare_base_kerns(tfmtable) -- optimalization
local t = { } -- the new table
- local scaledpoints, delta, units = tfm.calculate_scale(tfmtable, scaledpoints, relativeid)
+ local scaledpoints, delta, units = tfm.calculatescale(tfmtable, scaledpoints, relativeid)
t.units_per_em = units or 1000
local hdelta, vdelta = delta, delta
-- unicoded unique descriptions shared cidinfo characters changed parameters indices
@@ -249,18 +308,23 @@ function tfm.do_scale(tfmtable, scaledpoints, relativeid)
local nodemode = tfmtable.mode == "node"
local hasquality = tfmtable.auto_expand or tfmtable.auto_protrude
local hasitalic = tfmtable.has_italic
+ local descriptions = tfmtable.descriptions or { }
+ --
+ if hasmath then
+ t.has_math = true -- this will move to elsewhere
+ end
--
t.parameters = { }
t.characters = { }
t.MathConstants = { }
-- fast access
- local descriptions = tfmtable.descriptions or { }
+ t.unscaled = tfmtable -- the original unscaled one (temp)
t.unicodes = tfmtable.unicodes
t.indices = tfmtable.indices
t.marks = tfmtable.marks
-t.goodies = tfmtable.goodies
-t.colorscheme = tfmtable.colorscheme
---~ t.embedding = tfmtable.embedding
+ t.goodies = tfmtable.goodies
+ t.colorscheme = tfmtable.colorscheme
+ -- t.embedding = tfmtable.embedding
t.descriptions = descriptions
if tfmtable.fonts then
t.fonts = table.fastcopy(tfmtable.fonts) -- hm also at the end
@@ -296,7 +360,7 @@ t.colorscheme = tfmtable.colorscheme
local scaledheight = defaultheight * vdelta
local scaleddepth = defaultdepth * vdelta
local stackmath = tfmtable.ignore_stack_math ~= true
- local private = fonts.private
+ local private = fonts.privateoffset
local sharedkerns = { }
for k,v in next, characters do
local chr, description, index
@@ -357,7 +421,7 @@ t.colorscheme = tfmtable.colorscheme
end
end
-- if trace_scaling then
- -- logs.report("define font","t=%s, u=%s, i=%s, n=%s c=%s",k,chr.tounicode or k,description.index,description.name or '-',description.class or '-')
+ -- report_define("t=%s, u=%s, i=%s, n=%s c=%s",k,chr.tounicode or k,description.index,description.name or '-',description.class or '-')
-- end
if tounicode then
local tu = tounicode[index] -- nb: index!
@@ -394,7 +458,7 @@ t.colorscheme = tfmtable.colorscheme
if vn then
chr.next = vn
--~ if v.vert_variants or v.horiz_variants then
- --~ logs.report("glyph 0x%05X has combination of next, vert_variants and horiz_variants",index)
+ --~ report_define("glyph 0x%05X has combination of next, vert_variants and horiz_variants",index)
--~ end
else
local vv = v.vert_variants
@@ -515,13 +579,13 @@ t.colorscheme = tfmtable.colorscheme
local ivc = vc[i]
local key = ivc[1]
if key == "right" then
- tt[#tt+1] = { key, ivc[2]*hdelta }
+ tt[i] = { key, ivc[2]*hdelta }
elseif key == "down" then
- tt[#tt+1] = { key, ivc[2]*vdelta }
+ tt[i] = { key, ivc[2]*vdelta }
elseif key == "rule" then
- tt[#tt+1] = { key, ivc[2]*vdelta, ivc[3]*hdelta }
+ tt[i] = { key, ivc[2]*vdelta, ivc[3]*hdelta }
else -- not comment
- tt[#tt+1] = ivc -- shared since in cache and untouched
+ tt[i] = ivc -- shared since in cache and untouched
end
end
chr.commands = tt
@@ -565,11 +629,11 @@ t.colorscheme = tfmtable.colorscheme
-- can have multiple subfonts
if hasmath then
if trace_defining then
- logs.report("define font","math enabled for: name '%s', fullname: '%s', filename: '%s'",t.name or "noname",t.fullname or "nofullname",t.filename or "nofilename")
+ report_define("math enabled for: name '%s', fullname: '%s', filename: '%s'",t.name or "noname",t.fullname or "nofullname",t.filename or "nofilename")
end
else
if trace_defining then
- logs.report("define font","math disabled for: name '%s', fullname: '%s', filename: '%s'",t.name or "noname",t.fullname or "nofullname",t.filename or "nofilename")
+ report_define("math disabled for: name '%s', fullname: '%s', filename: '%s'",t.name or "noname",t.fullname or "nofullname",t.filename or "nofilename")
end
t.nomath, t.MathConstants = true, nil
end
@@ -578,58 +642,17 @@ t.colorscheme = tfmtable.colorscheme
t.psname = t.fontname or (t.fullname and fonts.names.cleanname(t.fullname))
end
if trace_defining then
- logs.report("define font","used for accesing subfont: '%s'",t.psname or "nopsname")
- logs.report("define font","used for subsetting: '%s'",t.fontname or "nofontname")
- end
---~ print(t.fontname,table.serialize(t.MathConstants))
- return t, delta
-end
-
---[[ldx--
-<p>The reason why the scaler is split, is that for a while we experimented
-with a helper function. However, in practice the <l n='api'/> calls are too slow to
-make this profitable and the <l n='lua'/> based variant was just faster. A days
-wasted day but an experience richer.</p>
---ldx]]--
-
-tfm.auto_cleanup = true
-
-local lastfont = nil
-
--- we can get rid of the tfm instance when we have fast access to the
--- scaled character dimensions at the tex end, e.g. a fontobject.width
---
--- flushing the kern and ligature tables from memory saves a lot (only
--- base mode) but it complicates vf building where the new characters
--- demand this data .. solution: functions that access them
-
-function tfm.cleanup_table(tfmdata) -- we need a cleanup callback, now we miss the last one
- if tfm.auto_cleanup then -- ok, we can hook this into everyshipout or so ... todo
- if tfmdata.type == 'virtual' or tfmdata.virtualized then
- for k, v in next, tfmdata.characters do
- if v.commands then v.commands = nil end
- -- if v.kerns then v.kerns = nil end
- end
- else
- -- for k, v in next, tfmdata.characters do
- -- if v.kerns then v.kerns = nil end
- -- end
- end
+ report_define("used for accessing (sub)font: '%s'",t.psname or "nopsname")
+ report_define("used for subsetting: '%s'",t.fontname or "nofontname")
end
-end
-
-function tfm.cleanup(tfmdata) -- we need a cleanup callback, now we miss the last one
-end
-
-function tfm.scale(tfmtable, scaledpoints, relativeid)
- local t, factor = tfm.do_scale(tfmtable, scaledpoints, relativeid)
- t.factor = factor
- t.ascender = factor*(tfmtable.ascender or 0)
- t.descender = factor*(tfmtable.descender or 0)
+ -- this will move up (side effect of merging split call)
+ t.factor = delta
+ t.ascender = delta*(tfmtable.ascender or 0)
+ t.descender = delta*(tfmtable.descender or 0)
t.shared = tfmtable.shared or { }
t.unique = table.fastcopy(tfmtable.unique or {})
---~ print("scaling", t.name, t.factor) -- , tfm.hash_features(tfmtable.specification))
tfm.cleanup(t)
+ -- print(t.fontname,table.serialize(t.MathConstants))
return t
end
@@ -638,10 +661,12 @@ end
process features right.</p>
--ldx]]--
-fonts.analyzers = fonts.analyzers or { }
-fonts.analyzers.aux = fonts.analyzers.aux or { }
-fonts.analyzers.methods = fonts.analyzers.methods or { }
-fonts.analyzers.initializers = fonts.analyzers.initializers or { }
+fonts.analyzers = fonts.analyzers or { }
+local analyzers = fonts.analyzers
+
+analyzers.aux = analyzers.aux or { }
+analyzers.methods = analyzers.methods or { }
+analyzers.initializers = analyzers.initializers or { }
-- todo: analyzers per script/lang, cross font, so we need an font id hash -> script
-- e.g. latin -> hyphenate, arab -> 1/2/3 analyze
@@ -650,17 +675,19 @@ fonts.analyzers.initializers = fonts.analyzers.initializers or { }
local state = attributes.private('state')
-function fonts.analyzers.aux.setstate(head,font)
+function analyzers.aux.setstate(head,font)
+ local useunicodemarks = analyzers.useunicodemarks
local tfmdata = fontdata[font]
local characters = tfmdata.characters
local descriptions = tfmdata.descriptions
local first, last, current, n, done = nil, nil, head, 0, false -- maybe make n boolean
while current do
local id = current.id
- if id == glyph and current.font == font then
- local d = descriptions[current.char]
+ if id == glyph_code and current.font == font then
+ local char = current.char
+ local d = descriptions[char]
if d then
- if d.class == "mark" then
+ if d.class == "mark" or (useunicodemarks and categories[char] == "mn") then
done = true
set_attribute(current,state,5) -- mark
elseif n == 0 then
@@ -678,7 +705,7 @@ function fonts.analyzers.aux.setstate(head,font)
end
first, last, n = nil, nil, 0
end
- elseif id == disc then
+ elseif id == disc_code then
-- always in the middle
set_attribute(current,state,2) -- midi
last = current
@@ -711,25 +738,25 @@ end
-- checking
-function tfm.checked_filename(metadata,whatever)
+function tfm.checkedfilename(metadata,whatever)
local foundfilename = metadata.foundfilename
if not foundfilename then
local askedfilename = metadata.filename or ""
if askedfilename ~= "" then
foundfilename = resolvers.findbinfile(askedfilename,"") or ""
if foundfilename == "" then
- logs.report("fonts","source file '%s' is not found",askedfilename)
+ report_define("source file '%s' is not found",askedfilename)
foundfilename = resolvers.findbinfile(file.basename(askedfilename),"") or ""
if foundfilename ~= "" then
- logs.report("fonts","using source file '%s' (cache mismatch)",foundfilename)
+ report_define("using source file '%s' (cache mismatch)",foundfilename)
end
end
elseif whatever then
- logs.report("fonts","no source file for '%s'",whatever)
+ report_define("no source file for '%s'",whatever)
foundfilename = ""
end
metadata.foundfilename = foundfilename
- -- logs.report("fonts","using source file '%s'",foundfilename)
+ -- report_define("using source file '%s'",foundfilename)
end
return foundfilename
end
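
The merged scaler keeps the convention visible in tfm.calculatescale above: a negative scaledpoints value encodes an "at scaled n" request that is resolved against the design size. A toy, standalone illustration of just that arithmetic (resolvescaledpoints is a hypothetical name, not the library routine):

-- illustrative only: "scaled 1200" arrives as -1200 and is resolved against
-- the design size, which is already expressed in scaled points (sp)
local function resolvescaledpoints(scaledpoints, designsize)
    if scaledpoints < 0 then
        scaledpoints = (-scaledpoints / 1000) * designsize
    end
    return scaledpoints
end

local designsize = 10 * 65536                                 -- a 10pt design size
assert(resolvescaledpoints(-1200, designsize) == 12 * 65536)  -- 1.2 * 10pt
assert(resolvescaledpoints(655360, designsize) == 655360)     -- positive sizes pass through
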
diff --git a/otfl-font-xtx.lua b/otfl-font-xtx.lua
index 8237851..574e161 100644
--- a/otfl-font-xtx.lua
+++ b/otfl-font-xtx.lua
@@ -31,36 +31,14 @@ well and that does not work too well with the general design
of the specifier.</p>
--ldx]]--
---~ function fonts.define.specify.colonized(specification) -- xetex mode
---~ local list = { }
---~ if specification.detail and specification.detail ~= "" then
---~ for v in gmatch(specification.detail,"%s*([^;]+)%s*") do
---~ local a, b = match(v,"^(%S*)%s*=%s*(%S*)$")
---~ if a and b then
---~ list[a] = b:is_boolean()
---~ if type(list[a]) == "nil" then
---~ list[a] = b
---~ end
---~ else
---~ local a, b = match(v,"^([%+%-]?)%s*(%S+)$")
---~ if a and b then
---~ list[b] = a ~= "-"
---~ end
---~ end
---~ end
---~ end
---~ specification.features.normal = list
---~ return specification
---~ end
-
---~ check("oeps/BI:+a;-b;c=d")
---~ check("[oeps]/BI:+a;-b;c=d")
---~ check("file:oeps/BI:+a;-b;c=d")
---~ check("name:oeps/BI:+a;-b;c=d")
+local fonts = fonts
+local definers = fonts.definers
+local specifiers = definers.specifiers
+local normalize_meanings = fonts.otf.meanings.normalize
local list = { }
-fonts.define.specify.colonized_default_lookup = "file"
+specifiers.colonizedpreference = "file"
local function isstyle(s)
local style = string.lower(s):split("/")
@@ -157,11 +135,15 @@ local function parse_script(script)
end
end
-local function issome () list.lookup = fonts.define.specify.colonized_default_lookup end
+specifiers.colonizedpreference = "file"
+
+local function issome () list.lookup = specifiers.colonizedpreference end
local function isfile () list.lookup = 'file' end
local function isname () list.lookup = 'name' end
local function thename(s) list.name = s end
local function issub (v) list.sub = v end
+local function istrue (s) list[s] = true end
+local function isfalse(s) list[s] = false end
local function iskey (k,v)
if k == "script" then
parse_script(v)
@@ -169,28 +151,25 @@ local function iskey (k,v)
list[k] = v
end
-local function istrue (s) list[s] = true end
-local function isfalse(s) list[s] = false end
-
-local spaces = lpeg.P(" ")^0
-local namespec = (1-lpeg.S("/:("))^0 -- was: (1-lpeg.S("/: ("))^0
-local filespec = (lpeg.R("az", "AZ") * lpeg.P(":"))^-1 * (1-lpeg.S(":("))^1
-local crapspec = spaces * lpeg.P("/") * (((1-lpeg.P(":"))^0)/isstyle) * spaces
-local filename = (lpeg.P("file:")/isfile * (filespec/thename)) + (lpeg.P("[") * lpeg.P(true)/isfile * (((1-lpeg.P("]"))^0)/thename) * lpeg.P("]"))
-local fontname = (lpeg.P("name:")/isname * (namespec/thename)) + lpeg.P(true)/issome * (namespec/thename)
-local sometext = (lpeg.R("az","AZ","09") + lpeg.S("+-."))^1
-local truevalue = lpeg.P("+") * spaces * (sometext/istrue)
-local falsevalue = lpeg.P("-") * spaces * (sometext/isfalse)
-local keyvalue = lpeg.P("+") + (lpeg.C(sometext) * spaces * lpeg.P("=") * spaces * lpeg.C(sometext))/iskey
+local P, S, R, C = lpeg.P, lpeg.S, lpeg.R, lpeg.C
+
+local spaces = P(" ")^0
+local namespec = (1-S("/:("))^0 -- was: (1-S("/: ("))^0
+local filespec = (R("az", "AZ") * P(":"))^-1 * (1-S(":("))^1
+local stylespec = spaces * P("/") * (((1-P(":"))^0)/isstyle) * spaces
+local filename = (P("file:")/isfile * (filespec/thename)) + (P("[") * P(true)/isname * (((1-P("]"))^0)/thename) * P("]"))
+local fontname = (P("name:")/isname * (namespec/thename)) + P(true)/issome * (namespec/thename)
+local sometext = (R("az","AZ","09") + S("+-."))^1
+local truevalue = P("+") * spaces * (sometext/istrue)
+local falsevalue = P("-") * spaces * (sometext/isfalse)
+local keyvalue = P("+") + (C(sometext) * spaces * P("=") * spaces * C(sometext))/iskey
local somevalue = sometext/istrue
-local subvalue = lpeg.P("(") * (lpeg.C(lpeg.P(1-lpeg.S("()"))^1)/issub) * lpeg.P(")") -- for Kim
+local subvalue = P("(") * (C(P(1-S("()"))^1)/issub) * P(")") -- for Kim
local option = spaces * (keyvalue + falsevalue + truevalue + somevalue) * spaces
-local options = lpeg.P(":") * spaces * (lpeg.P(";")^0 * option)^0
-local pattern = (filename + fontname) * subvalue^0 * crapspec^0 * options^0
-
-local normalize_meanings = fonts.otf.meanings.normalize
+local options = P(":") * spaces * (P(";")^0 * option)^0
+local pattern = (filename + fontname) * subvalue^0 * stylespec^0 * options^0
-function fonts.define.specify.colonized(specification) -- xetex mode
+local function colonized(specification) -- xetex mode
list = { }
lpegmatch(pattern,specification.specification)
if list.style then
@@ -202,10 +181,10 @@ function fonts.define.specify.colonized(specification) -- xetex mode
list.optsize = nil
end
if list.name then
- if resolvers.find_file(list.name, "tfm") then
+ if resolvers.findfile(list.name, "tfm") then
list.lookup = "file"
list.name = file.addsuffix(list.name, "tfm")
- elseif resolvers.find_file(list.name, "ofm") then
+ elseif resolvers.findfile(list.name, "ofm") then
list.lookup = "file"
list.name = file.addsuffix(list.name, "ofm")
end
@@ -221,9 +200,9 @@ function fonts.define.specify.colonized(specification) -- xetex mode
specification.sub = list.sub
list.sub = nil
end
--- specification.features.normal = list
+ -- specification.features.normal = list
specification.features.normal = normalize_meanings(list)
return specification
end
-fonts.define.register_split(":", fonts.define.specify.colonized)
+definers.registersplit(":",colonized)
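
The rewritten colonized parser builds its feature list from an lpeg grammar: "+name" enables a feature, "-name" disables it, and "key=value" stores a value that meanings.normalize later resolves. A reduced, self-contained sketch of the option part of that grammar (assumes the lpeg module, as shipped with LuaTeX; the full pattern above additionally handles file:/name:/[] prefixes, subfonts and the /style suffix):

-- illustrative only: a stripped-down version of the option grammar
local lpeg = lpeg or require("lpeg")
local P, S, R, C = lpeg.P, lpeg.S, lpeg.R, lpeg.C

local list = { }
local function istrue (s)    list[s] = true  end
local function isfalse(s)    list[s] = false end
local function iskey  (k, v) list[k] = v     end

local spaces     = P(" ")^0
local sometext   = (R("az", "AZ", "09") + S("+-."))^1
local truevalue  = P("+") * spaces * (sometext/istrue)
local falsevalue = P("-") * spaces * (sometext/isfalse)
local keyvalue   = (C(sometext) * spaces * P("=") * spaces * C(sometext))/iskey
local option     = spaces * (keyvalue + falsevalue + truevalue) * spaces
local options    = (P(";")^0 * option)^0

lpeg.match(options, "+liga;-kern;script=latn")
assert(list.liga == true and list.kern == false and list.script == "latn")
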
diff --git a/otfl-luat-dum.lua b/otfl-luat-dum.lua
index 2f6627f..bd10e89 100644
--- a/otfl-luat-dum.lua
+++ b/otfl-luat-dum.lua
@@ -12,6 +12,7 @@ statistics = {
register = dummyfunction,
starttiming = dummyfunction,
stoptiming = dummyfunction,
+ elapsedtime = nil,
}
directives = {
register = dummyfunction,
@@ -28,23 +29,28 @@ experiments = {
enable = dummyfunction,
disable = dummyfunction,
}
-storage = {
+storage = { -- probably no longer needed
register = dummyfunction,
shared = { },
}
logs = {
+ new = function() return dummyfunction end,
report = dummyfunction,
simple = dummyfunction,
}
-tasks = {
- new = dummyfunction,
- actions = dummyfunction,
- appendaction = dummyfunction,
- prependaction = dummyfunction,
-}
callbacks = {
register = function(n,f) return callback.register(n,f) end,
}
+utilities = {
+ storage = {
+ allocate = function(t) return t or { } end,
+ mark = function(t) return t or { } end,
+ },
+}
+
+characters = characters or {
+ data = { }
+}
-- we need to cheat a bit here
@@ -61,7 +67,7 @@ local remapper = {
fea = "font feature files",
}
-function resolvers.find_file(name,kind)
+function resolvers.findfile(name,kind)
name = string.gsub(name,"\\","\/")
kind = string.lower(kind)
return kpse.find_file(name,(kind and kind ~= "" and (remapper[kind] or kind)) or file.extname(name,"tex"))
@@ -71,7 +77,7 @@ function resolvers.findbinfile(name,kind)
if not kind or kind == "" then
kind = file.extname(name) -- string.match(name,"%.([^%.]-)$")
end
- return resolvers.find_file(name,(kind and remapper[kind]) or kind)
+ return resolvers.findfile(name,(kind and remapper[kind]) or kind)
end
-- Caches ... I will make a real stupid version some day when I'm in the
@@ -160,9 +166,9 @@ local function makefullname(path,name)
end
end
-function caches.iswritable(path,name)
+function caches.is_writable(path,name)
local fullname = makefullname(path,name)
- return fullname and file.iswritable(fullname)
+ return fullname and file.is_writable(fullname)
end
function caches.loaddata(paths,name)
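
resolvers.findfile in this dummy layer translates a short file "kind" into a kpathsea format name before querying kpse. A hypothetical standalone version of that remapping step (the kpse call itself is omitted; kpseformat is a made-up name and the otf/ttf/ttc entries are assumed, only fea is shown in the hunk above):

-- illustrative only: remap a suffix-like kind to a kpse format string
local remapper = {
    otf = "opentype fonts",     -- assumed entry
    ttf = "truetype fonts",     -- assumed entry
    ttc = "truetype fonts",     -- assumed entry
    fea = "font feature files",
}

local function kpseformat(name, kind)
    kind = string.lower(kind or "")
    if kind == "" then
        kind = name:match("%.([^%.]-)$") or "tex"
    end
    return remapper[kind] or kind
end

assert(kpseformat("lmroman10-regular.otf", "otf") == "opentype fonts")
assert(kpseformat("jfm-ujis.lua", "") == "lua")
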
diff --git a/otfl-node-dum.lua b/otfl-node-dum.lua
index 9483e51..5127481 100644
--- a/otfl-node-dum.lua
+++ b/otfl-node-dum.lua
@@ -10,22 +10,30 @@ nodes = nodes or { }
fonts = fonts or { }
attributes = attributes or { }
+nodes.pool = nodes.pool or { }
+nodes.handlers = nodes.handlers or { }
+
+local nodecodes = { } for k,v in next, node.types () do nodecodes[string.gsub(v,"_","")] = k end
+local whatcodes = { } for k,v in next, node.whatsits() do whatcodes[string.gsub(v,"_","")] = k end
+local glyphcodes = { [0] = "character", "glyph", "ligature", "ghost", "left", "right" }
+
+nodes.nodecodes = nodecodes
+nodes.whatcodes = whatcodes
+nodes.whatsitcodes = whatcodes
+nodes.glyphcodes = glyphcodes
+
local traverse_id = node.traverse_id
local free_node = node.free
local remove_node = node.remove
local new_node = node.new
-local glyph = node.id('glyph')
-
--- fonts
-
-local fontdata = fonts.ids or { }
+local glyph_code = nodecodes.glyph
function nodes.simple_font_handler(head)
-- lang.hyphenate(head)
- head = nodes.process_characters(head)
- nodes.inject_kerns(head)
- nodes.protect_glyphs(head)
+ head = nodes.handlers.characters(head)
+ nodes.injections.handler(head)
+ nodes.handlers.protectglyphs(head)
head = node.ligaturing(head)
head = node.kerning(head)
return head
@@ -36,52 +44,57 @@ if tex.attribute[0] ~= 0 then
texio.write_nl("log","!")
texio.write_nl("log","! Attribute 0 is reserved for ConTeXt's font feature management and has to be")
texio.write_nl("log","! set to zero. Also, some attributes in the range 1-255 are used for special")
- texio.write_nl("log","! purposed so setting them at the TeX end might break the font handler.")
+ texio.write_nl("log","! purposes so setting them at the TeX end might break the font handler.")
texio.write_nl("log","!")
tex.attribute[0] = 0 -- else no features
end
-nodes.protect_glyphs = node.protect_glyphs
-nodes.unprotect_glyphs = node.unprotect_glyphs
-
-function nodes.process_characters(head)
- local usedfonts, done, prevfont = { }, false, nil
- for n in traverse_id(glyph,head) do
- local font = n.font
- if font ~= prevfont then
- prevfont = font
- local used = usedfonts[font]
- if not used then
- local tfmdata = fontdata[font]
- if tfmdata then
- local shared = tfmdata.shared -- we need to check shared, only when same features
- if shared then
- local processors = shared.processes
- if processors and #processors > 0 then
- usedfonts[font] = processors
- done = true
+nodes.handlers.protectglyphs = node.protect_glyphs
+nodes.handlers.unprotectglyphs = node.unprotect_glyphs
+
+function nodes.handlers.characters(head)
+ local fontdata = fonts.identifiers
+ if fontdata then
+ local usedfonts, done, prevfont = { }, false, nil
+ for n in traverse_id(glyph_code,head) do
+ local font = n.font
+ if font ~= prevfont then
+ prevfont = font
+ local used = usedfonts[font]
+ if not used then
+ local tfmdata = fontdata[font] --
+ if tfmdata then
+ local shared = tfmdata.shared -- we need to check shared, only when same features
+ if shared then
+ local processors = shared.processes
+ if processors and #processors > 0 then
+ usedfonts[font] = processors
+ done = true
+ end
end
end
end
end
end
- end
- if done then
- for font, processors in next, usedfonts do
- for i=1,#processors do
- local h, d = processors[i](head,font,0)
- head, done = h or head, done or d
+ if done then
+ for font, processors in next, usedfonts do
+ for i=1,#processors do
+ local h, d = processors[i](head,font,0)
+ head, done = h or head, done or d
+ end
end
end
+ return head, true
+ else
+ return head, false
end
- return head, true
end
-- helper
-function nodes.kern(k)
+function nodes.pool.kern(k)
local n = new_node("kern",1)
n.kern = k
return n
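
The symbolic node codes introduced here are what the earlier otfl-font-otn.lua hunks consume instead of node.id('glyph') and the hard-coded whatsit subtypes 6 and 7. A short sketch of how the lookup tables are built and used (assumes a LuaTeX run where the node library is present):

-- illustrative only: build name -> id tables, stripping underscores so that
-- e.g. "local_par" becomes whatcodes.localpar
local nodecodes = { }
for id, name in next, node.types() do
    nodecodes[string.gsub(name, "_", "")] = id
end

local whatcodes = { }
for id, name in next, node.whatsits() do
    whatcodes[string.gsub(name, "_", "")] = id
end

local glyph_code    = nodecodes.glyph      -- instead of node.id("glyph")
local whatsit_code  = nodecodes.whatsit
local dir_code      = whatcodes.dir        -- instead of the literal subtype 7
local localpar_code = whatcodes.localpar   -- instead of the literal subtype 6
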
diff --git a/otfl-node-inj.lua b/otfl-node-inj.lua
index fdea7f1..e4380a4 100644
--- a/otfl-node-inj.lua
+++ b/otfl-node-inj.lua
@@ -17,14 +17,22 @@ local next = next
local trace_injections = false trackers.register("nodes.injections", function(v) trace_injections = v end)
+local report_injections = logs.new("injections")
+
+local attributes, nodes, node = attributes, nodes, node
+
fonts = fonts or { }
fonts.tfm = fonts.tfm or { }
fonts.ids = fonts.ids or { }
-local fontdata = fonts.ids
+nodes.injections = nodes.injections or { }
+local injections = nodes.injections
-local glyph = node.id('glyph')
-local kern = node.id('kern')
+local fontdata = fonts.ids
+local nodecodes = nodes.nodecodes
+local glyph_code = nodecodes.glyph
+local nodepool = nodes.pool
+local newkern = nodepool.kern
local traverse_id = node.traverse_id
local unset_attribute = node.unset_attribute
@@ -33,8 +41,6 @@ local set_attribute = node.set_attribute
local insert_node_before = node.insert_before
local insert_node_after = node.insert_after
-local newkern = nodes.kern
-
local markbase = attributes.private('markbase')
local markmark = attributes.private('markmark')
local markdone = attributes.private('markdone')
@@ -54,7 +60,7 @@ local kerns = { }
-- for the moment we pass the r2l key ... volt/arabtype tests
-function nodes.set_cursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext)
+function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext)
local dx, dy = factor*(exit[1]-entry[1]), factor*(exit[2]-entry[2])
local ws, wn = tfmstart.width, tfmnext.width
local bound = #cursives + 1
@@ -64,7 +70,7 @@ function nodes.set_cursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext)
return dx, dy, bound
end
-function nodes.set_pair(current,factor,rlmode,r2lflag,spec,tfmchr)
+function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr)
local x, y, w, h = factor*spec[1], factor*spec[2], factor*spec[3], factor*spec[4]
-- dy = y - h
if x ~= 0 or w ~= 0 or y ~= 0 or h ~= 0 then
@@ -83,7 +89,7 @@ function nodes.set_pair(current,factor,rlmode,r2lflag,spec,tfmchr)
return x, y, w, h -- no bound
end
-function nodes.set_kern(current,factor,rlmode,x,tfmchr)
+function injections.setkern(current,factor,rlmode,x,tfmchr)
local dx = factor*x
if dx ~= 0 then
local bound = #kerns + 1
@@ -95,7 +101,7 @@ function nodes.set_kern(current,factor,rlmode,x,tfmchr)
end
end
-function nodes.set_mark(start,base,factor,rlmode,ba,ma,index) --ba=baseanchor, ma=markanchor
+function injections.setmark(start,base,factor,rlmode,ba,ma,index) --ba=baseanchor, ma=markanchor
local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2])
local bound = has_attribute(base,markbase)
if bound then
@@ -107,7 +113,7 @@ function nodes.set_mark(start,base,factor,rlmode,ba,ma,index) --ba=baseanchor, m
set_attribute(start,markdone,index)
return dx, dy, bound
else
- logs.report("nodes mark", "possible problem, U+%04X is base without data (id: %s)",base.char,bound)
+ report_injections("possible problem, U+%04X is base mark without data (id: %s)",base.char,bound)
end
end
index = index or 1
@@ -119,15 +125,13 @@ function nodes.set_mark(start,base,factor,rlmode,ba,ma,index) --ba=baseanchor, m
return dx, dy, bound
end
-function nodes.trace_injection(head)
- local function dir(n)
- return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or ("unset")
- end
- local function report(...)
- logs.report("nodes finisher",...)
- end
- report("begin run")
- for n in traverse_id(glyph,head) do
+local function dir(n)
+ return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset"
+end
+
+local function trace(head)
+ report_injections("begin run")
+ for n in traverse_id(glyph_code,head) do
if n.subtype < 256 then
local kp = has_attribute(n,kernpair)
local mb = has_attribute(n,markbase)
@@ -135,61 +139,62 @@ function nodes.trace_injection(head)
local md = has_attribute(n,markdone)
local cb = has_attribute(n,cursbase)
local cc = has_attribute(n,curscurs)
- report("char U+%05X, font=%s",n.char,n.font)
+ report_injections("char U+%05X, font=%s",n.char,n.font)
if kp then
local k = kerns[kp]
if k[3] then
- report(" pairkern: dir=%s, x=%s, y=%s, w=%s, h=%s",dir(k[1]),k[2] or "?",k[3] or "?",k[4] or "?",k[5] or "?")
+ report_injections(" pairkern: dir=%s, x=%s, y=%s, w=%s, h=%s",dir(k[1]),k[2] or "?",k[3] or "?",k[4] or "?",k[5] or "?")
else
- report(" kern: dir=%s, dx=%s",dir(k[1]),k[2] or "?")
+ report_injections(" kern: dir=%s, dx=%s",dir(k[1]),k[2] or "?")
end
end
if mb then
- report(" markbase: bound=%s",mb)
+ report_injections(" markbase: bound=%s",mb)
end
if mm then
local m = marks[mm]
if mb then
local m = m[mb]
if m then
- report(" markmark: bound=%s, index=%s, dx=%s, dy=%s",mm,md or "?",m[1] or "?",m[2] or "?")
+ report_injections(" markmark: bound=%s, index=%s, dx=%s, dy=%s",mm,md or "?",m[1] or "?",m[2] or "?")
else
- report(" markmark: bound=%s, missing index",mm)
+ report_injections(" markmark: bound=%s, missing index",mm)
end
else
m = m[1]
- report(" markmark: bound=%s, dx=%s, dy=%s",mm,m[1] or "?",m[2] or "?")
+ report_injections(" markmark: bound=%s, dx=%s, dy=%s",mm,m[1] or "?",m[2] or "?")
end
end
if cb then
- report(" cursbase: bound=%s",cb)
+ report_injections(" cursbase: bound=%s",cb)
end
if cc then
local c = cursives[cc]
- report(" curscurs: bound=%s, dir=%s, dx=%s, dy=%s",cc,dir(c[1]),c[2] or "?",c[3] or "?")
+ report_injections(" curscurs: bound=%s, dir=%s, dx=%s, dy=%s",cc,dir(c[1]),c[2] or "?",c[3] or "?")
end
end
end
- report("end run")
+ report_injections("end run")
end
-- todo: reuse tables (i.e. no collection), but will be extra fields anyway
-- todo: check for attribute
-function nodes.inject_kerns(head,where,keep)
+function injections.handler(head,where,keep)
local has_marks, has_cursives, has_kerns = next(marks), next(cursives), next(kerns)
if has_marks or has_cursives then
--~ if has_marks or has_cursives or has_kerns then
if trace_injections then
- nodes.trace_injection(head)
+ trace(head)
end
-- in the future variant we will not copy items but refs to tables
- local done, ky, rl, valid, cx, wx, mk = false, { }, { }, { }, { }, { }, { }
+ local done, ky, rl, valid, cx, wx, mk, nofvalid = false, { }, { }, { }, { }, { }, { }, 0
if has_kerns then -- move outside loop
local nf, tm = nil, nil
- for n in traverse_id(glyph,head) do
+ for n in traverse_id(glyph_code,head) do
if n.subtype < 256 then
- valid[#valid+1] = n
+ nofvalid = nofvalid + 1
+ valid[nofvalid] = n
if n.font ~= nf then
nf = n.font
tm = fontdata[nf].marks
@@ -215,9 +220,10 @@ function nodes.inject_kerns(head,where,keep)
end
else
local nf, tm = nil, nil
- for n in traverse_id(glyph,head) do
+ for n in traverse_id(glyph_code,head) do
if n.subtype < 256 then
- valid[#valid+1] = n
+ nofvalid = nofvalid + 1
+ valid[nofvalid] = n
if n.font ~= nf then
nf = n.font
tm = fontdata[nf].marks
@@ -226,7 +232,7 @@ function nodes.inject_kerns(head,where,keep)
end
end
end
- if #valid > 0 then
+ if nofvalid > 0 then
-- we can assume done == true because we have cursives and marks
local cx = { }
if has_kerns and next(ky) then
@@ -239,7 +245,7 @@ function nodes.inject_kerns(head,where,keep)
local p_cursbase, p = nil, nil
-- since we need valid[n+1] we can also use a "while true do"
local t, d, maxt = { }, { }, 0
- for i=1,#valid do -- valid == glyphs
+ for i=1,nofvalid do -- valid == glyphs
local n = valid[i]
if not mk[n] then
local n_cursbase = has_attribute(n,cursbase)
@@ -303,12 +309,12 @@ function nodes.inject_kerns(head,where,keep)
end
end
if has_marks then
- for i=1,#valid do
+ for i=1,nofvalid do
local p = valid[i]
local p_markbase = has_attribute(p,markbase)
if p_markbase then
local mrks = marks[p_markbase]
- for n in traverse_id(glyph,p.next) do
+ for n in traverse_id(glyph_code,p.next) do
local n_markmark = has_attribute(n,markmark)
if p_markbase == n_markmark then
local index = has_attribute(n,markdone) or 1
@@ -391,9 +397,9 @@ function nodes.inject_kerns(head,where,keep)
end
elseif has_kerns then
if trace_injections then
- nodes.trace_injection(head)
+ trace(head)
end
- for n in traverse_id(glyph,head) do
+ for n in traverse_id(glyph_code,head) do
if n.subtype < 256 then
local k = has_attribute(n,kernpair)
if k then