Diffstat (file mode, lines changed, path):

-rw-r--r--     11  .gitignore
-rw-r--r--     64  Makefile
-rw-r--r--     17  NEWS
-rw-r--r--    178  filegraph.dot
-rw-r--r--   3744  font-age.lua
-rwxr-xr-x    395  fontdbutil.lua
-rw-r--r--     83  luaotfload-basics-gen.lua (renamed from otfl-basics-gen.lua)
-rw-r--r--      0  luaotfload-basics-nod.lua (renamed from otfl-basics-nod.lua)
-rw-r--r--      0  luaotfload-blacklist.cnf (renamed from otfl-blacklist.cnf)
-rw-r--r--     68  luaotfload-colors.lua (renamed from otfl-font-clr.lua)
-rw-r--r--   1049  luaotfload-database.lua
-rw-r--r--    575  luaotfload-features.lua
-rw-r--r--      0  luaotfload-fonts-cbk.lua (renamed from otfl-fonts-cbk.lua)
-rw-r--r--     97  luaotfload-fonts-def.lua
-rw-r--r--      0  luaotfload-fonts-enc.lua (renamed from otfl-fonts-enc.lua)
-rw-r--r--     16  luaotfload-fonts-ext.lua (renamed from otfl-fonts-ext.lua)
-rw-r--r--      0  luaotfload-fonts-lua.lua (renamed from otfl-fonts-lua.lua)
-rw-r--r--      0  luaotfload-fonts-tfm.lua (renamed from otfl-fonts-tfm.lua)
-rw-r--r--    449  luaotfload-lib-dir.lua
-rw-r--r--     24  luaotfload-loaders.lua
-rw-r--r--  11041  luaotfload-merged.lua
-rw-r--r--     81  luaotfload-override.lua
-rw-r--r--   1663  luaotfload.dtx
-rwxr-xr-x    143  mkglyphlist
-rwxr-xr-x    101  mkluatexfontdb.lua
-rw-r--r--    135  otfl-data-con.lua
-rw-r--r--    165  otfl-font-cid.lua
-rw-r--r--   1332  otfl-font-con.lua
-rw-r--r--    440  otfl-font-def.lua
-rw-r--r--     38  otfl-font-ini.lua
-rw-r--r--    195  otfl-font-ltx.lua
-rw-r--r--    307  otfl-font-map.lua
-rw-r--r--    770  otfl-font-nms.lua
-rw-r--r--    373  otfl-font-ota.lua
-rw-r--r--    636  otfl-font-otb.lua
-rw-r--r--    334  otfl-font-otc.lua
-rw-r--r--   2080  otfl-font-otf.lua
-rw-r--r--     92  otfl-font-oti.lua
-rw-r--r--   2712  otfl-font-otn.lua
-rw-r--r--      8  otfl-font-pfb.lua
-rw-r--r--     36  otfl-luat-ovr.lua
-rw-r--r--    498  otfl-node-inj.lua
-rw-r--r--      1  tests/alternate_sub.tex
-rw-r--r--      6  tests/color.tex
-rw-r--r--     41  tests/fontspec_lookup.ltx
-rw-r--r--      6  tests/fullname.tex
-rw-r--r--     14  tests/lookups.tex
-rw-r--r--     10  tests/marks.tex
-rw-r--r--      1  tests/math.tex
-rw-r--r--     31  tests/microtypography.tex
-rw-r--r--     30  tests/opbd.tex
-rw-r--r--     16  tests/zero_width_marks_lig.tex
52 files changed, 15551 insertions, 14555 deletions
diff --git a/.gitignore b/.gitignore
index 33bb4d9..5c508a2 100644
--- a/.gitignore
+++ b/.gitignore
@@ -20,3 +20,14 @@ luaotfload.zip
# Temporary files in the tests directory
tests/*.log
tests/*.pdf
+tests/*.lua
+tests/*.otf
+tests/*.aux
+tests/phg-*.tex
+tests/*.pfb
+tests/*.afm
+tests/*.tfm
+tests/*.dvi
+tests/*.ofm
+tests/*.ovp
+tests/*.ovf
diff --git a/Makefile b/Makefile
index 69c5ede..4821619 100644
--- a/Makefile
+++ b/Makefile
@@ -1,26 +1,34 @@
# Makefile for luaotfload
-NAME = luaotfload
-DOC = $(NAME).pdf
-DTX = $(NAME).dtx
-OTFL = $(wildcard otfl-*.lua) otfl-blacklist.cnf font-age.lua
-SCRIPT = mkluatexfontdb.lua
+NAME = luaotfload
+DOC = $(NAME).pdf
+DTX = $(NAME).dtx
+OTFL = $(wildcard otfl-*.lua) otfl-blacklist.cnf font-age.lua
+SCRIPT = fontdbutil
+
+GLYPHSCRIPT = mkglyphlist
+GLYPHSOURCE = glyphlist.txt
+
+GRAPH = filegraph
+DOTPDF = $(GRAPH).pdf
+DOT = $(GRAPH).dot
# Files grouped by generation mode
-COMPILED = $(DOC)
-UNPACKED = luaotfload.sty luaotfload.lua
-GENERATED = $(COMPILED) $(UNPACKED)
-SOURCE = $(DTX) $(OTFL) README Makefile NEWS $(SCRIPT)
+GLYPHS = font-age.lua
+GRAPHED = $(DOTPDF)
+COMPILED = $(DOC)
+UNPACKED = luaotfload.sty luaotfload.lua
+GENERATED = $(GRAPHED) $(COMPILED) $(UNPACKED) $(GLYPHS)
+SOURCE = $(DTX) $(OTFL) README Makefile NEWS $(SCRIPT) $(GLYPHSCRIPT)
# test files
-TESTDIR = tests
-TESTFILES = $(wildcard $(TESTDIR)/*.tex)
-TESTFILES_SYS = $(TESTDIR)/systemfonts.tex $(TESTDIR)/fontconfig_conf_reading.tex
-TESTFILES_TL = $(filter-out $(TESTFILES_SYS), $(TESTFILES))
-TESTENV = env TEXINPUTS='.;..;$$TEXMF/tex/{luatex,plain,generic,}//' TEXMFVAR='../var'
+TESTDIR = tests
+TESTFILES = $(wildcard $(TESTDIR)/*.tex $(TESTDIR)/*.ltx)
+TESTFILES_SYS = $(TESTDIR)/systemfonts.tex $(TESTDIR)/fontconfig_conf_reading.tex
+TESTFILES_TL = $(filter-out $(TESTFILES_SYS), $(TESTFILES))
# Files grouped by installation location
-SCRIPTFILES = $(SCRIPT)
+SCRIPTFILES = $(SCRIPT) $(GLYPHSCRIPT)
RUNFILES = $(UNPACKED) $(OTFL)
DOCFILES = $(DOC) README NEWS
SRCFILES = $(DTX) Makefile
@@ -38,19 +46,29 @@ SRCDIR = $(TEXMFROOT)/source/$(FORMAT)/$(NAME)
TEXMFROOT = $(shell kpsewhich --var-value TEXMFHOME)
CTAN_ZIP = $(NAME).zip
-TDS_ZIP = $(NAME).tds.zip
-ZIPS = $(CTAN_ZIP) $(TDS_ZIP)
+TDS_ZIP = $(NAME).tds.zip
+ZIPS = $(CTAN_ZIP) $(TDS_ZIP)
-DO_TEX = tex --interaction=batchmode $< >/dev/null
-DO_LATEX = latexmk -pdf -pdflatex=lualatex -silent $< >/dev/null
+DO_TEX = tex --interaction=batchmode $< >/dev/null
+DO_LATEX = latexmk -pdf -pdflatex=lualatex -silent $< >/dev/null
+DO_GRAPHVIZ = dot -Tpdf -o $@ $< > /dev/null
+DO_GLYPHLIST = texlua ./mkglyphlist > /dev/null
all: $(GENERATED)
-doc: $(COMPILED)
+graph: $(GRAPHED)
+doc: $(GRAPHED) $(COMPILED)
unpack: $(UNPACKED)
+glyphs: $(GLYPHS)
ctan: check $(CTAN_ZIP)
tds: $(TDS_ZIP)
world: all ctan
+$(GLYPHS): /dev/null
+ $(DO_GLYPHLIST)
+
+$(GRAPHED): $(DOT)
+ $(DO_GRAPHVIZ)
+
$(COMPILED): $(DTX)
$(DO_LATEX)
@@ -85,9 +103,9 @@ install: $(ALL_FILES)
check: $(RUNFILES) $(TESTFILES_TL)
@rm -rf var
- @cd $(TESTDIR); for f in $(TESTFILES_TL); do \
+ @for f in $(TESTFILES_TL); do \
echo "check: luatex $$f"; \
- $(TESTENV) luatex --interaction=batchmode ../$$f \
+ luatex --interaction=batchmode $$f \
> /dev/null || exit $$?; \
done
@@ -109,5 +127,5 @@ clean:
@$(RM) -- *.log *.aux *.toc *.idx *.ind *.ilg *.out $(TESTDIR)/*.log
mrproper: clean
- @$(RM) -- $(GENERATED) $(ZIPS) $(TESTDIR)/*.pdf
+ @$(RM) -- $(GENERATED) $(ZIPS) $(GLYPHSOURCE) $(TESTDIR)/*.pdf
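
The Makefile now builds two additional artefacts: the file graph (dot -Tpdf over filegraph.dot) and the glyph list font-age.lua, regenerated by running mkglyphlist over the Adobe Glyph List source (glyphlist.txt). As a rough illustration of that conversion step, and not the actual mkglyphlist script, here is a Lua sketch that assumes the AGL's plain-text "glyphname;HEXCODE" line format:

    -- Sketch only: derive a glyph-name -> codepoint table of the kind
    -- stored in font-age.lua from an AGL-style glyphlist.txt.
    local glyphs = { }
    for line in io.lines("glyphlist.txt") do
        if not line:find("^#") then                      -- skip AGL comment lines
            local name, code = line:match("^([^;]+);(%x+)")
            if name and code then
                glyphs[name] = tonumber(code, 16)        -- "Aacute;00C1" -> 193
            end
        end
    end
    print(glyphs["Aacute"])                              --> 193

Multi-codepoint AGL entries and the serialisation of the table back into a Lua file are omitted here; the real script writes its result out as font-age.lua, which the Makefile now treats as a generated file and removes on "mrproper".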
diff --git a/NEWS b/NEWS
index 42e6e14..754cbcd 100644
--- a/NEWS
+++ b/NEWS
@@ -1,6 +1,23 @@
Change History
--------------
+2013/04/xx, luaotfload v2.2:
+ * Synchronisation with ConTeXt from TeXLive 2013, inducing
+ backward-incompatible changes in the font structure (fontspec and
+ unicode-math must be updated)
+ * Synchronisation with ConTeXt is now easier and can be done by just
+ updating otfl-fonts-merged.lua (available in ConTeXt)
+ * Improve documentation
+ * Renamed mkluatexfontdb to fontdbutil, with more search functionality
+
+2013/04/11, luaotfload v1.28:
+ * Adapting to LuaTeX 0.75, keeping backward compatibility
+ * Fix small documentation issues in mkluatexfontdb
+ * Fix possible infinite loop with fontconfig configuration file references
+ * Adding semibold synonym for bold
+ * file:xxx syntax now uses the same search function as name:, which
+ makes more fonts recognized
+
2011/04/21, luaotfload v1.25:
* Fix bug loading *.dfont fonts
* Misc. documentation fixes
diff --git a/filegraph.dot b/filegraph.dot
new file mode 100644
index 0000000..f1283f0
--- /dev/null
+++ b/filegraph.dot
@@ -0,0 +1,178 @@
+strict digraph luaotfload_files { //looks weird with circo ...
+ compound = true;
+
+// label = "Schematic of the files included in Luaotfload.";
+// labelloc = "b";
+
+ fontsize = "14.4";
+ labelfontname = "Iwona Medium Regular";
+ fontname = "Iwona Light Regular";
+ size = "21cm";
+
+ rankdir = LR;
+ ranksep = 0.618;
+ nodesep = 1.618;
+
+ edge [
+ arrowhead = onormal,
+ fontname = "Iwona Cond Regular",
+ penwidth = 1.0,
+ ];
+ node [
+ //penwidth = 0.7,
+ fontname = "Liberation Mono",
+ fontsize = 12,
+ ];
+
+/* ····································································
+ * file structure
+ * ································································· */
+ luaotfload -> otfl_fonts_merged [label="merged"]
+ luaotfload -> merged_lua_libs [label="unmerged", style=solid]
+ luaotfload -> merged_luatex_fonts [label="unmerged", style=solid]
+ luaotfload -> merged_context_libs [label="unmerged", style=solid]
+
+ luaotfload -> luaotfload_libs
+ luaotfload -> otfl_blacklist_cnf
+
+
+ otfl_fonts_merged -> merged_lua_libs [label="merged",
+ style=dotted,
+ lhead=cluster_merged]
+ otfl_fonts_merged -> merged_luatex_fonts [label="merged",
+ style=dotted,
+ lhead=cluster_merged]
+ otfl_fonts_merged -> merged_context_libs [label="merged",
+ style=dotted,
+ lhead=cluster_merged]
+
+
+
+/* ····································································
+ * main files
+ * ································································· */
+
+ luaotfload [label = "luaotfload.lua",
+ shape = rect,
+ width = "3.2cm",
+ height = "1.2cm",
+ color = "#01012222",
+ style = "filled,rounded",
+ penwidth=2]
+ /*
+ *otfl_fonts [label = "luaotfload-fonts.lua",
+ * shape = rect,
+ * width = "3.2cm",
+ * height = "1.2cm",
+ * color = "#01012222",
+ * style = "filled,rounded",
+ * penwidth=2]
+ */
+ otfl_fonts_merged [label = "luaotfload-merged.lua",
+ shape = rect,
+ width = "3.2cm",
+ height = "1.2cm",
+ color = "#01012222",
+ style = "filled,rounded",
+ penwidth=2]
+
+/* ····································································
+ * luaotfload files
+ * ································································· */
+
+
+ otfl_blacklist_cnf [
+ shape = rect,
+ width = "3.2cm",
+ fillcolor = "#01012222",
+ color = grey40,
+ style = "filled,dotted,rounded",
+ label = "luaotfload-blacklist.cnf"]
+
+ luaotfload_libs [
+ shape = box,
+ style = "filled,rounded",
+ color = "grey90:goldenrod4",
+ fontsize = 10,
+ label = <
+ <table cellborder="0" bgcolor="#FFFFFFAA">
+ <th> <td colspan="2"> <font point-size="12" face="Iwona Italic">Luaotfload Libraries</font> </td> </th>
+ <tr> <td>luaotfload-lib-dir.lua</td> <td>luaotfload-features.lua</td> </tr>
+ <tr> <td>luaotfload-override.lua</td> <td>luaotfload-loaders.lua</td> </tr>
+ <tr> <td>luaotfload-database.lua</td> <td>luaotfload-colors.lua</td> </tr>
+ </table>
+ >,
+ ]
+
+/* ····································································
+ * merged files
+ * ································································· */
+
+ subgraph cluster_merged {
+ node [style=filled, color=white];
+ style = "filled,rounded";
+ color = "grey90:dodgerblue4";
+ //nodesep = "3.0";
+ rank = same;
+ label = "Merged Libraries";
+ gradientangle=0;
+ merged_lua_libs;
+ merged_luatex_fonts;
+ merged_context_libs;
+ }
+
+ otfl_fonts_merged -> merged_lua_libs
+ otfl_fonts_merged -> merged_luatex_fonts
+ otfl_fonts_merged -> merged_context_libs
+
+ merged_lua_libs [
+ shape = box,
+ style = "filled,rounded",
+ color = "#FFFFFFAA",
+ fontsize = 10,
+ label = <
+ <table border="0">
+ <th> <td colspan="3"> <font point-size="12" face="Iwona Italic">Lua Libraries from Context</font> </td> </th>
+ <tr> <td>l-lua.lua</td> <td>l-lpeg.lua</td> <td>l-function.lua</td> </tr>
+ <tr> <td>l-string.lua</td> <td>l-table.lua</td> <td>l-io.lua</td> </tr>
+ <tr> <td>l-file.lua</td> <td>l-boolean.lua</td> <td>l-math.lua</td> </tr>
+ <tr> <td>util-str.lua</td> </tr>
+ </table>
+ >,
+ ]
+
+ merged_luatex_fonts [
+ shape = box,
+ style = "filled,rounded",
+ color = "#FFFFFFAA",
+ fontsize = 10,
+ label = <
+ <table border="0">
+ <th> <td colspan="2"> <font point-size="12" face="Iwona Italic">Font Loader (LuaTeX-Fonts)</font> </td> </th>
+ <tr> <td>luatex-basics-gen.lua</td> <td>luatex-basics-nod.lua</td> </tr>
+ <tr> <td>luatex-fonts-enc.lua</td> <td>luatex-fonts-syn.lua</td> </tr>
+ <tr> <td>luatex-fonts-tfm.lua</td> <td>luatex-fonts-chr.lua</td> </tr>
+ <tr> <td>luatex-fonts-lua.lua</td> <td>luatex-fonts-def.lua</td> </tr>
+ <tr> <td>luatex-fonts-ext.lua</td> <td>luatex-fonts-cbk.lua</td> </tr>
+ </table>
+ >,
+ ]
+
+ merged_context_libs [
+ shape = box,
+ style = "filled,rounded",
+ color = "#FFFFFFAA",
+ fontsize = 10,
+ label = <
+ <table border="0">
+ <th> <td colspan="3"> <font point-size="12" face="Iwona Italic"> Font and Node Libraries from Context </font> </td> </th>
+ <tr> <td>data-con.lua</td> <td>font-ini.lua</td> <td>font-con.lua</td> </tr>
+ <tr> <td>font-cid.lua</td> <td>font-map.lua</td> <td>font-oti.lua</td> </tr>
+ <tr> <td>font-otf.lua</td> <td>font-otb.lua</td> <td>node-inj.lua</td> </tr>
+ <tr> <td>font-ota.lua</td> <td>font-otn.lua</td> <td>font-def.lua</td> </tr>
+ </table>
+ >,
+ ]
+}
+
+// vim:ft=dot:sw=4:ts=4:expandtab
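
The graph draws two alternative load paths for the font loader: either everything arrives packed in luaotfload-merged.lua, or the same libraries are pulled in one by one when the merged file is unavailable, followed in both cases by luaotfload's own modules. A hypothetical Lua sketch of that decision, purely to illustrate the structure pictured above and not luaotfload's actual loader code:

    -- Illustration only: prefer the merged snapshot, fall back to the
    -- individual libraries shown in the clusters above.
    local function loadmodule (name)
        local found = kpse.find_file(name, "lua")    -- kpse is provided by LuaTeX
        if found then dofile(found) return true end
        return false
    end

    if not loadmodule("luaotfload-merged.lua") then
        loadmodule("luaotfload-basics-gen.lua")      -- unmerged fallback,
        loadmodule("luaotfload-basics-nod.lua")      -- module by module
        -- ... remaining LuaTeX-Fonts and ConTeXt libraries
    end
    loadmodule("luaotfload-loaders.lua")             -- luaotfload's own libraries
    loadmodule("luaotfload-database.lua")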
diff --git a/font-age.lua b/font-age.lua
deleted file mode 100644
index 741bb47..0000000
--- a/font-age.lua
+++ /dev/null
@@ -1,3744 +0,0 @@
-if not modules then modules = { } end modules ['font-age'] = {
- version = 1.001,
- comment = "companion to font-gee.lua",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "derived from http://www.adobe.com/devnet/opentype/archives/glyphlist.txt",
- original = "Adobe Glyph List, version 2.0, September 20, 2002",
-}
-
-if context then
- texio.write_nl("fatal error: this module is not for context")
- os.exit()
-end
-
-return { -- generated
- ["A"]=65,
- ["AE"]=198,
- ["AEacute"]=508,
- ["AEmacron"]=482,
- ["Aacute"]=193,
- ["Abreve"]=258,
- ["Abreveacute"]=7854,
- ["Abrevecyrillic"]=1232,
- ["Abrevedotbelow"]=7862,
- ["Abrevegrave"]=7856,
- ["Abrevehookabove"]=7858,
- ["Abrevetilde"]=7860,
- ["Acaron"]=461,
- ["Acircle"]=9398,
- ["Acircumflex"]=194,
- ["Acircumflexacute"]=7844,
- ["Acircumflexdotbelow"]=7852,
- ["Acircumflexgrave"]=7846,
- ["Acircumflexhookabove"]=7848,
- ["Acircumflextilde"]=7850,
- ["Adblgrave"]=512,
- ["Adieresis"]=196,
- ["Adieresiscyrillic"]=1234,
- ["Adieresismacron"]=478,
- ["Adotbelow"]=7840,
- ["Adotmacron"]=480,
- ["Agrave"]=192,
- ["Ahookabove"]=7842,
- ["Aiecyrillic"]=1236,
- ["Ainvertedbreve"]=514,
- ["Alpha"]=913,
- ["Alphatonos"]=902,
- ["Amacron"]=256,
- ["Amonospace"]=65313,
- ["Aogonek"]=260,
- ["Aring"]=197,
- ["Aringacute"]=506,
- ["Aringbelow"]=7680,
- ["Atilde"]=195,
- ["Aybarmenian"]=1329,
- ["B"]=66,
- ["Bcircle"]=9399,
- ["Bdotaccent"]=7682,
- ["Bdotbelow"]=7684,
- ["Benarmenian"]=1330,
- ["Beta"]=914,
- ["Bhook"]=385,
- ["Blinebelow"]=7686,
- ["Bmonospace"]=65314,
- ["Btopbar"]=386,
- ["C"]=67,
- ["Caarmenian"]=1342,
- ["Cacute"]=262,
- ["Ccaron"]=268,
- ["Ccedilla"]=199,
- ["Ccedillaacute"]=7688,
- ["Ccircle"]=9400,
- ["Ccircumflex"]=264,
- ["Cdotaccent"]=266,
- ["Chaarmenian"]=1353,
- ["Cheabkhasiancyrillic"]=1212,
- ["Chedescenderabkhasiancyrillic"]=1214,
- ["Chedescendercyrillic"]=1206,
- ["Chedieresiscyrillic"]=1268,
- ["Cheharmenian"]=1347,
- ["Chekhakassiancyrillic"]=1227,
- ["Cheverticalstrokecyrillic"]=1208,
- ["Chi"]=935,
- ["Chook"]=391,
- ["Cmonospace"]=65315,
- ["Coarmenian"]=1361,
- ["D"]=68,
- ["DZ"]=497,
- ["DZcaron"]=452,
- ["Daarmenian"]=1332,
- ["Dafrican"]=393,
- ["Dcaron"]=270,
- ["Dcedilla"]=7696,
- ["Dcircle"]=9401,
- ["Dcircumflexbelow"]=7698,
- ["Ddotaccent"]=7690,
- ["Ddotbelow"]=7692,
- ["Deicoptic"]=1006,
- ["Deltagreek"]=916,
- ["Dhook"]=394,
- ["Digammagreek"]=988,
- ["Dlinebelow"]=7694,
- ["Dmonospace"]=65316,
- ["Dslash"]=272,
- ["Dtopbar"]=395,
- ["Dz"]=498,
- ["Dzcaron"]=453,
- ["Dzeabkhasiancyrillic"]=1248,
- ["E"]=69,
- ["Eacute"]=201,
- ["Ebreve"]=276,
- ["Ecaron"]=282,
- ["Ecedillabreve"]=7708,
- ["Echarmenian"]=1333,
- ["Ecircle"]=9402,
- ["Ecircumflex"]=202,
- ["Ecircumflexacute"]=7870,
- ["Ecircumflexbelow"]=7704,
- ["Ecircumflexdotbelow"]=7878,
- ["Ecircumflexgrave"]=7872,
- ["Ecircumflexhookabove"]=7874,
- ["Ecircumflextilde"]=7876,
- ["Edblgrave"]=516,
- ["Edieresis"]=203,
- ["Edotaccent"]=278,
- ["Edotbelow"]=7864,
- ["Egrave"]=200,
- ["Eharmenian"]=1335,
- ["Ehookabove"]=7866,
- ["Eightroman"]=8551,
- ["Einvertedbreve"]=518,
- ["Eiotifiedcyrillic"]=1124,
- ["Elevenroman"]=8554,
- ["Emacron"]=274,
- ["Emacronacute"]=7702,
- ["Emacrongrave"]=7700,
- ["Emonospace"]=65317,
- ["Endescendercyrillic"]=1186,
- ["Eng"]=330,
- ["Enghecyrillic"]=1188,
- ["Enhookcyrillic"]=1223,
- ["Eogonek"]=280,
- ["Eopen"]=400,
- ["Epsilon"]=917,
- ["Epsilontonos"]=904,
- ["Ereversed"]=398,
- ["Esdescendercyrillic"]=1194,
- ["Esh"]=425,
- ["Eta"]=919,
- ["Etarmenian"]=1336,
- ["Etatonos"]=905,
- ["Eth"]=208,
- ["Etilde"]=7868,
- ["Etildebelow"]=7706,
- ["Ezh"]=439,
- ["Ezhcaron"]=494,
- ["Ezhreversed"]=440,
- ["F"]=70,
- ["Fcircle"]=9403,
- ["Fdotaccent"]=7710,
- ["Feharmenian"]=1366,
- ["Feicoptic"]=996,
- ["Fhook"]=401,
- ["Fiveroman"]=8548,
- ["Fmonospace"]=65318,
- ["Fourroman"]=8547,
- ["G"]=71,
- ["GBsquare"]=13191,
- ["Gacute"]=500,
- ["Gamma"]=915,
- ["Gammaafrican"]=404,
- ["Gangiacoptic"]=1002,
- ["Gbreve"]=286,
- ["Gcaron"]=486,
- ["Gcircle"]=9404,
- ["Gcircumflex"]=284,
- ["Gcommaaccent"]=290,
- ["Gdotaccent"]=288,
- ["Ghadarmenian"]=1346,
- ["Ghemiddlehookcyrillic"]=1172,
- ["Ghestrokecyrillic"]=1170,
- ["Ghook"]=403,
- ["Gimarmenian"]=1331,
- ["Gmacron"]=7712,
- ["Gmonospace"]=65319,
- ["Gsmallhook"]=667,
- ["Gstroke"]=484,
- ["H"]=72,
- ["HPsquare"]=13259,
- ["Haabkhasiancyrillic"]=1192,
- ["Hadescendercyrillic"]=1202,
- ["Hbar"]=294,
- ["Hbrevebelow"]=7722,
- ["Hcedilla"]=7720,
- ["Hcircle"]=9405,
- ["Hcircumflex"]=292,
- ["Hdieresis"]=7718,
- ["Hdotaccent"]=7714,
- ["Hdotbelow"]=7716,
- ["Hmonospace"]=65320,
- ["Hoarmenian"]=1344,
- ["Horicoptic"]=1000,
- ["Hzsquare"]=13200,
- ["I"]=73,
- ["IJ"]=306,
- ["Iacute"]=205,
- ["Ibreve"]=300,
- ["Icaron"]=463,
- ["Icircle"]=9406,
- ["Icircumflex"]=206,
- ["Idblgrave"]=520,
- ["Idieresis"]=207,
- ["Idieresisacute"]=7726,
- ["Idieresiscyrillic"]=1252,
- ["Idotaccent"]=304,
- ["Idotbelow"]=7882,
- ["Iebrevecyrillic"]=1238,
- ["Ifraktur"]=8465,
- ["Igrave"]=204,
- ["Ihookabove"]=7880,
- ["Iinvertedbreve"]=522,
- ["Imacron"]=298,
- ["Imacroncyrillic"]=1250,
- ["Imonospace"]=65321,
- ["Iniarmenian"]=1339,
- ["Iogonek"]=302,
- ["Iota"]=921,
- ["Iotaafrican"]=406,
- ["Iotadieresis"]=938,
- ["Iotatonos"]=906,
- ["Istroke"]=407,
- ["Itilde"]=296,
- ["Itildebelow"]=7724,
- ["Izhitsadblgravecyrillic"]=1142,
- ["J"]=74,
- ["Jaarmenian"]=1345,
- ["Jcircle"]=9407,
- ["Jcircumflex"]=308,
- ["Jheharmenian"]=1355,
- ["Jmonospace"]=65322,
- ["K"]=75,
- ["KBsquare"]=13189,
- ["KKsquare"]=13261,
- ["Kabashkircyrillic"]=1184,
- ["Kacute"]=7728,
- ["Kadescendercyrillic"]=1178,
- ["Kahookcyrillic"]=1219,
- ["Kappa"]=922,
- ["Kastrokecyrillic"]=1182,
- ["Kaverticalstrokecyrillic"]=1180,
- ["Kcaron"]=488,
- ["Kcircle"]=9408,
- ["Kcommaaccent"]=310,
- ["Kdotbelow"]=7730,
- ["Keharmenian"]=1364,
- ["Kenarmenian"]=1343,
- ["Kheicoptic"]=998,
- ["Khook"]=408,
- ["Klinebelow"]=7732,
- ["Kmonospace"]=65323,
- ["Koppacyrillic"]=1152,
- ["Koppagreek"]=990,
- ["Ksicyrillic"]=1134,
- ["L"]=76,
- ["LJ"]=455,
- ["Lacute"]=313,
- ["Lambda"]=923,
- ["Lcaron"]=317,
- ["Lcircle"]=9409,
- ["Lcircumflexbelow"]=7740,
- ["Lcommaaccent"]=315,
- ["Ldotaccent"]=319,
- ["Ldotbelow"]=7734,
- ["Ldotbelowmacron"]=7736,
- ["Liwnarmenian"]=1340,
- ["Lj"]=456,
- ["Llinebelow"]=7738,
- ["Lmonospace"]=65324,
- ["Lslash"]=321,
- ["M"]=77,
- ["MBsquare"]=13190,
- ["Macute"]=7742,
- ["Mcircle"]=9410,
- ["Mdotaccent"]=7744,
- ["Mdotbelow"]=7746,
- ["Menarmenian"]=1348,
- ["Mmonospace"]=65325,
- ["Mturned"]=412,
- ["Mu"]=924,
- ["N"]=78,
- ["NJ"]=458,
- ["Nacute"]=323,
- ["Ncaron"]=327,
- ["Ncircle"]=9411,
- ["Ncircumflexbelow"]=7754,
- ["Ncommaaccent"]=325,
- ["Ndotaccent"]=7748,
- ["Ndotbelow"]=7750,
- ["Nhookleft"]=413,
- ["Nineroman"]=8552,
- ["Nj"]=459,
- ["Nlinebelow"]=7752,
- ["Nmonospace"]=65326,
- ["Nowarmenian"]=1350,
- ["Ntilde"]=209,
- ["Nu"]=925,
- ["O"]=79,
- ["OE"]=338,
- ["Oacute"]=211,
- ["Obarredcyrillic"]=1256,
- ["Obarreddieresiscyrillic"]=1258,
- ["Obreve"]=334,
- ["Ocaron"]=465,
- ["Ocenteredtilde"]=415,
- ["Ocircle"]=9412,
- ["Ocircumflex"]=212,
- ["Ocircumflexacute"]=7888,
- ["Ocircumflexdotbelow"]=7896,
- ["Ocircumflexgrave"]=7890,
- ["Ocircumflexhookabove"]=7892,
- ["Ocircumflextilde"]=7894,
- ["Odblgrave"]=524,
- ["Odieresis"]=214,
- ["Odieresiscyrillic"]=1254,
- ["Odotbelow"]=7884,
- ["Ograve"]=210,
- ["Oharmenian"]=1365,
- ["Ohookabove"]=7886,
- ["Ohorn"]=416,
- ["Ohornacute"]=7898,
- ["Ohorndotbelow"]=7906,
- ["Ohorngrave"]=7900,
- ["Ohornhookabove"]=7902,
- ["Ohorntilde"]=7904,
- ["Ohungarumlaut"]=336,
- ["Oi"]=418,
- ["Oinvertedbreve"]=526,
- ["Omacron"]=332,
- ["Omacronacute"]=7762,
- ["Omacrongrave"]=7760,
- ["Omega"]=8486,
- ["Omegacyrillic"]=1120,
- ["Omegagreek"]=937,
- ["Omegaroundcyrillic"]=1146,
- ["Omegatitlocyrillic"]=1148,
- ["Omegatonos"]=911,
- ["Omicron"]=927,
- ["Omicrontonos"]=908,
- ["Omonospace"]=65327,
- ["Oneroman"]=8544,
- ["Oogonek"]=490,
- ["Oogonekmacron"]=492,
- ["Oopen"]=390,
- ["Oslash"]=216,
- ["Ostrokeacute"]=510,
- ["Otcyrillic"]=1150,
- ["Otilde"]=213,
- ["Otildeacute"]=7756,
- ["Otildedieresis"]=7758,
- ["P"]=80,
- ["Pacute"]=7764,
- ["Pcircle"]=9413,
- ["Pdotaccent"]=7766,
- ["Peharmenian"]=1354,
- ["Pemiddlehookcyrillic"]=1190,
- ["Phi"]=934,
- ["Phook"]=420,
- ["Pi"]=928,
- ["Piwrarmenian"]=1363,
- ["Pmonospace"]=65328,
- ["Psi"]=936,
- ["Psicyrillic"]=1136,
- ["Q"]=81,
- ["Qcircle"]=9414,
- ["Qmonospace"]=65329,
- ["R"]=82,
- ["Raarmenian"]=1356,
- ["Racute"]=340,
- ["Rcaron"]=344,
- ["Rcircle"]=9415,
- ["Rcommaaccent"]=342,
- ["Rdblgrave"]=528,
- ["Rdotaccent"]=7768,
- ["Rdotbelow"]=7770,
- ["Rdotbelowmacron"]=7772,
- ["Reharmenian"]=1360,
- ["Rfraktur"]=8476,
- ["Rho"]=929,
- ["Rinvertedbreve"]=530,
- ["Rlinebelow"]=7774,
- ["Rmonospace"]=65330,
- ["Rsmallinverted"]=641,
- ["Rsmallinvertedsuperior"]=694,
- ["S"]=83,
- ["SF010000"]=9484,
- ["SF020000"]=9492,
- ["SF030000"]=9488,
- ["SF040000"]=9496,
- ["SF050000"]=9532,
- ["SF060000"]=9516,
- ["SF070000"]=9524,
- ["SF080000"]=9500,
- ["SF090000"]=9508,
- ["SF100000"]=9472,
- ["SF110000"]=9474,
- ["SF190000"]=9569,
- ["SF200000"]=9570,
- ["SF210000"]=9558,
- ["SF220000"]=9557,
- ["SF230000"]=9571,
- ["SF240000"]=9553,
- ["SF250000"]=9559,
- ["SF260000"]=9565,
- ["SF270000"]=9564,
- ["SF280000"]=9563,
- ["SF360000"]=9566,
- ["SF370000"]=9567,
- ["SF380000"]=9562,
- ["SF390000"]=9556,
- ["SF400000"]=9577,
- ["SF410000"]=9574,
- ["SF420000"]=9568,
- ["SF430000"]=9552,
- ["SF440000"]=9580,
- ["SF450000"]=9575,
- ["SF460000"]=9576,
- ["SF470000"]=9572,
- ["SF480000"]=9573,
- ["SF490000"]=9561,
- ["SF500000"]=9560,
- ["SF510000"]=9554,
- ["SF520000"]=9555,
- ["SF530000"]=9579,
- ["SF540000"]=9578,
- ["Sacute"]=346,
- ["Sacutedotaccent"]=7780,
- ["Sampigreek"]=992,
- ["Scaron"]=352,
- ["Scarondotaccent"]=7782,
- ["Scedilla"]=350,
- ["Schwa"]=399,
- ["Schwacyrillic"]=1240,
- ["Schwadieresiscyrillic"]=1242,
- ["Scircle"]=9416,
- ["Scircumflex"]=348,
- ["Scommaaccent"]=536,
- ["Sdotaccent"]=7776,
- ["Sdotbelow"]=7778,
- ["Sdotbelowdotaccent"]=7784,
- ["Seharmenian"]=1357,
- ["Sevenroman"]=8550,
- ["Shaarmenian"]=1351,
- ["Sheicoptic"]=994,
- ["Shhacyrillic"]=1210,
- ["Shimacoptic"]=1004,
- ["Sigma"]=931,
- ["Sixroman"]=8549,
- ["Smonospace"]=65331,
- ["Stigmagreek"]=986,
- ["T"]=84,
- ["Tau"]=932,
- ["Tbar"]=358,
- ["Tcaron"]=356,
- ["Tcircle"]=9417,
- ["Tcircumflexbelow"]=7792,
- ["Tcommaaccent"]=354,
- ["Tdotaccent"]=7786,
- ["Tdotbelow"]=7788,
- ["Tedescendercyrillic"]=1196,
- ["Tenroman"]=8553,
- ["Tetsecyrillic"]=1204,
- ["Theta"]=920,
- ["Thook"]=428,
- ["Thorn"]=222,
- ["Threeroman"]=8546,
- ["Tiwnarmenian"]=1359,
- ["Tlinebelow"]=7790,
- ["Tmonospace"]=65332,
- ["Toarmenian"]=1337,
- ["Tonefive"]=444,
- ["Tonesix"]=388,
- ["Tonetwo"]=423,
- ["Tretroflexhook"]=430,
- ["Twelveroman"]=8555,
- ["Tworoman"]=8545,
- ["U"]=85,
- ["Uacute"]=218,
- ["Ubreve"]=364,
- ["Ucaron"]=467,
- ["Ucircle"]=9418,
- ["Ucircumflex"]=219,
- ["Ucircumflexbelow"]=7798,
- ["Udblgrave"]=532,
- ["Udieresis"]=220,
- ["Udieresisacute"]=471,
- ["Udieresisbelow"]=7794,
- ["Udieresiscaron"]=473,
- ["Udieresiscyrillic"]=1264,
- ["Udieresisgrave"]=475,
- ["Udieresismacron"]=469,
- ["Udotbelow"]=7908,
- ["Ugrave"]=217,
- ["Uhookabove"]=7910,
- ["Uhorn"]=431,
- ["Uhornacute"]=7912,
- ["Uhorndotbelow"]=7920,
- ["Uhorngrave"]=7914,
- ["Uhornhookabove"]=7916,
- ["Uhorntilde"]=7918,
- ["Uhungarumlaut"]=368,
- ["Uhungarumlautcyrillic"]=1266,
- ["Uinvertedbreve"]=534,
- ["Ukcyrillic"]=1144,
- ["Umacron"]=362,
- ["Umacroncyrillic"]=1262,
- ["Umacrondieresis"]=7802,
- ["Umonospace"]=65333,
- ["Uogonek"]=370,
- ["Upsilon"]=933,
- ["Upsilonacutehooksymbolgreek"]=979,
- ["Upsilonafrican"]=433,
- ["Upsilondieresis"]=939,
- ["Upsilondieresishooksymbolgreek"]=980,
- ["Upsilonhooksymbol"]=978,
- ["Upsilontonos"]=910,
- ["Uring"]=366,
- ["Ustraightcyrillic"]=1198,
- ["Ustraightstrokecyrillic"]=1200,
- ["Utilde"]=360,
- ["Utildeacute"]=7800,
- ["Utildebelow"]=7796,
- ["V"]=86,
- ["Vcircle"]=9419,
- ["Vdotbelow"]=7806,
- ["Vewarmenian"]=1358,
- ["Vhook"]=434,
- ["Vmonospace"]=65334,
- ["Voarmenian"]=1352,
- ["Vtilde"]=7804,
- ["W"]=87,
- ["Wacute"]=7810,
- ["Wcircle"]=9420,
- ["Wcircumflex"]=372,
- ["Wdieresis"]=7812,
- ["Wdotaccent"]=7814,
- ["Wdotbelow"]=7816,
- ["Wgrave"]=7808,
- ["Wmonospace"]=65335,
- ["X"]=88,
- ["Xcircle"]=9421,
- ["Xdieresis"]=7820,
- ["Xdotaccent"]=7818,
- ["Xeharmenian"]=1341,
- ["Xi"]=926,
- ["Xmonospace"]=65336,
- ["Y"]=89,
- ["Yacute"]=221,
- ["Ycircle"]=9422,
- ["Ycircumflex"]=374,
- ["Ydieresis"]=376,
- ["Ydotaccent"]=7822,
- ["Ydotbelow"]=7924,
- ["Yerudieresiscyrillic"]=1272,
- ["Ygrave"]=7922,
- ["Yhook"]=435,
- ["Yhookabove"]=7926,
- ["Yiarmenian"]=1349,
- ["Yiwnarmenian"]=1362,
- ["Ymonospace"]=65337,
- ["Ytilde"]=7928,
- ["Yusbigcyrillic"]=1130,
- ["Yusbigiotifiedcyrillic"]=1132,
- ["Yuslittlecyrillic"]=1126,
- ["Yuslittleiotifiedcyrillic"]=1128,
- ["Z"]=90,
- ["Zaarmenian"]=1334,
- ["Zacute"]=377,
- ["Zcaron"]=381,
- ["Zcircle"]=9423,
- ["Zcircumflex"]=7824,
- ["Zdotaccent"]=379,
- ["Zdotbelow"]=7826,
- ["Zedescendercyrillic"]=1176,
- ["Zedieresiscyrillic"]=1246,
- ["Zeta"]=918,
- ["Zhearmenian"]=1338,
- ["Zhebrevecyrillic"]=1217,
- ["Zhedescendercyrillic"]=1174,
- ["Zhedieresiscyrillic"]=1244,
- ["Zlinebelow"]=7828,
- ["Zmonospace"]=65338,
- ["Zstroke"]=437,
- ["a"]=97,
- ["aabengali"]=2438,
- ["aacute"]=225,
- ["aadeva"]=2310,
- ["aagujarati"]=2694,
- ["aagurmukhi"]=2566,
- ["aamatragurmukhi"]=2622,
- ["aarusquare"]=13059,
- ["aavowelsignbengali"]=2494,
- ["aavowelsigndeva"]=2366,
- ["aavowelsigngujarati"]=2750,
- ["abbreviationmarkarmenian"]=1375,
- ["abbreviationsigndeva"]=2416,
- ["abengali"]=2437,
- ["abopomofo"]=12570,
- ["abreve"]=259,
- ["abreveacute"]=7855,
- ["abrevecyrillic"]=1233,
- ["abrevedotbelow"]=7863,
- ["abrevegrave"]=7857,
- ["abrevehookabove"]=7859,
- ["abrevetilde"]=7861,
- ["acaron"]=462,
- ["acircle"]=9424,
- ["acircumflex"]=226,
- ["acircumflexacute"]=7845,
- ["acircumflexdotbelow"]=7853,
- ["acircumflexgrave"]=7847,
- ["acircumflexhookabove"]=7849,
- ["acircumflextilde"]=7851,
- ["acute"]=180,
- ["acutebelowcmb"]=791,
- ["acutecomb"]=769,
- ["acutedeva"]=2388,
- ["acutelowmod"]=719,
- ["acutetonecmb"]=833,
- ["adblgrave"]=513,
- ["addakgurmukhi"]=2673,
- ["adeva"]=2309,
- ["adieresis"]=228,
- ["adieresiscyrillic"]=1235,
- ["adieresismacron"]=479,
- ["adotbelow"]=7841,
- ["adotmacron"]=481,
- ["ae"]=230,
- ["aeacute"]=509,
- ["aekorean"]=12624,
- ["aemacron"]=483,
- ["afii10017"]=1040,
- ["afii10018"]=1041,
- ["afii10019"]=1042,
- ["afii10020"]=1043,
- ["afii10021"]=1044,
- ["afii10022"]=1045,
- ["afii10023"]=1025,
- ["afii10024"]=1046,
- ["afii10025"]=1047,
- ["afii10026"]=1048,
- ["afii10027"]=1049,
- ["afii10028"]=1050,
- ["afii10029"]=1051,
- ["afii10030"]=1052,
- ["afii10031"]=1053,
- ["afii10032"]=1054,
- ["afii10033"]=1055,
- ["afii10034"]=1056,
- ["afii10035"]=1057,
- ["afii10036"]=1058,
- ["afii10037"]=1059,
- ["afii10038"]=1060,
- ["afii10039"]=1061,
- ["afii10040"]=1062,
- ["afii10041"]=1063,
- ["afii10042"]=1064,
- ["afii10043"]=1065,
- ["afii10044"]=1066,
- ["afii10045"]=1067,
- ["afii10046"]=1068,
- ["afii10047"]=1069,
- ["afii10048"]=1070,
- ["afii10049"]=1071,
- ["afii10050"]=1168,
- ["afii10051"]=1026,
- ["afii10052"]=1027,
- ["afii10053"]=1028,
- ["afii10054"]=1029,
- ["afii10055"]=1030,
- ["afii10056"]=1031,
- ["afii10057"]=1032,
- ["afii10058"]=1033,
- ["afii10059"]=1034,
- ["afii10060"]=1035,
- ["afii10061"]=1036,
- ["afii10062"]=1038,
- ["afii10065"]=1072,
- ["afii10145"]=1039,
- ["afii10146"]=1122,
- ["afii10147"]=1138,
- ["afii10148"]=1140,
- ["afii299"]=8206,
- ["afii300"]=8207,
- ["afii301"]=8205,
- ["afii57534"]=1749,
- ["afii61573"]=8236,
- ["afii61574"]=8237,
- ["afii61575"]=8238,
- ["agrave"]=224,
- ["agujarati"]=2693,
- ["agurmukhi"]=2565,
- ["ahiragana"]=12354,
- ["ahookabove"]=7843,
- ["aibengali"]=2448,
- ["aibopomofo"]=12574,
- ["aideva"]=2320,
- ["aiecyrillic"]=1237,
- ["aigujarati"]=2704,
- ["aigurmukhi"]=2576,
- ["aimatragurmukhi"]=2632,
- ["ainarabic"]=1593,
- ["ainfinalarabic"]=65226,
- ["aininitialarabic"]=65227,
- ["ainmedialarabic"]=65228,
- ["ainvertedbreve"]=515,
- ["aivowelsignbengali"]=2504,
- ["aivowelsigndeva"]=2376,
- ["aivowelsigngujarati"]=2760,
- ["akatakana"]=12450,
- ["akatakanahalfwidth"]=65393,
- ["akorean"]=12623,
- ["alefarabic"]=1575,
- ["alefdageshhebrew"]=64304,
- ["aleffinalarabic"]=65166,
- ["alefhamzaabovearabic"]=1571,
- ["alefhamzaabovefinalarabic"]=65156,
- ["alefhamzabelowarabic"]=1573,
- ["alefhamzabelowfinalarabic"]=65160,
- ["alefhebrew"]=1488,
- ["aleflamedhebrew"]=64335,
- ["alefmaddaabovearabic"]=1570,
- ["alefmaddaabovefinalarabic"]=65154,
- ["alefmaksuraarabic"]=1609,
- ["alefmaksurafinalarabic"]=65264,
- ["alefpatahhebrew"]=64302,
- ["alefqamatshebrew"]=64303,
- ["aleph"]=8501,
- ["allequal"]=8780,
- ["alpha"]=945,
- ["alphatonos"]=940,
- ["amacron"]=257,
- ["amonospace"]=65345,
- ["ampersand"]=38,
- ["ampersandmonospace"]=65286,
- ["amsquare"]=13250,
- ["anbopomofo"]=12578,
- ["angbopomofo"]=12580,
- ["angkhankhuthai"]=3674,
- ["angle"]=8736,
- ["anglebracketleft"]=12296,
- ["anglebracketleftvertical"]=65087,
- ["anglebracketright"]=12297,
- ["anglebracketrightvertical"]=65088,
- ["angleleft"]=9001,
- ["angleright"]=9002,
- ["angstrom"]=8491,
- ["anoteleia"]=903,
- ["anudattadeva"]=2386,
- ["anusvarabengali"]=2434,
- ["anusvaradeva"]=2306,
- ["anusvaragujarati"]=2690,
- ["aogonek"]=261,
- ["apaatosquare"]=13056,
- ["aparen"]=9372,
- ["apostrophearmenian"]=1370,
- ["apostrophemod"]=700,
- ["apple"]=63743,
- ["approaches"]=8784,
- ["approxequal"]=8776,
- ["approxequalorimage"]=8786,
- ["araeaekorean"]=12686,
- ["araeakorean"]=12685,
- ["arc"]=8978,
- ["arighthalfring"]=7834,
- ["aring"]=229,
- ["aringacute"]=507,
- ["aringbelow"]=7681,
- ["arrowboth"]=8596,
- ["arrowdashdown"]=8675,
- ["arrowdashleft"]=8672,
- ["arrowdashright"]=8674,
- ["arrowdashup"]=8673,
- ["arrowdbldown"]=8659,
- ["arrowdblup"]=8657,
- ["arrowdown"]=8595,
- ["arrowdownleft"]=8601,
- ["arrowdownright"]=8600,
- ["arrowdownwhite"]=8681,
- ["arrowheaddownmod"]=709,
- ["arrowheadleftmod"]=706,
- ["arrowheadrightmod"]=707,
- ["arrowheadupmod"]=708,
- ["arrowleft"]=8592,
- ["arrowleftdbl"]=8656,
- ["arrowleftdblstroke"]=8653,
- ["arrowleftoverright"]=8646,
- ["arrowleftwhite"]=8678,
- ["arrowright"]=8594,
- ["arrowrightdblstroke"]=8655,
- ["arrowrightheavy"]=10142,
- ["arrowrightoverleft"]=8644,
- ["arrowrightwhite"]=8680,
- ["arrowtableft"]=8676,
- ["arrowtabright"]=8677,
- ["arrowup"]=8593,
- ["arrowupdn"]=8597,
- ["arrowupdownbase"]=8616,
- ["arrowupleft"]=8598,
- ["arrowupleftofdown"]=8645,
- ["arrowupright"]=8599,
- ["arrowupwhite"]=8679,
- ["asciicircum"]=94,
- ["asciicircummonospace"]=65342,
- ["asciitilde"]=126,
- ["asciitildemonospace"]=65374,
- ["ascript"]=593,
- ["ascriptturned"]=594,
- ["asmallhiragana"]=12353,
- ["asmallkatakana"]=12449,
- ["asmallkatakanahalfwidth"]=65383,
- ["asterisk"]=42,
- ["asteriskarabic"]=1645,
- ["asteriskmath"]=8727,
- ["asteriskmonospace"]=65290,
- ["asterisksmall"]=65121,
- ["asterism"]=8258,
- ["asymptoticallyequal"]=8771,
- ["at"]=64,
- ["atilde"]=227,
- ["atmonospace"]=65312,
- ["atsmall"]=65131,
- ["aturned"]=592,
- ["aubengali"]=2452,
- ["aubopomofo"]=12576,
- ["audeva"]=2324,
- ["augujarati"]=2708,
- ["augurmukhi"]=2580,
- ["aulengthmarkbengali"]=2519,
- ["aumatragurmukhi"]=2636,
- ["auvowelsignbengali"]=2508,
- ["auvowelsigndeva"]=2380,
- ["auvowelsigngujarati"]=2764,
- ["avagrahadeva"]=2365,
- ["aybarmenian"]=1377,
- ["ayinaltonehebrew"]=64288,
- ["ayinhebrew"]=1506,
- ["b"]=98,
- ["babengali"]=2476,
- ["backslash"]=92,
- ["backslashmonospace"]=65340,
- ["badeva"]=2348,
- ["bagujarati"]=2732,
- ["bagurmukhi"]=2604,
- ["bahiragana"]=12400,
- ["bahtthai"]=3647,
- ["bakatakana"]=12496,
- ["barmonospace"]=65372,
- ["bbopomofo"]=12549,
- ["bcircle"]=9425,
- ["bdotaccent"]=7683,
- ["bdotbelow"]=7685,
- ["beamedsixteenthnotes"]=9836,
- ["because"]=8757,
- ["becyrillic"]=1073,
- ["beharabic"]=1576,
- ["behfinalarabic"]=65168,
- ["behinitialarabic"]=65169,
- ["behiragana"]=12409,
- ["behmedialarabic"]=65170,
- ["behmeeminitialarabic"]=64671,
- ["behmeemisolatedarabic"]=64520,
- ["behnoonfinalarabic"]=64621,
- ["bekatakana"]=12505,
- ["benarmenian"]=1378,
- ["beta"]=946,
- ["betasymbolgreek"]=976,
- ["betdageshhebrew"]=64305,
- ["bethebrew"]=1489,
- ["betrafehebrew"]=64332,
- ["bhabengali"]=2477,
- ["bhadeva"]=2349,
- ["bhagujarati"]=2733,
- ["bhagurmukhi"]=2605,
- ["bhook"]=595,
- ["bihiragana"]=12403,
- ["bikatakana"]=12499,
- ["bilabialclick"]=664,
- ["bindigurmukhi"]=2562,
- ["birusquare"]=13105,
- ["blackcircle"]=9679,
- ["blackdiamond"]=9670,
- ["blackleftpointingtriangle"]=9664,
- ["blacklenticularbracketleft"]=12304,
- ["blacklenticularbracketleftvertical"]=65083,
- ["blacklenticularbracketright"]=12305,
- ["blacklenticularbracketrightvertical"]=65084,
- ["blacklowerlefttriangle"]=9699,
- ["blacklowerrighttriangle"]=9698,
- ["blackrightpointingtriangle"]=9654,
- ["blacksmallsquare"]=9642,
- ["blackstar"]=9733,
- ["blackupperlefttriangle"]=9700,
- ["blackupperrighttriangle"]=9701,
- ["blackuppointingsmalltriangle"]=9652,
- ["blank"]=9251,
- ["blinebelow"]=7687,
- ["block"]=9608,
- ["bmonospace"]=65346,
- ["bobaimaithai"]=3610,
- ["bohiragana"]=12412,
- ["bokatakana"]=12508,
- ["bparen"]=9373,
- ["bqsquare"]=13251,
- ["braceleft"]=123,
- ["braceleftmonospace"]=65371,
- ["braceleftsmall"]=65115,
- ["braceleftvertical"]=65079,
- ["braceright"]=125,
- ["bracerightmonospace"]=65373,
- ["bracerightsmall"]=65116,
- ["bracerightvertical"]=65080,
- ["bracketleft"]=91,
- ["bracketleftmonospace"]=65339,
- ["bracketright"]=93,
- ["bracketrightmonospace"]=65341,
- ["breve"]=728,
- ["brevebelowcmb"]=814,
- ["brevecmb"]=774,
- ["breveinvertedbelowcmb"]=815,
- ["breveinvertedcmb"]=785,
- ["breveinverteddoublecmb"]=865,
- ["bridgebelowcmb"]=810,
- ["bridgeinvertedbelowcmb"]=826,
- ["brokenbar"]=166,
- ["bstroke"]=384,
- ["btopbar"]=387,
- ["buhiragana"]=12406,
- ["bukatakana"]=12502,
- ["bullet"]=8226,
- ["bulletoperator"]=8729,
- ["bullseye"]=9678,
- ["c"]=99,
- ["caarmenian"]=1390,
- ["cabengali"]=2458,
- ["cacute"]=263,
- ["cadeva"]=2330,
- ["cagujarati"]=2714,
- ["cagurmukhi"]=2586,
- ["calsquare"]=13192,
- ["candrabindubengali"]=2433,
- ["candrabinducmb"]=784,
- ["candrabindudeva"]=2305,
- ["candrabindugujarati"]=2689,
- ["capslock"]=8682,
- ["careof"]=8453,
- ["caron"]=711,
- ["caronbelowcmb"]=812,
- ["caroncmb"]=780,
- ["carriagereturn"]=8629,
- ["cbopomofo"]=12568,
- ["ccaron"]=269,
- ["ccedilla"]=231,
- ["ccedillaacute"]=7689,
- ["ccircle"]=9426,
- ["ccircumflex"]=265,
- ["ccurl"]=597,
- ["cdotaccent"]=267,
- ["cdsquare"]=13253,
- ["cedilla"]=184,
- ["cedillacmb"]=807,
- ["cent"]=162,
- ["centigrade"]=8451,
- ["centmonospace"]=65504,
- ["chaarmenian"]=1401,
- ["chabengali"]=2459,
- ["chadeva"]=2331,
- ["chagujarati"]=2715,
- ["chagurmukhi"]=2587,
- ["chbopomofo"]=12564,
- ["cheabkhasiancyrillic"]=1213,
- ["checkmark"]=10003,
- ["checyrillic"]=1095,
- ["chedescenderabkhasiancyrillic"]=1215,
- ["chedescendercyrillic"]=1207,
- ["chedieresiscyrillic"]=1269,
- ["cheharmenian"]=1395,
- ["chekhakassiancyrillic"]=1228,
- ["cheverticalstrokecyrillic"]=1209,
- ["chi"]=967,
- ["chieuchacirclekorean"]=12919,
- ["chieuchaparenkorean"]=12823,
- ["chieuchcirclekorean"]=12905,
- ["chieuchkorean"]=12618,
- ["chieuchparenkorean"]=12809,
- ["chochangthai"]=3594,
- ["chochanthai"]=3592,
- ["chochingthai"]=3593,
- ["chochoethai"]=3596,
- ["chook"]=392,
- ["cieucacirclekorean"]=12918,
- ["cieucaparenkorean"]=12822,
- ["cieuccirclekorean"]=12904,
- ["cieuckorean"]=12616,
- ["cieucparenkorean"]=12808,
- ["cieucuparenkorean"]=12828,
- ["circleot"]=8857,
- ["circlepostalmark"]=12342,
- ["circlewithlefthalfblack"]=9680,
- ["circlewithrighthalfblack"]=9681,
- ["circumflex"]=710,
- ["circumflexbelowcmb"]=813,
- ["circumflexcmb"]=770,
- ["clear"]=8999,
- ["clickalveolar"]=450,
- ["clickdental"]=448,
- ["clicklateral"]=449,
- ["clickretroflex"]=451,
- ["clubsuitblack"]=9827,
- ["clubsuitwhite"]=9831,
- ["cmcubedsquare"]=13220,
- ["cmonospace"]=65347,
- ["cmsquaredsquare"]=13216,
- ["coarmenian"]=1409,
- ["colon"]=58,
- ["colonmonospace"]=65306,
- ["colonsign"]=8353,
- ["colonsmall"]=65109,
- ["colontriangularhalfmod"]=721,
- ["colontriangularmod"]=720,
- ["comma"]=44,
- ["commaabovecmb"]=787,
- ["commaaboverightcmb"]=789,
- ["commaarabic"]=1548,
- ["commaarmenian"]=1373,
- ["commamonospace"]=65292,
- ["commareversedabovecmb"]=788,
- ["commareversedmod"]=701,
- ["commasmall"]=65104,
- ["commaturnedabovecmb"]=786,
- ["commaturnedmod"]=699,
- ["congruent"]=8773,
- ["contourintegral"]=8750,
- ["control"]=8963,
- ["controlACK"]=6,
- ["controlBEL"]=7,
- ["controlBS"]=8,
- ["controlCAN"]=24,
- ["controlCR"]=13,
- ["controlDC1"]=17,
- ["controlDC2"]=18,
- ["controlDC3"]=19,
- ["controlDC4"]=20,
- ["controlDEL"]=127,
- ["controlDLE"]=16,
- ["controlEM"]=25,
- ["controlENQ"]=5,
- ["controlEOT"]=4,
- ["controlESC"]=27,
- ["controlETB"]=23,
- ["controlETX"]=3,
- ["controlFF"]=12,
- ["controlFS"]=28,
- ["controlGS"]=29,
- ["controlHT"]=9,
- ["controlLF"]=10,
- ["controlNAK"]=21,
- ["controlRS"]=30,
- ["controlSI"]=15,
- ["controlSO"]=14,
- ["controlSOT"]=2,
- ["controlSTX"]=1,
- ["controlSUB"]=26,
- ["controlSYN"]=22,
- ["controlUS"]=31,
- ["controlVT"]=11,
- ["copyright"]=169,
- ["cornerbracketleft"]=12300,
- ["cornerbracketlefthalfwidth"]=65378,
- ["cornerbracketleftvertical"]=65089,
- ["cornerbracketright"]=12301,
- ["cornerbracketrighthalfwidth"]=65379,
- ["cornerbracketrightvertical"]=65090,
- ["corporationsquare"]=13183,
- ["cosquare"]=13255,
- ["coverkgsquare"]=13254,
- ["cparen"]=9374,
- ["cruzeiro"]=8354,
- ["cstretched"]=663,
- ["curlyand"]=8911,
- ["curlyor"]=8910,
- ["currency"]=164,
- ["d"]=100,
- ["daarmenian"]=1380,
- ["dabengali"]=2470,
- ["dadarabic"]=1590,
- ["dadeva"]=2342,
- ["dadfinalarabic"]=65214,
- ["dadinitialarabic"]=65215,
- ["dadmedialarabic"]=65216,
- ["dageshhebrew"]=1468,
- ["dagger"]=8224,
- ["daggerdbl"]=8225,
- ["dagujarati"]=2726,
- ["dagurmukhi"]=2598,
- ["dahiragana"]=12384,
- ["dakatakana"]=12480,
- ["dalarabic"]=1583,
- ["daletdageshhebrew"]=64307,
- ["dalettserehebrew"]=1491,
- ["dalfinalarabic"]=65194,
- ["dammalowarabic"]=1615,
- ["dammatanarabic"]=1612,
- ["danda"]=2404,
- ["dargalefthebrew"]=1447,
- ["dasiapneumatacyrilliccmb"]=1157,
- ["dblanglebracketleft"]=12298,
- ["dblanglebracketleftvertical"]=65085,
- ["dblanglebracketright"]=12299,
- ["dblanglebracketrightvertical"]=65086,
- ["dblarchinvertedbelowcmb"]=811,
- ["dblarrowleft"]=8660,
- ["dblarrowright"]=8658,
- ["dbldanda"]=2405,
- ["dblgravecmb"]=783,
- ["dblintegral"]=8748,
- ["dbllowlinecmb"]=819,
- ["dbloverlinecmb"]=831,
- ["dblprimemod"]=698,
- ["dblverticalbar"]=8214,
- ["dblverticallineabovecmb"]=782,
- ["dbopomofo"]=12553,
- ["dbsquare"]=13256,
- ["dcaron"]=271,
- ["dcedilla"]=7697,
- ["dcircle"]=9427,
- ["dcircumflexbelow"]=7699,
- ["ddabengali"]=2465,
- ["ddadeva"]=2337,
- ["ddagujarati"]=2721,
- ["ddagurmukhi"]=2593,
- ["ddalarabic"]=1672,
- ["ddalfinalarabic"]=64393,
- ["dddhadeva"]=2396,
- ["ddhabengali"]=2466,
- ["ddhadeva"]=2338,
- ["ddhagujarati"]=2722,
- ["ddhagurmukhi"]=2594,
- ["ddotaccent"]=7691,
- ["ddotbelow"]=7693,
- ["decimalseparatorpersian"]=1643,
- ["decyrillic"]=1076,
- ["degree"]=176,
- ["dehihebrew"]=1453,
- ["dehiragana"]=12391,
- ["deicoptic"]=1007,
- ["dekatakana"]=12487,
- ["deleteleft"]=9003,
- ["deleteright"]=8998,
- ["delta"]=948,
- ["deltaturned"]=397,
- ["denominatorminusonenumeratorbengali"]=2552,
- ["dezh"]=676,
- ["dhabengali"]=2471,
- ["dhadeva"]=2343,
- ["dhagujarati"]=2727,
- ["dhagurmukhi"]=2599,
- ["dhook"]=599,
- ["dialytikatonoscmb"]=836,
- ["diamond"]=9830,
- ["diamondsuitwhite"]=9826,
- ["dieresis"]=168,
- ["dieresisbelowcmb"]=804,
- ["dieresiscmb"]=776,
- ["dieresistonos"]=901,
- ["dihiragana"]=12386,
- ["dikatakana"]=12482,
- ["dittomark"]=12291,
- ["divide"]=247,
- ["divides"]=8739,
- ["divisionslash"]=8725,
- ["djecyrillic"]=1106,
- ["dlinebelow"]=7695,
- ["dlsquare"]=13207,
- ["dmacron"]=273,
- ["dmonospace"]=65348,
- ["dnblock"]=9604,
- ["dochadathai"]=3598,
- ["dodekthai"]=3604,
- ["dohiragana"]=12393,
- ["dokatakana"]=12489,
- ["dollar"]=36,
- ["dollarmonospace"]=65284,
- ["dollarsmall"]=65129,
- ["dong"]=8363,
- ["dorusquare"]=13094,
- ["dotaccent"]=729,
- ["dotaccentcmb"]=775,
- ["dotbelowcomb"]=803,
- ["dotkatakana"]=12539,
- ["dotlessi"]=305,
- ["dotlessjstrokehook"]=644,
- ["dotmath"]=8901,
- ["dottedcircle"]=9676,
- ["downtackbelowcmb"]=798,
- ["downtackmod"]=725,
- ["dparen"]=9375,
- ["dtail"]=598,
- ["dtopbar"]=396,
- ["duhiragana"]=12389,
- ["dukatakana"]=12485,
- ["dz"]=499,
- ["dzaltone"]=675,
- ["dzcaron"]=454,
- ["dzcurl"]=677,
- ["dzeabkhasiancyrillic"]=1249,
- ["dzecyrillic"]=1109,
- ["dzhecyrillic"]=1119,
- ["e"]=101,
- ["eacute"]=233,
- ["earth"]=9793,
- ["ebengali"]=2447,
- ["ebopomofo"]=12572,
- ["ebreve"]=277,
- ["ecandradeva"]=2317,
- ["ecandragujarati"]=2701,
- ["ecandravowelsigndeva"]=2373,
- ["ecandravowelsigngujarati"]=2757,
- ["ecaron"]=283,
- ["ecedillabreve"]=7709,
- ["echarmenian"]=1381,
- ["echyiwnarmenian"]=1415,
- ["ecircle"]=9428,
- ["ecircumflex"]=234,
- ["ecircumflexacute"]=7871,
- ["ecircumflexbelow"]=7705,
- ["ecircumflexdotbelow"]=7879,
- ["ecircumflexgrave"]=7873,
- ["ecircumflexhookabove"]=7875,
- ["ecircumflextilde"]=7877,
- ["ecyrillic"]=1108,
- ["edblgrave"]=517,
- ["edeva"]=2319,
- ["edieresis"]=235,
- ["edotaccent"]=279,
- ["edotbelow"]=7865,
- ["eegurmukhi"]=2575,
- ["eematragurmukhi"]=2631,
- ["efcyrillic"]=1092,
- ["egrave"]=232,
- ["egujarati"]=2703,
- ["eharmenian"]=1383,
- ["ehbopomofo"]=12573,
- ["ehiragana"]=12360,
- ["ehookabove"]=7867,
- ["eibopomofo"]=12575,
- ["eight"]=56,
- ["eightbengali"]=2542,
- ["eightcircle"]=9319,
- ["eightcircleinversesansserif"]=10129,
- ["eightdeva"]=2414,
- ["eighteencircle"]=9329,
- ["eighteenparen"]=9349,
- ["eighteenperiod"]=9369,
- ["eightgujarati"]=2798,
- ["eightgurmukhi"]=2670,
- ["eighthackarabic"]=1640,
- ["eighthangzhou"]=12328,
- ["eightideographicparen"]=12839,
- ["eightinferior"]=8328,
- ["eightmonospace"]=65304,
- ["eightparen"]=9339,
- ["eightperiod"]=9359,
- ["eightpersian"]=1784,
- ["eightroman"]=8567,
- ["eightsuperior"]=8312,
- ["eightthai"]=3672,
- ["einvertedbreve"]=519,
- ["eiotifiedcyrillic"]=1125,
- ["ekatakana"]=12456,
- ["ekatakanahalfwidth"]=65396,
- ["ekonkargurmukhi"]=2676,
- ["ekorean"]=12628,
- ["elcyrillic"]=1083,
- ["element"]=8712,
- ["elevencircle"]=9322,
- ["elevenparen"]=9342,
- ["elevenperiod"]=9362,
- ["elevenroman"]=8570,
- ["ellipsis"]=8230,
- ["ellipsisvertical"]=8942,
- ["emacron"]=275,
- ["emacronacute"]=7703,
- ["emacrongrave"]=7701,
- ["emcyrillic"]=1084,
- ["emdash"]=8212,
- ["emdashvertical"]=65073,
- ["emonospace"]=65349,
- ["emphasismarkarmenian"]=1371,
- ["emptyset"]=8709,
- ["enbopomofo"]=12579,
- ["encyrillic"]=1085,
- ["endash"]=8211,
- ["endashvertical"]=65074,
- ["endescendercyrillic"]=1187,
- ["eng"]=331,
- ["engbopomofo"]=12581,
- ["enghecyrillic"]=1189,
- ["enhookcyrillic"]=1224,
- ["enspace"]=8194,
- ["eogonek"]=281,
- ["eokorean"]=12627,
- ["eopen"]=603,
- ["eopenclosed"]=666,
- ["eopenreversed"]=604,
- ["eopenreversedclosed"]=606,
- ["eopenreversedhook"]=605,
- ["eparen"]=9376,
- ["epsilon"]=949,
- ["epsilontonos"]=941,
- ["equal"]=61,
- ["equalmonospace"]=65309,
- ["equalsmall"]=65126,
- ["equalsuperior"]=8316,
- ["equivalence"]=8801,
- ["erbopomofo"]=12582,
- ["ercyrillic"]=1088,
- ["ereversed"]=600,
- ["ereversedcyrillic"]=1101,
- ["escyrillic"]=1089,
- ["esdescendercyrillic"]=1195,
- ["esh"]=643,
- ["eshcurl"]=646,
- ["eshortdeva"]=2318,
- ["eshortvowelsigndeva"]=2374,
- ["eshreversedloop"]=426,
- ["eshsquatreversed"]=645,
- ["esmallhiragana"]=12359,
- ["esmallkatakana"]=12455,
- ["esmallkatakanahalfwidth"]=65386,
- ["estimated"]=8494,
- ["eta"]=951,
- ["etarmenian"]=1384,
- ["etatonos"]=942,
- ["eth"]=240,
- ["etilde"]=7869,
- ["etildebelow"]=7707,
- ["etnahtalefthebrew"]=1425,
- ["eturned"]=477,
- ["eukorean"]=12641,
- ["euro"]=8364,
- ["evowelsignbengali"]=2503,
- ["evowelsigndeva"]=2375,
- ["evowelsigngujarati"]=2759,
- ["exclam"]=33,
- ["exclamarmenian"]=1372,
- ["exclamdbl"]=8252,
- ["exclamdown"]=161,
- ["exclammonospace"]=65281,
- ["ezh"]=658,
- ["ezhcaron"]=495,
- ["ezhcurl"]=659,
- ["ezhreversed"]=441,
- ["ezhtail"]=442,
- ["f"]=102,
- ["fadeva"]=2398,
- ["fagurmukhi"]=2654,
- ["fahrenheit"]=8457,
- ["fathalowarabic"]=1614,
- ["fathatanarabic"]=1611,
- ["fbopomofo"]=12552,
- ["fcircle"]=9429,
- ["fdotaccent"]=7711,
- ["feharabic"]=1601,
- ["feharmenian"]=1414,
- ["fehfinalarabic"]=65234,
- ["fehinitialarabic"]=65235,
- ["fehmedialarabic"]=65236,
- ["feicoptic"]=997,
- ["ff"]=64256,
- ["ffi"]=64259,
- ["ffl"]=64260,
- ["fi"]=64257,
- ["fifteencircle"]=9326,
- ["fifteenparen"]=9346,
- ["fifteenperiod"]=9366,
- ["figuredash"]=8210,
- ["filledbox"]=9632,
- ["filledrect"]=9644,
- ["finalkafdageshhebrew"]=64314,
- ["finalkafshevahebrew"]=1498,
- ["finalmemhebrew"]=1501,
- ["finalnunhebrew"]=1503,
- ["finalpehebrew"]=1507,
- ["finaltsadihebrew"]=1509,
- ["firsttonechinese"]=713,
- ["fisheye"]=9673,
- ["fitacyrillic"]=1139,
- ["five"]=53,
- ["fivebengali"]=2539,
- ["fivecircle"]=9316,
- ["fivecircleinversesansserif"]=10126,
- ["fivedeva"]=2411,
- ["fiveeighths"]=8541,
- ["fivegujarati"]=2795,
- ["fivegurmukhi"]=2667,
- ["fivehackarabic"]=1637,
- ["fivehangzhou"]=12325,
- ["fiveideographicparen"]=12836,
- ["fiveinferior"]=8325,
- ["fivemonospace"]=65301,
- ["fiveparen"]=9336,
- ["fiveperiod"]=9356,
- ["fivepersian"]=1781,
- ["fiveroman"]=8564,
- ["fivesuperior"]=8309,
- ["fivethai"]=3669,
- ["fl"]=64258,
- ["florin"]=402,
- ["fmonospace"]=65350,
- ["fmsquare"]=13209,
- ["fofanthai"]=3615,
- ["fofathai"]=3613,
- ["fongmanthai"]=3663,
- ["four"]=52,
- ["fourbengali"]=2538,
- ["fourcircle"]=9315,
- ["fourcircleinversesansserif"]=10125,
- ["fourdeva"]=2410,
- ["fourgujarati"]=2794,
- ["fourgurmukhi"]=2666,
- ["fourhackarabic"]=1636,
- ["fourhangzhou"]=12324,
- ["fourideographicparen"]=12835,
- ["fourinferior"]=8324,
- ["fourmonospace"]=65300,
- ["fournumeratorbengali"]=2551,
- ["fourparen"]=9335,
- ["fourperiod"]=9355,
- ["fourpersian"]=1780,
- ["fourroman"]=8563,
- ["foursuperior"]=8308,
- ["fourteencircle"]=9325,
- ["fourteenparen"]=9345,
- ["fourteenperiod"]=9365,
- ["fourthai"]=3668,
- ["fourthtonechinese"]=715,
- ["fparen"]=9377,
- ["fraction"]=8260,
- ["franc"]=8355,
- ["g"]=103,
- ["gabengali"]=2455,
- ["gacute"]=501,
- ["gadeva"]=2327,
- ["gafarabic"]=1711,
- ["gaffinalarabic"]=64403,
- ["gafinitialarabic"]=64404,
- ["gafmedialarabic"]=64405,
- ["gagujarati"]=2711,
- ["gagurmukhi"]=2583,
- ["gahiragana"]=12364,
- ["gakatakana"]=12460,
- ["gamma"]=947,
- ["gammalatinsmall"]=611,
- ["gammasuperior"]=736,
- ["gangiacoptic"]=1003,
- ["gbopomofo"]=12557,
- ["gbreve"]=287,
- ["gcaron"]=487,
- ["gcircle"]=9430,
- ["gcircumflex"]=285,
- ["gcommaaccent"]=291,
- ["gdotaccent"]=289,
- ["gecyrillic"]=1075,
- ["gehiragana"]=12370,
- ["gekatakana"]=12466,
- ["geometricallyequal"]=8785,
- ["gereshaccenthebrew"]=1436,
- ["gereshhebrew"]=1523,
- ["gereshmuqdamhebrew"]=1437,
- ["germandbls"]=223,
- ["gershayimaccenthebrew"]=1438,
- ["gershayimhebrew"]=1524,
- ["getamark"]=12307,
- ["ghabengali"]=2456,
- ["ghadarmenian"]=1394,
- ["ghadeva"]=2328,
- ["ghagujarati"]=2712,
- ["ghagurmukhi"]=2584,
- ["ghainarabic"]=1594,
- ["ghainfinalarabic"]=65230,
- ["ghaininitialarabic"]=65231,
- ["ghainmedialarabic"]=65232,
- ["ghemiddlehookcyrillic"]=1173,
- ["ghestrokecyrillic"]=1171,
- ["gheupturncyrillic"]=1169,
- ["ghhadeva"]=2394,
- ["ghhagurmukhi"]=2650,
- ["ghook"]=608,
- ["ghzsquare"]=13203,
- ["gihiragana"]=12366,
- ["gikatakana"]=12462,
- ["gimarmenian"]=1379,
- ["gimeldageshhebrew"]=64306,
- ["gimelhebrew"]=1490,
- ["gjecyrillic"]=1107,
- ["glottalinvertedstroke"]=446,
- ["glottalstop"]=660,
- ["glottalstopinverted"]=662,
- ["glottalstopmod"]=704,
- ["glottalstopreversed"]=661,
- ["glottalstopreversedmod"]=705,
- ["glottalstopreversedsuperior"]=740,
- ["glottalstopstroke"]=673,
- ["glottalstopstrokereversed"]=674,
- ["gmacron"]=7713,
- ["gmonospace"]=65351,
- ["gohiragana"]=12372,
- ["gokatakana"]=12468,
- ["gparen"]=9378,
- ["gpasquare"]=13228,
- ["grave"]=96,
- ["gravebelowcmb"]=790,
- ["gravecomb"]=768,
- ["gravedeva"]=2387,
- ["gravelowmod"]=718,
- ["gravemonospace"]=65344,
- ["gravetonecmb"]=832,
- ["greater"]=62,
- ["greaterequal"]=8805,
- ["greaterequalorless"]=8923,
- ["greatermonospace"]=65310,
- ["greaterorequivalent"]=8819,
- ["greaterorless"]=8823,
- ["greateroverequal"]=8807,
- ["greatersmall"]=65125,
- ["gscript"]=609,
- ["gstroke"]=485,
- ["guhiragana"]=12368,
- ["guillemotleft"]=171,
- ["guillemotright"]=187,
- ["guilsinglleft"]=8249,
- ["guilsinglright"]=8250,
- ["gukatakana"]=12464,
- ["guramusquare"]=13080,
- ["gysquare"]=13257,
- ["h"]=104,
- ["haabkhasiancyrillic"]=1193,
- ["habengali"]=2489,
- ["hadescendercyrillic"]=1203,
- ["hadeva"]=2361,
- ["hagujarati"]=2745,
- ["hagurmukhi"]=2617,
- ["haharabic"]=1581,
- ["hahfinalarabic"]=65186,
- ["hahinitialarabic"]=65187,
- ["hahiragana"]=12399,
- ["hahmedialarabic"]=65188,
- ["haitusquare"]=13098,
- ["hakatakana"]=12495,
- ["hakatakanahalfwidth"]=65418,
- ["halantgurmukhi"]=2637,
- ["hamzasukunarabic"]=1569,
- ["hangulfiller"]=12644,
- ["hardsigncyrillic"]=1098,
- ["harpoonleftbarbup"]=8636,
- ["harpoonrightbarbup"]=8640,
- ["hasquare"]=13258,
- ["hatafpatahwidehebrew"]=1458,
- ["hatafqamatswidehebrew"]=1459,
- ["hatafsegolwidehebrew"]=1457,
- ["hbar"]=295,
- ["hbopomofo"]=12559,
- ["hbrevebelow"]=7723,
- ["hcedilla"]=7721,
- ["hcircle"]=9431,
- ["hcircumflex"]=293,
- ["hdieresis"]=7719,
- ["hdotaccent"]=7715,
- ["hdotbelow"]=7717,
- ["heartsuitblack"]=9829,
- ["heartsuitwhite"]=9825,
- ["hedageshhebrew"]=64308,
- ["hehaltonearabic"]=1729,
- ["heharabic"]=1607,
- ["hehebrew"]=1492,
- ["hehfinalaltonearabic"]=64423,
- ["hehfinalarabic"]=65258,
- ["hehhamzaabovefinalarabic"]=64421,
- ["hehhamzaaboveisolatedarabic"]=64420,
- ["hehinitialaltonearabic"]=64424,
- ["hehinitialarabic"]=65259,
- ["hehiragana"]=12408,
- ["hehmedialaltonearabic"]=64425,
- ["hehmedialarabic"]=65260,
- ["heiseierasquare"]=13179,
- ["hekatakana"]=12504,
- ["hekatakanahalfwidth"]=65421,
- ["hekutaarusquare"]=13110,
- ["henghook"]=615,
- ["herutusquare"]=13113,
- ["hethebrew"]=1495,
- ["hhook"]=614,
- ["hhooksuperior"]=689,
- ["hieuhacirclekorean"]=12923,
- ["hieuhaparenkorean"]=12827,
- ["hieuhcirclekorean"]=12909,
- ["hieuhkorean"]=12622,
- ["hieuhparenkorean"]=12813,
- ["hihiragana"]=12402,
- ["hikatakana"]=12498,
- ["hikatakanahalfwidth"]=65419,
- ["hiriqwidehebrew"]=1460,
- ["hlinebelow"]=7830,
- ["hmonospace"]=65352,
- ["hoarmenian"]=1392,
- ["hohipthai"]=3627,
- ["hohiragana"]=12411,
- ["hokatakana"]=12507,
- ["hokatakanahalfwidth"]=65422,
- ["holamwidehebrew"]=1465,
- ["honokhukthai"]=3630,
- ["hookcmb"]=777,
- ["hookpalatalizedbelowcmb"]=801,
- ["hookretroflexbelowcmb"]=802,
- ["hoonsquare"]=13122,
- ["horicoptic"]=1001,
- ["horizontalbar"]=8213,
- ["horncmb"]=795,
- ["hotsprings"]=9832,
- ["house"]=8962,
- ["hparen"]=9379,
- ["hsuperior"]=688,
- ["hturned"]=613,
- ["huhiragana"]=12405,
- ["huiitosquare"]=13107,
- ["hukatakana"]=12501,
- ["hukatakanahalfwidth"]=65420,
- ["hungarumlaut"]=733,
- ["hungarumlautcmb"]=779,
- ["hv"]=405,
- ["hyphen"]=45,
- ["hyphenmonospace"]=65293,
- ["hyphensmall"]=65123,
- ["hyphentwo"]=8208,
- ["i"]=105,
- ["iacute"]=237,
- ["iacyrillic"]=1103,
- ["ibengali"]=2439,
- ["ibopomofo"]=12583,
- ["ibreve"]=301,
- ["icaron"]=464,
- ["icircle"]=9432,
- ["icircumflex"]=238,
- ["icyrillic"]=1110,
- ["idblgrave"]=521,
- ["ideographearthcircle"]=12943,
- ["ideographfirecircle"]=12939,
- ["ideographicallianceparen"]=12863,
- ["ideographiccallparen"]=12858,
- ["ideographiccentrecircle"]=12965,
- ["ideographicclose"]=12294,
- ["ideographiccomma"]=12289,
- ["ideographiccommaleft"]=65380,
- ["ideographiccongratulationparen"]=12855,
- ["ideographiccorrectcircle"]=12963,
- ["ideographicearthparen"]=12847,
- ["ideographicenterpriseparen"]=12861,
- ["ideographicexcellentcircle"]=12957,
- ["ideographicfestivalparen"]=12864,
- ["ideographicfinancialcircle"]=12950,
- ["ideographicfinancialparen"]=12854,
- ["ideographicfireparen"]=12843,
- ["ideographichaveparen"]=12850,
- ["ideographichighcircle"]=12964,
- ["ideographiciterationmark"]=12293,
- ["ideographiclaborcircle"]=12952,
- ["ideographiclaborparen"]=12856,
- ["ideographicleftcircle"]=12967,
- ["ideographiclowcircle"]=12966,
- ["ideographicmedicinecircle"]=12969,
- ["ideographicmetalparen"]=12846,
- ["ideographicmoonparen"]=12842,
- ["ideographicnameparen"]=12852,
- ["ideographicperiod"]=12290,
- ["ideographicprintcircle"]=12958,
- ["ideographicreachparen"]=12867,
- ["ideographicrepresentparen"]=12857,
- ["ideographicresourceparen"]=12862,
- ["ideographicrightcircle"]=12968,
- ["ideographicsecretcircle"]=12953,
- ["ideographicselfparen"]=12866,
- ["ideographicsocietyparen"]=12851,
- ["ideographicspace"]=12288,
- ["ideographicspecialparen"]=12853,
- ["ideographicstockparen"]=12849,
- ["ideographicstudyparen"]=12859,
- ["ideographicsunparen"]=12848,
- ["ideographicsuperviseparen"]=12860,
- ["ideographicwaterparen"]=12844,
- ["ideographicwoodparen"]=12845,
- ["ideographiczero"]=12295,
- ["ideographmetalcircle"]=12942,
- ["ideographmooncircle"]=12938,
- ["ideographnamecircle"]=12948,
- ["ideographsuncircle"]=12944,
- ["ideographwatercircle"]=12940,
- ["ideographwoodcircle"]=12941,
- ["ideva"]=2311,
- ["idieresis"]=239,
- ["idieresisacute"]=7727,
- ["idieresiscyrillic"]=1253,
- ["idotbelow"]=7883,
- ["iebrevecyrillic"]=1239,
- ["iecyrillic"]=1077,
- ["ieungacirclekorean"]=12917,
- ["ieungaparenkorean"]=12821,
- ["ieungcirclekorean"]=12903,
- ["ieungkorean"]=12615,
- ["ieungparenkorean"]=12807,
- ["igrave"]=236,
- ["igujarati"]=2695,
- ["igurmukhi"]=2567,
- ["ihiragana"]=12356,
- ["ihookabove"]=7881,
- ["iibengali"]=2440,
- ["iicyrillic"]=1080,
- ["iideva"]=2312,
- ["iigujarati"]=2696,
- ["iigurmukhi"]=2568,
- ["iimatragurmukhi"]=2624,
- ["iinvertedbreve"]=523,
- ["iishortcyrillic"]=1081,
- ["iivowelsignbengali"]=2496,
- ["iivowelsigndeva"]=2368,
- ["iivowelsigngujarati"]=2752,
- ["ij"]=307,
- ["ikatakana"]=12452,
- ["ikatakanahalfwidth"]=65394,
- ["ikorean"]=12643,
- ["iluyhebrew"]=1452,
- ["imacron"]=299,
- ["imacroncyrillic"]=1251,
- ["imageorapproximatelyequal"]=8787,
- ["imatragurmukhi"]=2623,
- ["imonospace"]=65353,
- ["increment"]=8710,
- ["infinity"]=8734,
- ["iniarmenian"]=1387,
- ["integral"]=8747,
- ["integralbt"]=8993,
- ["integraltp"]=8992,
- ["intersection"]=8745,
- ["intisquare"]=13061,
- ["invbullet"]=9688,
- ["invsmileface"]=9787,
- ["iocyrillic"]=1105,
- ["iogonek"]=303,
- ["iota"]=953,
- ["iotadieresis"]=970,
- ["iotadieresistonos"]=912,
- ["iotalatin"]=617,
- ["iotatonos"]=943,
- ["iparen"]=9380,
- ["irigurmukhi"]=2674,
- ["ismallhiragana"]=12355,
- ["ismallkatakana"]=12451,
- ["ismallkatakanahalfwidth"]=65384,
- ["issharbengali"]=2554,
- ["istroke"]=616,
- ["iterationhiragana"]=12445,
- ["iterationkatakana"]=12541,
- ["itilde"]=297,
- ["itildebelow"]=7725,
- ["iubopomofo"]=12585,
- ["iucyrillic"]=1102,
- ["ivowelsignbengali"]=2495,
- ["ivowelsigndeva"]=2367,
- ["ivowelsigngujarati"]=2751,
- ["izhitsacyrillic"]=1141,
- ["izhitsadblgravecyrillic"]=1143,
- ["j"]=106,
- ["jaarmenian"]=1393,
- ["jabengali"]=2460,
- ["jadeva"]=2332,
- ["jagujarati"]=2716,
- ["jagurmukhi"]=2588,
- ["jbopomofo"]=12560,
- ["jcaron"]=496,
- ["jcircle"]=9433,
- ["jcircumflex"]=309,
- ["jcrossedtail"]=669,
- ["jdotlessstroke"]=607,
- ["jecyrillic"]=1112,
- ["jeemarabic"]=1580,
- ["jeemfinalarabic"]=65182,
- ["jeeminitialarabic"]=65183,
- ["jeemmedialarabic"]=65184,
- ["jeharabic"]=1688,
- ["jehfinalarabic"]=64395,
- ["jhabengali"]=2461,
- ["jhadeva"]=2333,
- ["jhagujarati"]=2717,
- ["jhagurmukhi"]=2589,
- ["jheharmenian"]=1403,
- ["jis"]=12292,
- ["jmonospace"]=65354,
- ["jparen"]=9381,
- ["jsuperior"]=690,
- ["k"]=107,
- ["kabashkircyrillic"]=1185,
- ["kabengali"]=2453,
- ["kacute"]=7729,
- ["kacyrillic"]=1082,
- ["kadescendercyrillic"]=1179,
- ["kadeva"]=2325,
- ["kafarabic"]=1603,
- ["kafdageshhebrew"]=64315,
- ["kaffinalarabic"]=65242,
- ["kafhebrew"]=1499,
- ["kafinitialarabic"]=65243,
- ["kafmedialarabic"]=65244,
- ["kafrafehebrew"]=64333,
- ["kagujarati"]=2709,
- ["kagurmukhi"]=2581,
- ["kahiragana"]=12363,
- ["kahookcyrillic"]=1220,
- ["kakatakana"]=12459,
- ["kakatakanahalfwidth"]=65398,
- ["kappa"]=954,
- ["kappasymbolgreek"]=1008,
- ["kapyeounmieumkorean"]=12657,
- ["kapyeounphieuphkorean"]=12676,
- ["kapyeounpieupkorean"]=12664,
- ["kapyeounssangpieupkorean"]=12665,
- ["karoriisquare"]=13069,
- ["kasmallkatakana"]=12533,
- ["kasquare"]=13188,
- ["kasraarabic"]=1616,
- ["kasratanarabic"]=1613,
- ["kastrokecyrillic"]=1183,
- ["katahiraprolongmarkhalfwidth"]=65392,
- ["kaverticalstrokecyrillic"]=1181,
- ["kbopomofo"]=12558,
- ["kcalsquare"]=13193,
- ["kcaron"]=489,
- ["kcircle"]=9434,
- ["kcommaaccent"]=311,
- ["kdotbelow"]=7731,
- ["keharmenian"]=1412,
- ["kehiragana"]=12369,
- ["kekatakana"]=12465,
- ["kekatakanahalfwidth"]=65401,
- ["kenarmenian"]=1391,
- ["kesmallkatakana"]=12534,
- ["kgreenlandic"]=312,
- ["khabengali"]=2454,
- ["khacyrillic"]=1093,
- ["khadeva"]=2326,
- ["khagujarati"]=2710,
- ["khagurmukhi"]=2582,
- ["khaharabic"]=1582,
- ["khahfinalarabic"]=65190,
- ["khahinitialarabic"]=65191,
- ["khahmedialarabic"]=65192,
- ["kheicoptic"]=999,
- ["khhadeva"]=2393,
- ["khhagurmukhi"]=2649,
- ["khieukhacirclekorean"]=12920,
- ["khieukhaparenkorean"]=12824,
- ["khieukhcirclekorean"]=12906,
- ["khieukhkorean"]=12619,
- ["khieukhparenkorean"]=12810,
- ["khokhaithai"]=3586,
- ["khokhonthai"]=3589,
- ["khokhuatthai"]=3587,
- ["khokhwaithai"]=3588,
- ["khomutthai"]=3675,
- ["khook"]=409,
- ["khorakhangthai"]=3590,
- ["khzsquare"]=13201,
- ["kihiragana"]=12365,
- ["kikatakana"]=12461,
- ["kikatakanahalfwidth"]=65399,
- ["kiroguramusquare"]=13077,
- ["kiromeetorusquare"]=13078,
- ["kirosquare"]=13076,
- ["kiyeokacirclekorean"]=12910,
- ["kiyeokaparenkorean"]=12814,
- ["kiyeokcirclekorean"]=12896,
- ["kiyeokkorean"]=12593,
- ["kiyeokparenkorean"]=12800,
- ["kiyeoksioskorean"]=12595,
- ["kjecyrillic"]=1116,
- ["klinebelow"]=7733,
- ["klsquare"]=13208,
- ["kmcubedsquare"]=13222,
- ["kmonospace"]=65355,
- ["kmsquaredsquare"]=13218,
- ["kohiragana"]=12371,
- ["kohmsquare"]=13248,
- ["kokaithai"]=3585,
- ["kokatakana"]=12467,
- ["kokatakanahalfwidth"]=65402,
- ["kooposquare"]=13086,
- ["koppacyrillic"]=1153,
- ["koreanstandardsymbol"]=12927,
- ["koroniscmb"]=835,
- ["kparen"]=9382,
- ["kpasquare"]=13226,
- ["ksicyrillic"]=1135,
- ["ktsquare"]=13263,
- ["kturned"]=670,
- ["kuhiragana"]=12367,
- ["kukatakana"]=12463,
- ["kukatakanahalfwidth"]=65400,
- ["kvsquare"]=13240,
- ["kwsquare"]=13246,
- ["l"]=108,
- ["labengali"]=2482,
- ["lacute"]=314,
- ["ladeva"]=2354,
- ["lagujarati"]=2738,
- ["lagurmukhi"]=2610,
- ["lakkhangyaothai"]=3653,
- ["lamaleffinalarabic"]=65276,
- ["lamalefhamzaabovefinalarabic"]=65272,
- ["lamalefhamzaaboveisolatedarabic"]=65271,
- ["lamalefhamzabelowfinalarabic"]=65274,
- ["lamalefhamzabelowisolatedarabic"]=65273,
- ["lamalefisolatedarabic"]=65275,
- ["lamalefmaddaabovefinalarabic"]=65270,
- ["lamalefmaddaaboveisolatedarabic"]=65269,
- ["lamarabic"]=1604,
- ["lambda"]=955,
- ["lambdastroke"]=411,
- ["lameddageshhebrew"]=64316,
- ["lamedholamhebrew"]=1500,
- ["lamfinalarabic"]=65246,
- ["lamhahinitialarabic"]=64714,
- ["lamjeeminitialarabic"]=64713,
- ["lamkhahinitialarabic"]=64715,
- ["lamlamhehisolatedarabic"]=65010,
- ["lammedialarabic"]=65248,
- ["lammeemhahinitialarabic"]=64904,
- ["lammeeminitialarabic"]=64716,
- ["lammeemkhahinitialarabic"]=65247,
- ["largecircle"]=9711,
- ["lbar"]=410,
- ["lbelt"]=620,
- ["lbopomofo"]=12556,
- ["lcaron"]=318,
- ["lcircle"]=9435,
- ["lcircumflexbelow"]=7741,
- ["lcommaaccent"]=316,
- ["ldotaccent"]=320,
- ["ldotbelow"]=7735,
- ["ldotbelowmacron"]=7737,
- ["leftangleabovecmb"]=794,
- ["lefttackbelowcmb"]=792,
- ["less"]=60,
- ["lessequal"]=8804,
- ["lessequalorgreater"]=8922,
- ["lessmonospace"]=65308,
- ["lessorequivalent"]=8818,
- ["lessorgreater"]=8822,
- ["lessoverequal"]=8806,
- ["lesssmall"]=65124,
- ["lezh"]=622,
- ["lfblock"]=9612,
- ["lhookretroflex"]=621,
- ["lira"]=8356,
- ["liwnarmenian"]=1388,
- ["lj"]=457,
- ["ljecyrillic"]=1113,
- ["lladeva"]=2355,
- ["llagujarati"]=2739,
- ["llinebelow"]=7739,
- ["llladeva"]=2356,
- ["llvocalicbengali"]=2529,
- ["llvocalicdeva"]=2401,
- ["llvocalicvowelsignbengali"]=2531,
- ["llvocalicvowelsigndeva"]=2403,
- ["lmiddletilde"]=619,
- ["lmonospace"]=65356,
- ["lmsquare"]=13264,
- ["lochulathai"]=3628,
- ["logicaland"]=8743,
- ["logicalnot"]=172,
- ["logicalor"]=8744,
- ["lolingthai"]=3621,
- ["lowlinecenterline"]=65102,
- ["lowlinecmb"]=818,
- ["lowlinedashed"]=65101,
- ["lozenge"]=9674,
- ["lparen"]=9383,
- ["lslash"]=322,
- ["lsquare"]=8467,
- ["luthai"]=3622,
- ["lvocalicbengali"]=2444,
- ["lvocalicdeva"]=2316,
- ["lvocalicvowelsignbengali"]=2530,
- ["lvocalicvowelsigndeva"]=2402,
- ["lxsquare"]=13267,
- ["m"]=109,
- ["mabengali"]=2478,
- ["macron"]=175,
- ["macronbelowcmb"]=817,
- ["macroncmb"]=772,
- ["macronlowmod"]=717,
- ["macronmonospace"]=65507,
- ["macute"]=7743,
- ["madeva"]=2350,
- ["magujarati"]=2734,
- ["magurmukhi"]=2606,
- ["mahapakhlefthebrew"]=1444,
- ["mahiragana"]=12414,
- ["maichattawathai"]=3659,
- ["maiekthai"]=3656,
- ["maihanakatthai"]=3633,
- ["maitaikhuthai"]=3655,
- ["maithothai"]=3657,
- ["maitrithai"]=3658,
- ["maiyamokthai"]=3654,
- ["makatakana"]=12510,
- ["makatakanahalfwidth"]=65423,
- ["mansyonsquare"]=13127,
- ["maqafhebrew"]=1470,
- ["mars"]=9794,
- ["masoracirclehebrew"]=1455,
- ["masquare"]=13187,
- ["mbopomofo"]=12551,
- ["mbsquare"]=13268,
- ["mcircle"]=9436,
- ["mcubedsquare"]=13221,
- ["mdotaccent"]=7745,
- ["mdotbelow"]=7747,
- ["meemarabic"]=1605,
- ["meemfinalarabic"]=65250,
- ["meeminitialarabic"]=65251,
- ["meemmedialarabic"]=65252,
- ["meemmeeminitialarabic"]=64721,
- ["meemmeemisolatedarabic"]=64584,
- ["meetorusquare"]=13133,
- ["mehiragana"]=12417,
- ["meizierasquare"]=13182,
- ["mekatakana"]=12513,
- ["mekatakanahalfwidth"]=65426,
- ["memdageshhebrew"]=64318,
- ["memhebrew"]=1502,
- ["menarmenian"]=1396,
- ["merkhakefulalefthebrew"]=1446,
- ["merkhalefthebrew"]=1445,
- ["mhook"]=625,
- ["mhzsquare"]=13202,
- ["middledotkatakanahalfwidth"]=65381,
- ["mieumacirclekorean"]=12914,
- ["mieumaparenkorean"]=12818,
- ["mieumcirclekorean"]=12900,
- ["mieumkorean"]=12609,
- ["mieumpansioskorean"]=12656,
- ["mieumparenkorean"]=12804,
- ["mieumpieupkorean"]=12654,
- ["mieumsioskorean"]=12655,
- ["mihiragana"]=12415,
- ["mikatakana"]=12511,
- ["mikatakanahalfwidth"]=65424,
- ["minus"]=8722,
- ["minusbelowcmb"]=800,
- ["minuscircle"]=8854,
- ["minusmod"]=727,
- ["minusplus"]=8723,
- ["minute"]=8242,
- ["miribaarusquare"]=13130,
- ["mirisquare"]=13129,
- ["mlonglegturned"]=624,
- ["mlsquare"]=13206,
- ["mmcubedsquare"]=13219,
- ["mmonospace"]=65357,
- ["mmsquaredsquare"]=13215,
- ["mohiragana"]=12418,
- ["mohmsquare"]=13249,
- ["mokatakana"]=12514,
- ["mokatakanahalfwidth"]=65427,
- ["molsquare"]=13270,
- ["momathai"]=3617,
- ["moverssquare"]=13223,
- ["moverssquaredsquare"]=13224,
- ["mparen"]=9384,
- ["mpasquare"]=13227,
- ["mssquare"]=13235,
- ["mturned"]=623,
- ["mu1"]=181,
- ["muasquare"]=13186,
- ["muchgreater"]=8811,
- ["muchless"]=8810,
- ["mufsquare"]=13196,
- ["mugreek"]=956,
- ["mugsquare"]=13197,
- ["muhiragana"]=12416,
- ["mukatakana"]=12512,
- ["mukatakanahalfwidth"]=65425,
- ["mulsquare"]=13205,
- ["multiply"]=215,
- ["mumsquare"]=13211,
- ["munahlefthebrew"]=1443,
- ["musicalnote"]=9834,
- ["musicalnotedbl"]=9835,
- ["musicflatsign"]=9837,
- ["musicsharpsign"]=9839,
- ["mussquare"]=13234,
- ["muvsquare"]=13238,
- ["muwsquare"]=13244,
- ["mvmegasquare"]=13241,
- ["mvsquare"]=13239,
- ["mwmegasquare"]=13247,
- ["mwsquare"]=13245,
- ["n"]=110,
- ["nabengali"]=2472,
- ["nabla"]=8711,
- ["nacute"]=324,
- ["nadeva"]=2344,
- ["nagujarati"]=2728,
- ["nagurmukhi"]=2600,
- ["nahiragana"]=12394,
- ["nakatakana"]=12490,
- ["nakatakanahalfwidth"]=65413,
- ["nasquare"]=13185,
- ["nbopomofo"]=12555,
- ["ncaron"]=328,
- ["ncircle"]=9437,
- ["ncircumflexbelow"]=7755,
- ["ncommaaccent"]=326,
- ["ndotaccent"]=7749,
- ["ndotbelow"]=7751,
- ["nehiragana"]=12397,
- ["nekatakana"]=12493,
- ["nekatakanahalfwidth"]=65416,
- ["nfsquare"]=13195,
- ["ngabengali"]=2457,
- ["ngadeva"]=2329,
- ["ngagujarati"]=2713,
- ["ngagurmukhi"]=2585,
- ["ngonguthai"]=3591,
- ["nhiragana"]=12435,
- ["nhookleft"]=626,
- ["nhookretroflex"]=627,
- ["nieunacirclekorean"]=12911,
- ["nieunaparenkorean"]=12815,
- ["nieuncieuckorean"]=12597,
- ["nieuncirclekorean"]=12897,
- ["nieunhieuhkorean"]=12598,
- ["nieunkorean"]=12596,
- ["nieunpansioskorean"]=12648,
- ["nieunparenkorean"]=12801,
- ["nieunsioskorean"]=12647,
- ["nieuntikeutkorean"]=12646,
- ["nihiragana"]=12395,
- ["nikatakana"]=12491,
- ["nikatakanahalfwidth"]=65414,
- ["nikhahitthai"]=3661,
- ["nine"]=57,
- ["ninebengali"]=2543,
- ["ninecircle"]=9320,
- ["ninecircleinversesansserif"]=10130,
- ["ninedeva"]=2415,
- ["ninegujarati"]=2799,
- ["ninegurmukhi"]=2671,
- ["ninehackarabic"]=1641,
- ["ninehangzhou"]=12329,
- ["nineideographicparen"]=12840,
- ["nineinferior"]=8329,
- ["ninemonospace"]=65305,
- ["nineparen"]=9340,
- ["nineperiod"]=9360,
- ["ninepersian"]=1785,
- ["nineroman"]=8568,
- ["ninesuperior"]=8313,
- ["nineteencircle"]=9330,
- ["nineteenparen"]=9350,
- ["nineteenperiod"]=9370,
- ["ninethai"]=3673,
- ["nj"]=460,
- ["njecyrillic"]=1114,
- ["nkatakana"]=12531,
- ["nkatakanahalfwidth"]=65437,
- ["nlegrightlong"]=414,
- ["nlinebelow"]=7753,
- ["nmonospace"]=65358,
- ["nmsquare"]=13210,
- ["nnabengali"]=2467,
- ["nnadeva"]=2339,
- ["nnagujarati"]=2723,
- ["nnagurmukhi"]=2595,
- ["nnnadeva"]=2345,
- ["nohiragana"]=12398,
- ["nokatakana"]=12494,
- ["nokatakanahalfwidth"]=65417,
- ["nonbreakingspace"]=160,
- ["nonenthai"]=3603,
- ["nonuthai"]=3609,
- ["noonarabic"]=1606,
- ["noonfinalarabic"]=65254,
- ["noonghunnaarabic"]=1722,
- ["noonghunnafinalarabic"]=64415,
- ["nooninitialarabic"]=65255,
- ["noonjeeminitialarabic"]=64722,
- ["noonjeemisolatedarabic"]=64587,
- ["noonmedialarabic"]=65256,
- ["noonmeeminitialarabic"]=64725,
- ["noonmeemisolatedarabic"]=64590,
- ["noonnoonfinalarabic"]=64653,
- ["notcontains"]=8716,
- ["notelementof"]=8713,
- ["notequal"]=8800,
- ["notgreater"]=8815,
- ["notgreaternorequal"]=8817,
- ["notgreaternorless"]=8825,
- ["notidentical"]=8802,
- ["notless"]=8814,
- ["notlessnorequal"]=8816,
- ["notparallel"]=8742,
- ["notprecedes"]=8832,
- ["notsubset"]=8836,
- ["notsucceeds"]=8833,
- ["notsuperset"]=8837,
- ["nowarmenian"]=1398,
- ["nparen"]=9385,
- ["nssquare"]=13233,
- ["nsuperior"]=8319,
- ["ntilde"]=241,
- ["nu"]=957,
- ["nuhiragana"]=12396,
- ["nukatakana"]=12492,
- ["nukatakanahalfwidth"]=65415,
- ["nuktabengali"]=2492,
- ["nuktadeva"]=2364,
- ["nuktagujarati"]=2748,
- ["nuktagurmukhi"]=2620,
- ["numbersign"]=35,
- ["numbersignmonospace"]=65283,
- ["numbersignsmall"]=65119,
- ["numeralsigngreek"]=884,
- ["numeralsignlowergreek"]=885,
- ["numero"]=8470,
- ["nundageshhebrew"]=64320,
- ["nunhebrew"]=1504,
- ["nvsquare"]=13237,
- ["nwsquare"]=13243,
- ["nyabengali"]=2462,
- ["nyadeva"]=2334,
- ["nyagujarati"]=2718,
- ["nyagurmukhi"]=2590,
- ["o"]=111,
- ["oacute"]=243,
- ["oangthai"]=3629,
- ["obarred"]=629,
- ["obarredcyrillic"]=1257,
- ["obarreddieresiscyrillic"]=1259,
- ["obengali"]=2451,
- ["obopomofo"]=12571,
- ["obreve"]=335,
- ["ocandradeva"]=2321,
- ["ocandragujarati"]=2705,
- ["ocandravowelsigndeva"]=2377,
- ["ocandravowelsigngujarati"]=2761,
- ["ocaron"]=466,
- ["ocircle"]=9438,
- ["ocircumflex"]=244,
- ["ocircumflexacute"]=7889,
- ["ocircumflexdotbelow"]=7897,
- ["ocircumflexgrave"]=7891,
- ["ocircumflexhookabove"]=7893,
- ["ocircumflextilde"]=7895,
- ["ocyrillic"]=1086,
- ["odblgrave"]=525,
- ["odeva"]=2323,
- ["odieresis"]=246,
- ["odieresiscyrillic"]=1255,
- ["odotbelow"]=7885,
- ["oe"]=339,
- ["oekorean"]=12634,
- ["ogonek"]=731,
- ["ogonekcmb"]=808,
- ["ograve"]=242,
- ["ogujarati"]=2707,
- ["oharmenian"]=1413,
- ["ohiragana"]=12362,
- ["ohookabove"]=7887,
- ["ohorn"]=417,
- ["ohornacute"]=7899,
- ["ohorndotbelow"]=7907,
- ["ohorngrave"]=7901,
- ["ohornhookabove"]=7903,
- ["ohorntilde"]=7905,
- ["ohungarumlaut"]=337,
- ["oi"]=419,
- ["oinvertedbreve"]=527,
- ["okatakana"]=12458,
- ["okatakanahalfwidth"]=65397,
- ["okorean"]=12631,
- ["olehebrew"]=1451,
- ["omacron"]=333,
- ["omacronacute"]=7763,
- ["omacrongrave"]=7761,
- ["omdeva"]=2384,
- ["omega"]=969,
- ["omegacyrillic"]=1121,
- ["omegalatinclosed"]=631,
- ["omegaroundcyrillic"]=1147,
- ["omegatitlocyrillic"]=1149,
- ["omegatonos"]=974,
- ["omgujarati"]=2768,
- ["omicron"]=959,
- ["omicrontonos"]=972,
- ["omonospace"]=65359,
- ["one"]=49,
- ["onebengali"]=2535,
- ["onecircle"]=9312,
- ["onecircleinversesansserif"]=10122,
- ["onedeva"]=2407,
- ["onedotenleader"]=8228,
- ["oneeighth"]=8539,
- ["onegujarati"]=2791,
- ["onegurmukhi"]=2663,
- ["onehackarabic"]=1633,
- ["onehalf"]=189,
- ["onehangzhou"]=12321,
- ["oneideographicparen"]=12832,
- ["oneinferior"]=8321,
- ["onemonospace"]=65297,
- ["onenumeratorbengali"]=2548,
- ["oneparen"]=9332,
- ["oneperiod"]=9352,
- ["onepersian"]=1777,
- ["onequarter"]=188,
- ["oneroman"]=8560,
- ["onesuperior"]=185,
- ["onethai"]=3665,
- ["onethird"]=8531,
- ["oogonek"]=491,
- ["oogonekmacron"]=493,
- ["oogurmukhi"]=2579,
- ["oomatragurmukhi"]=2635,
- ["oopen"]=596,
- ["oparen"]=9386,
- ["option"]=8997,
- ["ordfeminine"]=170,
- ["ordmasculine"]=186,
- ["oshortdeva"]=2322,
- ["oshortvowelsigndeva"]=2378,
- ["oslash"]=248,
- ["osmallhiragana"]=12361,
- ["osmallkatakana"]=12457,
- ["osmallkatakanahalfwidth"]=65387,
- ["ostrokeacute"]=511,
- ["otcyrillic"]=1151,
- ["otilde"]=245,
- ["otildeacute"]=7757,
- ["otildedieresis"]=7759,
- ["oubopomofo"]=12577,
- ["overline"]=8254,
- ["overlinecenterline"]=65098,
- ["overlinecmb"]=773,
- ["overlinedashed"]=65097,
- ["overlinedblwavy"]=65100,
- ["overlinewavy"]=65099,
- ["ovowelsignbengali"]=2507,
- ["ovowelsigndeva"]=2379,
- ["ovowelsigngujarati"]=2763,
- ["p"]=112,
- ["paampssquare"]=13184,
- ["paasentosquare"]=13099,
- ["pabengali"]=2474,
- ["pacute"]=7765,
- ["padeva"]=2346,
- ["pagedown"]=8671,
- ["pageup"]=8670,
- ["pagujarati"]=2730,
- ["pagurmukhi"]=2602,
- ["pahiragana"]=12401,
- ["paiyannoithai"]=3631,
- ["pakatakana"]=12497,
- ["palatalizationcyrilliccmb"]=1156,
- ["palochkacyrillic"]=1216,
- ["pansioskorean"]=12671,
- ["paragraph"]=182,
- ["parallel"]=8741,
- ["parenleft"]=40,
- ["parenleftaltonearabic"]=64830,
- ["parenleftinferior"]=8333,
- ["parenleftmonospace"]=65288,
- ["parenleftsmall"]=65113,
- ["parenleftsuperior"]=8317,
- ["parenleftvertical"]=65077,
- ["parenright"]=41,
- ["parenrightaltonearabic"]=64831,
- ["parenrightinferior"]=8334,
- ["parenrightmonospace"]=65289,
- ["parenrightsmall"]=65114,
- ["parenrightsuperior"]=8318,
- ["parenrightvertical"]=65078,
- ["partialdiff"]=8706,
- ["paseqhebrew"]=1472,
- ["pashtahebrew"]=1433,
- ["pasquare"]=13225,
- ["patahwidehebrew"]=1463,
- ["pazerhebrew"]=1441,
- ["pbopomofo"]=12550,
- ["pcircle"]=9439,
- ["pdotaccent"]=7767,
- ["pecyrillic"]=1087,
- ["pedageshhebrew"]=64324,
- ["peezisquare"]=13115,
- ["pefinaldageshhebrew"]=64323,
- ["peharabic"]=1662,
- ["peharmenian"]=1402,
- ["pehebrew"]=1508,
- ["pehfinalarabic"]=64343,
- ["pehinitialarabic"]=64344,
- ["pehiragana"]=12410,
- ["pehmedialarabic"]=64345,
- ["pekatakana"]=12506,
- ["pemiddlehookcyrillic"]=1191,
- ["perafehebrew"]=64334,
- ["percent"]=37,
- ["percentarabic"]=1642,
- ["percentmonospace"]=65285,
- ["percentsmall"]=65130,
- ["period"]=46,
- ["periodarmenian"]=1417,
- ["periodcentered"]=183,
- ["periodhalfwidth"]=65377,
- ["periodmonospace"]=65294,
- ["periodsmall"]=65106,
- ["perispomenigreekcmb"]=834,
- ["perpendicular"]=8869,
- ["perthousand"]=8240,
- ["peseta"]=8359,
- ["pfsquare"]=13194,
- ["phabengali"]=2475,
- ["phadeva"]=2347,
- ["phagujarati"]=2731,
- ["phagurmukhi"]=2603,
- ["phi"]=966,
- ["phieuphacirclekorean"]=12922,
- ["phieuphaparenkorean"]=12826,
- ["phieuphcirclekorean"]=12908,
- ["phieuphkorean"]=12621,
- ["phieuphparenkorean"]=12812,
- ["philatin"]=632,
- ["phinthuthai"]=3642,
- ["phisymbolgreek"]=981,
- ["phook"]=421,
- ["phophanthai"]=3614,
- ["phophungthai"]=3612,
- ["phosamphaothai"]=3616,
- ["pi"]=960,
- ["pieupacirclekorean"]=12915,
- ["pieupaparenkorean"]=12819,
- ["pieupcieuckorean"]=12662,
- ["pieupcirclekorean"]=12901,
- ["pieupkiyeokkorean"]=12658,
- ["pieupkorean"]=12610,
- ["pieupparenkorean"]=12805,
- ["pieupsioskiyeokkorean"]=12660,
- ["pieupsioskorean"]=12612,
- ["pieupsiostikeutkorean"]=12661,
- ["pieupthieuthkorean"]=12663,
- ["pieuptikeutkorean"]=12659,
- ["pihiragana"]=12404,
- ["pikatakana"]=12500,
- ["pisymbolgreek"]=982,
- ["piwrarmenian"]=1411,
- ["plus"]=43,
- ["plusbelowcmb"]=799,
- ["pluscircle"]=8853,
- ["plusminus"]=177,
- ["plusmod"]=726,
- ["plusmonospace"]=65291,
- ["plussmall"]=65122,
- ["plussuperior"]=8314,
- ["pmonospace"]=65360,
- ["pmsquare"]=13272,
- ["pohiragana"]=12413,
- ["pointingindexdownwhite"]=9759,
- ["pointingindexleftwhite"]=9756,
- ["pointingindexrightwhite"]=9758,
- ["pointingindexupwhite"]=9757,
- ["pokatakana"]=12509,
- ["poplathai"]=3611,
- ["postalmark"]=12306,
- ["postalmarkface"]=12320,
- ["pparen"]=9387,
- ["precedes"]=8826,
- ["prescription"]=8478,
- ["primemod"]=697,
- ["primereversed"]=8245,
- ["product"]=8719,
- ["projective"]=8965,
- ["prolongedkana"]=12540,
- ["propellor"]=8984,
- ["proportion"]=8759,
- ["proportional"]=8733,
- ["psi"]=968,
- ["psicyrillic"]=1137,
- ["psilipneumatacyrilliccmb"]=1158,
- ["pssquare"]=13232,
- ["puhiragana"]=12407,
- ["pukatakana"]=12503,
- ["pvsquare"]=13236,
- ["pwsquare"]=13242,
- ["q"]=113,
- ["qadeva"]=2392,
- ["qadmahebrew"]=1448,
- ["qafarabic"]=1602,
- ["qaffinalarabic"]=65238,
- ["qafinitialarabic"]=65239,
- ["qafmedialarabic"]=65240,
- ["qamatswidehebrew"]=1464,
- ["qarneyparahebrew"]=1439,
- ["qbopomofo"]=12561,
- ["qcircle"]=9440,
- ["qhook"]=672,
- ["qmonospace"]=65361,
- ["qofdageshhebrew"]=64327,
- ["qoftserehebrew"]=1511,
- ["qparen"]=9388,
- ["quarternote"]=9833,
- ["qubutswidehebrew"]=1467,
- ["question"]=63,
- ["questionarabic"]=1567,
- ["questionarmenian"]=1374,
- ["questiondown"]=191,
- ["questiongreek"]=894,
- ["questionmonospace"]=65311,
- ["quotedbl"]=34,
- ["quotedblbase"]=8222,
- ["quotedblleft"]=8220,
- ["quotedblmonospace"]=65282,
- ["quotedblprime"]=12318,
- ["quotedblprimereversed"]=12317,
- ["quotedblright"]=8221,
- ["quoteleft"]=8216,
- ["quotereversed"]=8219,
- ["quoteright"]=8217,
- ["quoterightn"]=329,
- ["quotesinglbase"]=8218,
- ["quotesingle"]=39,
- ["quotesinglemonospace"]=65287,
- ["r"]=114,
- ["raarmenian"]=1404,
- ["rabengali"]=2480,
- ["racute"]=341,
- ["radeva"]=2352,
- ["radical"]=8730,
- ["radoverssquare"]=13230,
- ["radoverssquaredsquare"]=13231,
- ["radsquare"]=13229,
- ["rafehebrew"]=1471,
- ["ragujarati"]=2736,
- ["ragurmukhi"]=2608,
- ["rahiragana"]=12425,
- ["rakatakana"]=12521,
- ["rakatakanahalfwidth"]=65431,
- ["ralowerdiagonalbengali"]=2545,
- ["ramiddlediagonalbengali"]=2544,
- ["ramshorn"]=612,
- ["ratio"]=8758,
- ["rbopomofo"]=12566,
- ["rcaron"]=345,
- ["rcircle"]=9441,
- ["rcommaaccent"]=343,
- ["rdblgrave"]=529,
- ["rdotaccent"]=7769,
- ["rdotbelow"]=7771,
- ["rdotbelowmacron"]=7773,
- ["referencemark"]=8251,
- ["registered"]=174,
- ["reharmenian"]=1408,
- ["rehfinalarabic"]=65198,
- ["rehiragana"]=12428,
- ["rehyehaleflamarabic"]=1585,
- ["rekatakana"]=12524,
- ["rekatakanahalfwidth"]=65434,
- ["reshdageshhebrew"]=64328,
- ["reshtserehebrew"]=1512,
- ["reversedtilde"]=8765,
- ["reviamugrashhebrew"]=1431,
- ["revlogicalnot"]=8976,
- ["rfishhook"]=638,
- ["rfishhookreversed"]=639,
- ["rhabengali"]=2525,
- ["rhadeva"]=2397,
- ["rho"]=961,
- ["rhook"]=637,
- ["rhookturned"]=635,
- ["rhookturnedsuperior"]=693,
- ["rhosymbolgreek"]=1009,
- ["rhotichookmod"]=734,
- ["rieulacirclekorean"]=12913,
- ["rieulaparenkorean"]=12817,
- ["rieulcirclekorean"]=12899,
- ["rieulhieuhkorean"]=12608,
- ["rieulkiyeokkorean"]=12602,
- ["rieulkiyeoksioskorean"]=12649,
- ["rieulkorean"]=12601,
- ["rieulmieumkorean"]=12603,
- ["rieulpansioskorean"]=12652,
- ["rieulparenkorean"]=12803,
- ["rieulphieuphkorean"]=12607,
- ["rieulpieupkorean"]=12604,
- ["rieulpieupsioskorean"]=12651,
- ["rieulsioskorean"]=12605,
- ["rieulthieuthkorean"]=12606,
- ["rieultikeutkorean"]=12650,
- ["rieulyeorinhieuhkorean"]=12653,
- ["rightangle"]=8735,
- ["righttackbelowcmb"]=793,
- ["righttriangle"]=8895,
- ["rihiragana"]=12426,
- ["rikatakana"]=12522,
- ["rikatakanahalfwidth"]=65432,
- ["ring"]=730,
- ["ringbelowcmb"]=805,
- ["ringcmb"]=778,
- ["ringhalfleft"]=703,
- ["ringhalfleftarmenian"]=1369,
- ["ringhalfleftbelowcmb"]=796,
- ["ringhalfleftcentered"]=723,
- ["ringhalfright"]=702,
- ["ringhalfrightbelowcmb"]=825,
- ["ringhalfrightcentered"]=722,
- ["rinvertedbreve"]=531,
- ["rittorusquare"]=13137,
- ["rlinebelow"]=7775,
- ["rlongleg"]=636,
- ["rlonglegturned"]=634,
- ["rmonospace"]=65362,
- ["rohiragana"]=12429,
- ["rokatakana"]=12525,
- ["rokatakanahalfwidth"]=65435,
- ["roruathai"]=3619,
- ["rparen"]=9389,
- ["rrabengali"]=2524,
- ["rradeva"]=2353,
- ["rragurmukhi"]=2652,
- ["rreharabic"]=1681,
- ["rrehfinalarabic"]=64397,
- ["rrvocalicbengali"]=2528,
- ["rrvocalicdeva"]=2400,
- ["rrvocalicgujarati"]=2784,
- ["rrvocalicvowelsignbengali"]=2500,
- ["rrvocalicvowelsigndeva"]=2372,
- ["rrvocalicvowelsigngujarati"]=2756,
- ["rtblock"]=9616,
- ["rturned"]=633,
- ["rturnedsuperior"]=692,
- ["ruhiragana"]=12427,
- ["rukatakana"]=12523,
- ["rukatakanahalfwidth"]=65433,
- ["rupeemarkbengali"]=2546,
- ["rupeesignbengali"]=2547,
- ["ruthai"]=3620,
- ["rvocalicbengali"]=2443,
- ["rvocalicdeva"]=2315,
- ["rvocalicgujarati"]=2699,
- ["rvocalicvowelsignbengali"]=2499,
- ["rvocalicvowelsigndeva"]=2371,
- ["rvocalicvowelsigngujarati"]=2755,
- ["s"]=115,
- ["sabengali"]=2488,
- ["sacute"]=347,
- ["sacutedotaccent"]=7781,
- ["sadarabic"]=1589,
- ["sadeva"]=2360,
- ["sadfinalarabic"]=65210,
- ["sadinitialarabic"]=65211,
- ["sadmedialarabic"]=65212,
- ["sagujarati"]=2744,
- ["sagurmukhi"]=2616,
- ["sahiragana"]=12373,
- ["sakatakana"]=12469,
- ["sakatakanahalfwidth"]=65403,
- ["sallallahoualayhewasallamarabic"]=65018,
- ["samekhdageshhebrew"]=64321,
- ["samekhhebrew"]=1505,
- ["saraaathai"]=3634,
- ["saraaethai"]=3649,
- ["saraaimaimalaithai"]=3652,
- ["saraaimaimuanthai"]=3651,
- ["saraamthai"]=3635,
- ["saraathai"]=3632,
- ["saraethai"]=3648,
- ["saraiithai"]=3637,
- ["saraithai"]=3636,
- ["saraothai"]=3650,
- ["saraueethai"]=3639,
- ["sarauethai"]=3638,
- ["sarauthai"]=3640,
- ["sarauuthai"]=3641,
- ["sbopomofo"]=12569,
- ["scaron"]=353,
- ["scarondotaccent"]=7783,
- ["scedilla"]=351,
- ["schwa"]=601,
- ["schwacyrillic"]=1241,
- ["schwadieresiscyrillic"]=1243,
- ["schwahook"]=602,
- ["scircle"]=9442,
- ["scircumflex"]=349,
- ["scommaaccent"]=537,
- ["sdotaccent"]=7777,
- ["sdotbelow"]=7779,
- ["sdotbelowdotaccent"]=7785,
- ["seagullbelowcmb"]=828,
- ["second"]=8243,
- ["secondtonechinese"]=714,
- ["section"]=167,
- ["seenarabic"]=1587,
- ["seenfinalarabic"]=65202,
- ["seeninitialarabic"]=65203,
- ["seenmedialarabic"]=65204,
- ["segoltahebrew"]=1426,
- ["segolwidehebrew"]=1462,
- ["seharmenian"]=1405,
- ["sehiragana"]=12379,
- ["sekatakana"]=12475,
- ["sekatakanahalfwidth"]=65406,
- ["semicolon"]=59,
- ["semicolonarabic"]=1563,
- ["semicolonmonospace"]=65307,
- ["semicolonsmall"]=65108,
- ["semivoicedmarkkana"]=12444,
- ["semivoicedmarkkanahalfwidth"]=65439,
- ["sentisquare"]=13090,
- ["sentosquare"]=13091,
- ["seven"]=55,
- ["sevenbengali"]=2541,
- ["sevencircle"]=9318,
- ["sevencircleinversesansserif"]=10128,
- ["sevendeva"]=2413,
- ["seveneighths"]=8542,
- ["sevengujarati"]=2797,
- ["sevengurmukhi"]=2669,
- ["sevenhackarabic"]=1639,
- ["sevenhangzhou"]=12327,
- ["sevenideographicparen"]=12838,
- ["seveninferior"]=8327,
- ["sevenmonospace"]=65303,
- ["sevenparen"]=9338,
- ["sevenperiod"]=9358,
- ["sevenpersian"]=1783,
- ["sevenroman"]=8566,
- ["sevensuperior"]=8311,
- ["seventeencircle"]=9328,
- ["seventeenparen"]=9348,
- ["seventeenperiod"]=9368,
- ["seventhai"]=3671,
- ["shaarmenian"]=1399,
- ["shabengali"]=2486,
- ["shacyrillic"]=1096,
- ["shaddadammaarabic"]=64609,
- ["shaddadammatanarabic"]=64606,
- ["shaddafathaarabic"]=64608,
- ["shaddafathatanarabic"]=1617,
- ["shaddakasraarabic"]=64610,
- ["shaddakasratanarabic"]=64607,
- ["shadedark"]=9619,
- ["shadelight"]=9617,
- ["shademedium"]=9618,
- ["shadeva"]=2358,
- ["shagujarati"]=2742,
- ["shagurmukhi"]=2614,
- ["shalshelethebrew"]=1427,
- ["shbopomofo"]=12565,
- ["shchacyrillic"]=1097,
- ["sheenarabic"]=1588,
- ["sheenfinalarabic"]=65206,
- ["sheeninitialarabic"]=65207,
- ["sheenmedialarabic"]=65208,
- ["sheicoptic"]=995,
- ["sheqelhebrew"]=8362,
- ["shevawidehebrew"]=1456,
- ["shhacyrillic"]=1211,
- ["shimacoptic"]=1005,
- ["shindageshhebrew"]=64329,
- ["shindageshshindothebrew"]=64300,
- ["shindageshsindothebrew"]=64301,
- ["shindothebrew"]=1473,
- ["shinhebrew"]=1513,
- ["shinshindothebrew"]=64298,
- ["shinsindothebrew"]=64299,
- ["shook"]=642,
- ["sigma"]=963,
- ["sigmafinal"]=962,
- ["sigmalunatesymbolgreek"]=1010,
- ["sihiragana"]=12375,
- ["sikatakana"]=12471,
- ["sikatakanahalfwidth"]=65404,
- ["siluqlefthebrew"]=1469,
- ["sindothebrew"]=1474,
- ["siosacirclekorean"]=12916,
- ["siosaparenkorean"]=12820,
- ["sioscieuckorean"]=12670,
- ["sioscirclekorean"]=12902,
- ["sioskiyeokkorean"]=12666,
- ["sioskorean"]=12613,
- ["siosnieunkorean"]=12667,
- ["siosparenkorean"]=12806,
- ["siospieupkorean"]=12669,
- ["siostikeutkorean"]=12668,
- ["six"]=54,
- ["sixbengali"]=2540,
- ["sixcircle"]=9317,
- ["sixcircleinversesansserif"]=10127,
- ["sixdeva"]=2412,
- ["sixgujarati"]=2796,
- ["sixgurmukhi"]=2668,
- ["sixhackarabic"]=1638,
- ["sixhangzhou"]=12326,
- ["sixideographicparen"]=12837,
- ["sixinferior"]=8326,
- ["sixmonospace"]=65302,
- ["sixparen"]=9337,
- ["sixperiod"]=9357,
- ["sixpersian"]=1782,
- ["sixroman"]=8565,
- ["sixsuperior"]=8310,
- ["sixteencircle"]=9327,
- ["sixteencurrencydenominatorbengali"]=2553,
- ["sixteenparen"]=9347,
- ["sixteenperiod"]=9367,
- ["sixthai"]=3670,
- ["slash"]=47,
- ["slashmonospace"]=65295,
- ["slong"]=383,
- ["slongdotaccent"]=7835,
- ["smonospace"]=65363,
- ["sofpasuqhebrew"]=1475,
- ["softhyphen"]=173,
- ["softsigncyrillic"]=1100,
- ["sohiragana"]=12381,
- ["sokatakana"]=12477,
- ["sokatakanahalfwidth"]=65407,
- ["soliduslongoverlaycmb"]=824,
- ["solidusshortoverlaycmb"]=823,
- ["sorusithai"]=3625,
- ["sosalathai"]=3624,
- ["sosothai"]=3595,
- ["sosuathai"]=3626,
- ["space"]=32,
- ["spadesuitblack"]=9824,
- ["spadesuitwhite"]=9828,
- ["sparen"]=9390,
- ["squarebelowcmb"]=827,
- ["squarecc"]=13252,
- ["squarecm"]=13213,
- ["squarediagonalcrosshatchfill"]=9641,
- ["squarehorizontalfill"]=9636,
- ["squarekg"]=13199,
- ["squarekm"]=13214,
- ["squarekmcapital"]=13262,
- ["squareln"]=13265,
- ["squarelog"]=13266,
- ["squaremg"]=13198,
- ["squaremil"]=13269,
- ["squaremm"]=13212,
- ["squaremsquared"]=13217,
- ["squareorthogonalcrosshatchfill"]=9638,
- ["squareupperlefttolowerrightfill"]=9639,
- ["squareupperrighttolowerleftfill"]=9640,
- ["squareverticalfill"]=9637,
- ["squarewhitewithsmallblack"]=9635,
- ["srsquare"]=13275,
- ["ssabengali"]=2487,
- ["ssadeva"]=2359,
- ["ssagujarati"]=2743,
- ["ssangcieuckorean"]=12617,
- ["ssanghieuhkorean"]=12677,
- ["ssangieungkorean"]=12672,
- ["ssangkiyeokkorean"]=12594,
- ["ssangnieunkorean"]=12645,
- ["ssangpieupkorean"]=12611,
- ["ssangsioskorean"]=12614,
- ["ssangtikeutkorean"]=12600,
- ["sterling"]=163,
- ["sterlingmonospace"]=65505,
- ["strokelongoverlaycmb"]=822,
- ["strokeshortoverlaycmb"]=821,
- ["subset"]=8834,
- ["subsetnotequal"]=8842,
- ["subsetorequal"]=8838,
- ["succeeds"]=8827,
- ["suchthat"]=8715,
- ["suhiragana"]=12377,
- ["sukatakana"]=12473,
- ["sukatakanahalfwidth"]=65405,
- ["sukunarabic"]=1618,
- ["summation"]=8721,
- ["sun"]=9788,
- ["superset"]=8835,
- ["supersetnotequal"]=8843,
- ["supersetorequal"]=8839,
- ["svsquare"]=13276,
- ["syouwaerasquare"]=13180,
- ["t"]=116,
- ["tabengali"]=2468,
- ["tackdown"]=8868,
- ["tackleft"]=8867,
- ["tadeva"]=2340,
- ["tagujarati"]=2724,
- ["tagurmukhi"]=2596,
- ["taharabic"]=1591,
- ["tahfinalarabic"]=65218,
- ["tahinitialarabic"]=65219,
- ["tahiragana"]=12383,
- ["tahmedialarabic"]=65220,
- ["taisyouerasquare"]=13181,
- ["takatakana"]=12479,
- ["takatakanahalfwidth"]=65408,
- ["tatweelarabic"]=1600,
- ["tau"]=964,
- ["tavdageshhebrew"]=64330,
- ["tavhebrew"]=1514,
- ["tbar"]=359,
- ["tbopomofo"]=12554,
- ["tcaron"]=357,
- ["tccurl"]=680,
- ["tcheharabic"]=1670,
- ["tchehfinalarabic"]=64379,
- ["tchehmedialarabic"]=64381,
- ["tchehmeeminitialarabic"]=64380,
- ["tcircle"]=9443,
- ["tcircumflexbelow"]=7793,
- ["tcommaaccent"]=355,
- ["tdieresis"]=7831,
- ["tdotaccent"]=7787,
- ["tdotbelow"]=7789,
- ["tecyrillic"]=1090,
- ["tedescendercyrillic"]=1197,
- ["teharabic"]=1578,
- ["tehfinalarabic"]=65174,
- ["tehhahinitialarabic"]=64674,
- ["tehhahisolatedarabic"]=64524,
- ["tehinitialarabic"]=65175,
- ["tehiragana"]=12390,
- ["tehjeeminitialarabic"]=64673,
- ["tehjeemisolatedarabic"]=64523,
- ["tehmarbutaarabic"]=1577,
- ["tehmarbutafinalarabic"]=65172,
- ["tehmedialarabic"]=65176,
- ["tehmeeminitialarabic"]=64676,
- ["tehmeemisolatedarabic"]=64526,
- ["tehnoonfinalarabic"]=64627,
- ["tekatakana"]=12486,
- ["tekatakanahalfwidth"]=65411,
- ["telephone"]=8481,
- ["telephoneblack"]=9742,
- ["telishagedolahebrew"]=1440,
- ["telishaqetanahebrew"]=1449,
- ["tencircle"]=9321,
- ["tenideographicparen"]=12841,
- ["tenparen"]=9341,
- ["tenperiod"]=9361,
- ["tenroman"]=8569,
- ["tesh"]=679,
- ["tetdageshhebrew"]=64312,
- ["tethebrew"]=1496,
- ["tetsecyrillic"]=1205,
- ["tevirlefthebrew"]=1435,
- ["thabengali"]=2469,
- ["thadeva"]=2341,
- ["thagujarati"]=2725,
- ["thagurmukhi"]=2597,
- ["thalarabic"]=1584,
- ["thalfinalarabic"]=65196,
- ["thanthakhatthai"]=3660,
- ["theharabic"]=1579,
- ["thehfinalarabic"]=65178,
- ["thehinitialarabic"]=65179,
- ["thehmedialarabic"]=65180,
- ["thereexists"]=8707,
- ["therefore"]=8756,
- ["theta"]=952,
- ["thetasymbolgreek"]=977,
- ["thieuthacirclekorean"]=12921,
- ["thieuthaparenkorean"]=12825,
- ["thieuthcirclekorean"]=12907,
- ["thieuthkorean"]=12620,
- ["thieuthparenkorean"]=12811,
- ["thirteencircle"]=9324,
- ["thirteenparen"]=9344,
- ["thirteenperiod"]=9364,
- ["thonangmonthothai"]=3601,
- ["thook"]=429,
- ["thophuthaothai"]=3602,
- ["thorn"]=254,
- ["thothahanthai"]=3607,
- ["thothanthai"]=3600,
- ["thothongthai"]=3608,
- ["thothungthai"]=3606,
- ["thousandcyrillic"]=1154,
- ["thousandsseparatorpersian"]=1644,
- ["three"]=51,
- ["threebengali"]=2537,
- ["threecircle"]=9314,
- ["threecircleinversesansserif"]=10124,
- ["threedeva"]=2409,
- ["threeeighths"]=8540,
- ["threegujarati"]=2793,
- ["threegurmukhi"]=2665,
- ["threehackarabic"]=1635,
- ["threehangzhou"]=12323,
- ["threeideographicparen"]=12834,
- ["threeinferior"]=8323,
- ["threemonospace"]=65299,
- ["threenumeratorbengali"]=2550,
- ["threeparen"]=9334,
- ["threeperiod"]=9354,
- ["threepersian"]=1779,
- ["threequarters"]=190,
- ["threeroman"]=8562,
- ["threesuperior"]=179,
- ["threethai"]=3667,
- ["thzsquare"]=13204,
- ["tihiragana"]=12385,
- ["tikatakana"]=12481,
- ["tikatakanahalfwidth"]=65409,
- ["tikeutacirclekorean"]=12912,
- ["tikeutaparenkorean"]=12816,
- ["tikeutcirclekorean"]=12898,
- ["tikeutkorean"]=12599,
- ["tikeutparenkorean"]=12802,
- ["tilde"]=732,
- ["tildebelowcmb"]=816,
- ["tildecomb"]=771,
- ["tildedoublecmb"]=864,
- ["tildeoperator"]=8764,
- ["tildeoverlaycmb"]=820,
- ["tildeverticalcmb"]=830,
- ["timescircle"]=8855,
- ["tipehalefthebrew"]=1430,
- ["tippigurmukhi"]=2672,
- ["titlocyrilliccmb"]=1155,
- ["tiwnarmenian"]=1407,
- ["tlinebelow"]=7791,
- ["tmonospace"]=65364,
- ["toarmenian"]=1385,
- ["tohiragana"]=12392,
- ["tokatakana"]=12488,
- ["tokatakanahalfwidth"]=65412,
- ["tonebarextrahighmod"]=741,
- ["tonebarextralowmod"]=745,
- ["tonebarhighmod"]=742,
- ["tonebarlowmod"]=744,
- ["tonebarmidmod"]=743,
- ["tonefive"]=445,
- ["tonesix"]=389,
- ["tonetwo"]=424,
- ["tonos"]=900,
- ["tonsquare"]=13095,
- ["topatakthai"]=3599,
- ["tortoiseshellbracketleft"]=12308,
- ["tortoiseshellbracketleftsmall"]=65117,
- ["tortoiseshellbracketleftvertical"]=65081,
- ["tortoiseshellbracketright"]=12309,
- ["tortoiseshellbracketrightsmall"]=65118,
- ["tortoiseshellbracketrightvertical"]=65082,
- ["totaothai"]=3605,
- ["tpalatalhook"]=427,
- ["tparen"]=9391,
- ["trademark"]=8482,
- ["tretroflexhook"]=648,
- ["triagdn"]=9660,
- ["triaglf"]=9668,
- ["triagrt"]=9658,
- ["triagup"]=9650,
- ["ts"]=678,
- ["tsadidageshhebrew"]=64326,
- ["tsadihebrew"]=1510,
- ["tsecyrillic"]=1094,
- ["tserewidehebrew"]=1461,
- ["tshecyrillic"]=1115,
- ["ttabengali"]=2463,
- ["ttadeva"]=2335,
- ["ttagujarati"]=2719,
- ["ttagurmukhi"]=2591,
- ["tteharabic"]=1657,
- ["ttehfinalarabic"]=64359,
- ["ttehinitialarabic"]=64360,
- ["ttehmedialarabic"]=64361,
- ["tthabengali"]=2464,
- ["tthadeva"]=2336,
- ["tthagujarati"]=2720,
- ["tthagurmukhi"]=2592,
- ["tturned"]=647,
- ["tuhiragana"]=12388,
- ["tukatakana"]=12484,
- ["tukatakanahalfwidth"]=65410,
- ["tusmallhiragana"]=12387,
- ["tusmallkatakana"]=12483,
- ["tusmallkatakanahalfwidth"]=65391,
- ["twelvecircle"]=9323,
- ["twelveparen"]=9343,
- ["twelveperiod"]=9363,
- ["twelveroman"]=8571,
- ["twentycircle"]=9331,
- ["twentyparen"]=9351,
- ["twentyperiod"]=9371,
- ["two"]=50,
- ["twobengali"]=2536,
- ["twocircle"]=9313,
- ["twocircleinversesansserif"]=10123,
- ["twodeva"]=2408,
- ["twodotleader"]=8229,
- ["twodotleadervertical"]=65072,
- ["twogujarati"]=2792,
- ["twogurmukhi"]=2664,
- ["twohackarabic"]=1634,
- ["twohangzhou"]=12322,
- ["twoideographicparen"]=12833,
- ["twoinferior"]=8322,
- ["twomonospace"]=65298,
- ["twonumeratorbengali"]=2549,
- ["twoparen"]=9333,
- ["twoperiod"]=9353,
- ["twopersian"]=1778,
- ["tworoman"]=8561,
- ["twostroke"]=443,
- ["twosuperior"]=178,
- ["twothai"]=3666,
- ["twothirds"]=8532,
- ["u"]=117,
- ["uacute"]=250,
- ["ubar"]=649,
- ["ubengali"]=2441,
- ["ubopomofo"]=12584,
- ["ubreve"]=365,
- ["ucaron"]=468,
- ["ucircle"]=9444,
- ["ucircumflex"]=251,
- ["ucircumflexbelow"]=7799,
- ["ucyrillic"]=1091,
- ["udattadeva"]=2385,
- ["udblgrave"]=533,
- ["udeva"]=2313,
- ["udieresis"]=252,
- ["udieresisacute"]=472,
- ["udieresisbelow"]=7795,
- ["udieresiscaron"]=474,
- ["udieresiscyrillic"]=1265,
- ["udieresisgrave"]=476,
- ["udieresismacron"]=470,
- ["udotbelow"]=7909,
- ["ugrave"]=249,
- ["ugujarati"]=2697,
- ["ugurmukhi"]=2569,
- ["uhiragana"]=12358,
- ["uhookabove"]=7911,
- ["uhorn"]=432,
- ["uhornacute"]=7913,
- ["uhorndotbelow"]=7921,
- ["uhorngrave"]=7915,
- ["uhornhookabove"]=7917,
- ["uhorntilde"]=7919,
- ["uhungarumlaut"]=369,
- ["uhungarumlautcyrillic"]=1267,
- ["uinvertedbreve"]=535,
- ["ukatakana"]=12454,
- ["ukatakanahalfwidth"]=65395,
- ["ukcyrillic"]=1145,
- ["ukorean"]=12636,
- ["umacron"]=363,
- ["umacroncyrillic"]=1263,
- ["umacrondieresis"]=7803,
- ["umatragurmukhi"]=2625,
- ["umonospace"]=65365,
- ["underscore"]=95,
- ["underscoredbl"]=8215,
- ["underscoremonospace"]=65343,
- ["underscorevertical"]=65075,
- ["underscorewavy"]=65103,
- ["union"]=8746,
- ["universal"]=8704,
- ["uogonek"]=371,
- ["uparen"]=9392,
- ["upblock"]=9600,
- ["upperdothebrew"]=1476,
- ["upsilon"]=965,
- ["upsilondieresis"]=971,
- ["upsilondieresistonos"]=944,
- ["upsilonlatin"]=650,
- ["upsilontonos"]=973,
- ["uptackbelowcmb"]=797,
- ["uptackmod"]=724,
- ["uragurmukhi"]=2675,
- ["uring"]=367,
- ["ushortcyrillic"]=1118,
- ["usmallhiragana"]=12357,
- ["usmallkatakana"]=12453,
- ["usmallkatakanahalfwidth"]=65385,
- ["ustraightcyrillic"]=1199,
- ["ustraightstrokecyrillic"]=1201,
- ["utilde"]=361,
- ["utildeacute"]=7801,
- ["utildebelow"]=7797,
- ["uubengali"]=2442,
- ["uudeva"]=2314,
- ["uugujarati"]=2698,
- ["uugurmukhi"]=2570,
- ["uumatragurmukhi"]=2626,
- ["uuvowelsignbengali"]=2498,
- ["uuvowelsigndeva"]=2370,
- ["uuvowelsigngujarati"]=2754,
- ["uvowelsignbengali"]=2497,
- ["uvowelsigndeva"]=2369,
- ["uvowelsigngujarati"]=2753,
- ["v"]=118,
- ["vadeva"]=2357,
- ["vagujarati"]=2741,
- ["vagurmukhi"]=2613,
- ["vakatakana"]=12535,
- ["vavdageshhebrew"]=64309,
- ["vavhebrew"]=1493,
- ["vavholamhebrew"]=64331,
- ["vavvavhebrew"]=1520,
- ["vavyodhebrew"]=1521,
- ["vcircle"]=9445,
- ["vdotbelow"]=7807,
- ["vecyrillic"]=1074,
- ["veharabic"]=1700,
- ["vehfinalarabic"]=64363,
- ["vehinitialarabic"]=64364,
- ["vehmedialarabic"]=64365,
- ["vekatakana"]=12537,
- ["venus"]=9792,
- ["verticalbar"]=124,
- ["verticallineabovecmb"]=781,
- ["verticallinebelowcmb"]=809,
- ["verticallinelowmod"]=716,
- ["verticallinemod"]=712,
- ["vewarmenian"]=1406,
- ["vhook"]=651,
- ["vikatakana"]=12536,
- ["viramabengali"]=2509,
- ["viramadeva"]=2381,
- ["viramagujarati"]=2765,
- ["visargabengali"]=2435,
- ["visargadeva"]=2307,
- ["visargagujarati"]=2691,
- ["vmonospace"]=65366,
- ["voarmenian"]=1400,
- ["voicediterationhiragana"]=12446,
- ["voicediterationkatakana"]=12542,
- ["voicedmarkkana"]=12443,
- ["voicedmarkkanahalfwidth"]=65438,
- ["vokatakana"]=12538,
- ["vparen"]=9393,
- ["vtilde"]=7805,
- ["vturned"]=652,
- ["vuhiragana"]=12436,
- ["vukatakana"]=12532,
- ["w"]=119,
- ["wacute"]=7811,
- ["waekorean"]=12633,
- ["wahiragana"]=12431,
- ["wakatakana"]=12527,
- ["wakatakanahalfwidth"]=65436,
- ["wakorean"]=12632,
- ["wasmallhiragana"]=12430,
- ["wasmallkatakana"]=12526,
- ["wattosquare"]=13143,
- ["wavedash"]=12316,
- ["wavyunderscorevertical"]=65076,
- ["wawarabic"]=1608,
- ["wawfinalarabic"]=65262,
- ["wawhamzaabovearabic"]=1572,
- ["wawhamzaabovefinalarabic"]=65158,
- ["wbsquare"]=13277,
- ["wcircle"]=9446,
- ["wcircumflex"]=373,
- ["wdieresis"]=7813,
- ["wdotaccent"]=7815,
- ["wdotbelow"]=7817,
- ["wehiragana"]=12433,
- ["weierstrass"]=8472,
- ["wekatakana"]=12529,
- ["wekorean"]=12638,
- ["weokorean"]=12637,
- ["wgrave"]=7809,
- ["whitebullet"]=9702,
- ["whitecircle"]=9675,
- ["whitecircleinverse"]=9689,
- ["whitecornerbracketleft"]=12302,
- ["whitecornerbracketleftvertical"]=65091,
- ["whitecornerbracketright"]=12303,
- ["whitecornerbracketrightvertical"]=65092,
- ["whitediamond"]=9671,
- ["whitediamondcontainingblacksmalldiamond"]=9672,
- ["whitedownpointingsmalltriangle"]=9663,
- ["whitedownpointingtriangle"]=9661,
- ["whiteleftpointingsmalltriangle"]=9667,
- ["whiteleftpointingtriangle"]=9665,
- ["whitelenticularbracketleft"]=12310,
- ["whitelenticularbracketright"]=12311,
- ["whiterightpointingsmalltriangle"]=9657,
- ["whiterightpointingtriangle"]=9655,
- ["whitesmallsquare"]=9643,
- ["whitesmilingface"]=9786,
- ["whitesquare"]=9633,
- ["whitestar"]=9734,
- ["whitetelephone"]=9743,
- ["whitetortoiseshellbracketleft"]=12312,
- ["whitetortoiseshellbracketright"]=12313,
- ["whiteuppointingsmalltriangle"]=9653,
- ["whiteuppointingtriangle"]=9651,
- ["wihiragana"]=12432,
- ["wikatakana"]=12528,
- ["wikorean"]=12639,
- ["wmonospace"]=65367,
- ["wohiragana"]=12434,
- ["wokatakana"]=12530,
- ["wokatakanahalfwidth"]=65382,
- ["won"]=8361,
- ["wonmonospace"]=65510,
- ["wowaenthai"]=3623,
- ["wparen"]=9394,
- ["wring"]=7832,
- ["wsuperior"]=695,
- ["wturned"]=653,
- ["wynn"]=447,
- ["x"]=120,
- ["xabovecmb"]=829,
- ["xbopomofo"]=12562,
- ["xcircle"]=9447,
- ["xdieresis"]=7821,
- ["xdotaccent"]=7819,
- ["xeharmenian"]=1389,
- ["xi"]=958,
- ["xmonospace"]=65368,
- ["xparen"]=9395,
- ["xsuperior"]=739,
- ["y"]=121,
- ["yaadosquare"]=13134,
- ["yabengali"]=2479,
- ["yacute"]=253,
- ["yadeva"]=2351,
- ["yaekorean"]=12626,
- ["yagujarati"]=2735,
- ["yagurmukhi"]=2607,
- ["yahiragana"]=12420,
- ["yakatakana"]=12516,
- ["yakatakanahalfwidth"]=65428,
- ["yakorean"]=12625,
- ["yamakkanthai"]=3662,
- ["yasmallhiragana"]=12419,
- ["yasmallkatakana"]=12515,
- ["yasmallkatakanahalfwidth"]=65388,
- ["yatcyrillic"]=1123,
- ["ycircle"]=9448,
- ["ycircumflex"]=375,
- ["ydieresis"]=255,
- ["ydotaccent"]=7823,
- ["ydotbelow"]=7925,
- ["yeharabic"]=1610,
- ["yehbarreearabic"]=1746,
- ["yehbarreefinalarabic"]=64431,
- ["yehfinalarabic"]=65266,
- ["yehhamzaabovearabic"]=1574,
- ["yehhamzaabovefinalarabic"]=65162,
- ["yehhamzaaboveinitialarabic"]=65163,
- ["yehhamzaabovemedialarabic"]=65164,
- ["yehinitialarabic"]=65267,
- ["yehmedialarabic"]=65268,
- ["yehmeeminitialarabic"]=64733,
- ["yehmeemisolatedarabic"]=64600,
- ["yehnoonfinalarabic"]=64660,
- ["yehthreedotsbelowarabic"]=1745,
- ["yekorean"]=12630,
- ["yen"]=165,
- ["yenmonospace"]=65509,
- ["yeokorean"]=12629,
- ["yeorinhieuhkorean"]=12678,
- ["yerahbenyomolefthebrew"]=1450,
- ["yericyrillic"]=1099,
- ["yerudieresiscyrillic"]=1273,
- ["yesieungkorean"]=12673,
- ["yesieungpansioskorean"]=12675,
- ["yesieungsioskorean"]=12674,
- ["yetivhebrew"]=1434,
- ["ygrave"]=7923,
- ["yhook"]=436,
- ["yhookabove"]=7927,
- ["yiarmenian"]=1397,
- ["yicyrillic"]=1111,
- ["yikorean"]=12642,
- ["yinyang"]=9775,
- ["yiwnarmenian"]=1410,
- ["ymonospace"]=65369,
- ["yoddageshhebrew"]=64313,
- ["yodhebrew"]=1497,
- ["yodyodhebrew"]=1522,
- ["yodyodpatahhebrew"]=64287,
- ["yohiragana"]=12424,
- ["yoikorean"]=12681,
- ["yokatakana"]=12520,
- ["yokatakanahalfwidth"]=65430,
- ["yokorean"]=12635,
- ["yosmallhiragana"]=12423,
- ["yosmallkatakana"]=12519,
- ["yosmallkatakanahalfwidth"]=65390,
- ["yotgreek"]=1011,
- ["yoyaekorean"]=12680,
- ["yoyakorean"]=12679,
- ["yoyakthai"]=3618,
- ["yoyingthai"]=3597,
- ["yparen"]=9396,
- ["ypogegrammeni"]=890,
- ["ypogegrammenigreekcmb"]=837,
- ["yr"]=422,
- ["yring"]=7833,
- ["ysuperior"]=696,
- ["ytilde"]=7929,
- ["yturned"]=654,
- ["yuhiragana"]=12422,
- ["yuikorean"]=12684,
- ["yukatakana"]=12518,
- ["yukatakanahalfwidth"]=65429,
- ["yukorean"]=12640,
- ["yusbigcyrillic"]=1131,
- ["yusbigiotifiedcyrillic"]=1133,
- ["yuslittlecyrillic"]=1127,
- ["yuslittleiotifiedcyrillic"]=1129,
- ["yusmallhiragana"]=12421,
- ["yusmallkatakana"]=12517,
- ["yusmallkatakanahalfwidth"]=65389,
- ["yuyekorean"]=12683,
- ["yuyeokorean"]=12682,
- ["yyabengali"]=2527,
- ["yyadeva"]=2399,
- ["z"]=122,
- ["zaarmenian"]=1382,
- ["zacute"]=378,
- ["zadeva"]=2395,
- ["zagurmukhi"]=2651,
- ["zaharabic"]=1592,
- ["zahfinalarabic"]=65222,
- ["zahinitialarabic"]=65223,
- ["zahiragana"]=12374,
- ["zahmedialarabic"]=65224,
- ["zainarabic"]=1586,
- ["zainfinalarabic"]=65200,
- ["zakatakana"]=12470,
- ["zaqefgadolhebrew"]=1429,
- ["zaqefqatanhebrew"]=1428,
- ["zarqahebrew"]=1432,
- ["zayindageshhebrew"]=64310,
- ["zayinhebrew"]=1494,
- ["zbopomofo"]=12567,
- ["zcaron"]=382,
- ["zcircle"]=9449,
- ["zcircumflex"]=7825,
- ["zcurl"]=657,
- ["zdotaccent"]=380,
- ["zdotbelow"]=7827,
- ["zecyrillic"]=1079,
- ["zedescendercyrillic"]=1177,
- ["zedieresiscyrillic"]=1247,
- ["zehiragana"]=12380,
- ["zekatakana"]=12476,
- ["zero"]=48,
- ["zerobengali"]=2534,
- ["zerodeva"]=2406,
- ["zerogujarati"]=2790,
- ["zerogurmukhi"]=2662,
- ["zerohackarabic"]=1632,
- ["zeroinferior"]=8320,
- ["zeromonospace"]=65296,
- ["zeropersian"]=1776,
- ["zerosuperior"]=8304,
- ["zerothai"]=3664,
- ["zerowidthjoiner"]=65279,
- ["zerowidthnonjoiner"]=8204,
- ["zerowidthspace"]=8203,
- ["zeta"]=950,
- ["zhbopomofo"]=12563,
- ["zhearmenian"]=1386,
- ["zhebrevecyrillic"]=1218,
- ["zhecyrillic"]=1078,
- ["zhedescendercyrillic"]=1175,
- ["zhedieresiscyrillic"]=1245,
- ["zihiragana"]=12376,
- ["zikatakana"]=12472,
- ["zinorhebrew"]=1454,
- ["zlinebelow"]=7829,
- ["zmonospace"]=65370,
- ["zohiragana"]=12382,
- ["zokatakana"]=12478,
- ["zparen"]=9397,
- ["zretroflexhook"]=656,
- ["zstroke"]=438,
- ["zuhiragana"]=12378,
- ["zukatakana"]=12474,
-
- -- extras
-
- ["Dcroat"]=272,
- ["Delta"]=8710,
- ["Euro"]=8364,
- ["H18533"]=9679,
- ["H18543"]=9642,
- ["H18551"]=9643,
- ["H22073"]=9633,
- ["Ldot"]=319,
- ["Oslashacute"]=510,
- ["SF10000"]=9484,
- ["SF20000"]=9492,
- ["SF30000"]=9488,
- ["SF40000"]=9496,
- ["SF50000"]=9532,
- ["SF60000"]=9516,
- ["SF70000"]=9524,
- ["SF80000"]=9500,
- ["SF90000"]=9508,
- ["Upsilon1"]=978,
- ["afii10066"]=1073,
- ["afii10067"]=1074,
- ["afii10068"]=1075,
- ["afii10069"]=1076,
- ["afii10070"]=1077,
- ["afii10071"]=1105,
- ["afii10072"]=1078,
- ["afii10073"]=1079,
- ["afii10074"]=1080,
- ["afii10075"]=1081,
- ["afii10076"]=1082,
- ["afii10077"]=1083,
- ["afii10078"]=1084,
- ["afii10079"]=1085,
- ["afii10080"]=1086,
- ["afii10081"]=1087,
- ["afii10082"]=1088,
- ["afii10083"]=1089,
- ["afii10084"]=1090,
- ["afii10085"]=1091,
- ["afii10086"]=1092,
- ["afii10087"]=1093,
- ["afii10088"]=1094,
- ["afii10089"]=1095,
- ["afii10090"]=1096,
- ["afii10091"]=1097,
- ["afii10092"]=1098,
- ["afii10093"]=1099,
- ["afii10094"]=1100,
- ["afii10095"]=1101,
- ["afii10096"]=1102,
- ["afii10097"]=1103,
- ["afii10098"]=1169,
- ["afii10099"]=1106,
- ["afii10100"]=1107,
- ["afii10101"]=1108,
- ["afii10102"]=1109,
- ["afii10103"]=1110,
- ["afii10104"]=1111,
- ["afii10105"]=1112,
- ["afii10106"]=1113,
- ["afii10107"]=1114,
- ["afii10108"]=1115,
- ["afii10109"]=1116,
- ["afii10110"]=1118,
- ["afii10193"]=1119,
- ["afii10194"]=1123,
- ["afii10195"]=1139,
- ["afii10196"]=1141,
- ["afii10846"]=1241,
- ["afii208"]=8213,
- ["afii57381"]=1642,
- ["afii57388"]=1548,
- ["afii57392"]=1632,
- ["afii57393"]=1633,
- ["afii57394"]=1634,
- ["afii57395"]=1635,
- ["afii57396"]=1636,
- ["afii57397"]=1637,
- ["afii57398"]=1638,
- ["afii57399"]=1639,
- ["afii57400"]=1640,
- ["afii57401"]=1641,
- ["afii57403"]=1563,
- ["afii57407"]=1567,
- ["afii57409"]=1569,
- ["afii57410"]=1570,
- ["afii57411"]=1571,
- ["afii57412"]=1572,
- ["afii57413"]=1573,
- ["afii57414"]=1574,
- ["afii57415"]=1575,
- ["afii57416"]=1576,
- ["afii57417"]=1577,
- ["afii57418"]=1578,
- ["afii57419"]=1579,
- ["afii57420"]=1580,
- ["afii57421"]=1581,
- ["afii57422"]=1582,
- ["afii57423"]=1583,
- ["afii57424"]=1584,
- ["afii57425"]=1585,
- ["afii57426"]=1586,
- ["afii57427"]=1587,
- ["afii57428"]=1588,
- ["afii57429"]=1589,
- ["afii57430"]=1590,
- ["afii57431"]=1591,
- ["afii57432"]=1592,
- ["afii57433"]=1593,
- ["afii57434"]=1594,
- ["afii57440"]=1600,
- ["afii57441"]=1601,
- ["afii57442"]=1602,
- ["afii57443"]=1603,
- ["afii57444"]=1604,
- ["afii57445"]=1605,
- ["afii57446"]=1606,
- ["afii57448"]=1608,
- ["afii57449"]=1609,
- ["afii57450"]=1610,
- ["afii57451"]=1611,
- ["afii57452"]=1612,
- ["afii57453"]=1613,
- ["afii57454"]=1614,
- ["afii57455"]=1615,
- ["afii57456"]=1616,
- ["afii57457"]=1617,
- ["afii57458"]=1618,
- ["afii57470"]=1607,
- ["afii57505"]=1700,
- ["afii57506"]=1662,
- ["afii57507"]=1670,
- ["afii57508"]=1688,
- ["afii57509"]=1711,
- ["afii57511"]=1657,
- ["afii57512"]=1672,
- ["afii57513"]=1681,
- ["afii57514"]=1722,
- ["afii57519"]=1746,
- ["afii57636"]=8362,
- ["afii57645"]=1470,
- ["afii57658"]=1475,
- ["afii57664"]=1488,
- ["afii57665"]=1489,
- ["afii57666"]=1490,
- ["afii57667"]=1491,
- ["afii57668"]=1492,
- ["afii57669"]=1493,
- ["afii57670"]=1494,
- ["afii57671"]=1495,
- ["afii57672"]=1496,
- ["afii57673"]=1497,
- ["afii57674"]=1498,
- ["afii57675"]=1499,
- ["afii57676"]=1500,
- ["afii57677"]=1501,
- ["afii57678"]=1502,
- ["afii57679"]=1503,
- ["afii57680"]=1504,
- ["afii57681"]=1505,
- ["afii57682"]=1506,
- ["afii57683"]=1507,
- ["afii57684"]=1508,
- ["afii57685"]=1509,
- ["afii57686"]=1510,
- ["afii57687"]=1511,
- ["afii57688"]=1512,
- ["afii57689"]=1513,
- ["afii57690"]=1514,
- ["afii57716"]=1520,
- ["afii57717"]=1521,
- ["afii57718"]=1522,
- ["afii57793"]=1460,
- ["afii57794"]=1461,
- ["afii57795"]=1462,
- ["afii57796"]=1467,
- ["afii57797"]=1464,
- ["afii57798"]=1463,
- ["afii57799"]=1456,
- ["afii57800"]=1458,
- ["afii57801"]=1457,
- ["afii57802"]=1459,
- ["afii57803"]=1474,
- ["afii57804"]=1473,
- ["afii57806"]=1465,
- ["afii57807"]=1468,
- ["afii57839"]=1469,
- ["afii57841"]=1471,
- ["afii57842"]=1472,
- ["afii57929"]=700,
- ["afii61248"]=8453,
- ["afii61289"]=8467,
- ["afii61352"]=8470,
- ["afii61664"]=8204,
- ["afii63167"]=1645,
- ["afii64937"]=701,
- ["arrowdblboth"]=8660,
- ["arrowdblleft"]=8656,
- ["arrowdblright"]=8658,
- ["arrowupdnbse"]=8616,
- ["bar"]=124,
- ["circle"]=9675,
- ["circlemultiply"]=8855,
- ["circleplus"]=8853,
- ["club"]=9827,
- ["colonmonetary"]=8353,
- ["dcroat"]=273,
- ["dkshade"]=9619,
- ["existential"]=8707,
- ["female"]=9792,
- ["gradient"]=8711,
- ["heart"]=9829,
- ["hookabovecomb"]=777,
- ["invcircle"]=9689,
- ["ldot"]=320,
- ["longs"]=383,
- ["ltshade"]=9617,
- ["male"]=9794,
- ["mu"]=181,
- ["napostrophe"]=329,
- ["notelement"]=8713,
- ["omega1"]=982,
- ["openbullet"]=9702,
- ["orthogonal"]=8735,
- ["oslashacute"]=511,
- ["phi1"]=981,
- ["propersubset"]=8834,
- ["propersuperset"]=8835,
- ["reflexsubset"]=8838,
- ["reflexsuperset"]=8839,
- ["shade"]=9618,
- ["sigma1"]=962,
- ["similar"]=8764,
- ["smileface"]=9786,
- ["spacehackarabic"]=32,
- ["spade"]=9824,
- ["theta1"]=977,
- ["twodotenleader"]=8229,
-}
diff --git a/fontdbutil.lua b/fontdbutil.lua
new file mode 100755
index 0000000..31c7dfa
--- /dev/null
+++ b/fontdbutil.lua
@@ -0,0 +1,395 @@
+#!/usr/bin/env texlua
+--[[
+This file was originally written by Elie Roux and Khaled Hosny and is under CC0
+license (see http://creativecommons.org/publicdomain/zero/1.0/legalcode).
+
+This file is a wrapper for luaotfload's font names module. It is part of the
+luaotfload bundle; please see the luaotfload documentation for more info.
+--]]
+
+kpse.set_program_name"luatex"
+
+local stringformat = string.format
+local texiowrite_nl = texio.write_nl
+local stringfind = string.find
+local stringlower = string.lower
+
+-- First we need to be able to load modules (code copied from
+-- luatexbase-loader.sty):
+local loader_file = "luatexbase.loader.lua"
+local loader_path = assert(kpse.find_file(loader_file, "lua"),
+ "File '"..loader_file.."' not found")
+
+string.quoted = string.quoted or function (str)
+ return string.format("%q",str)
+end
+
+--texiowrite_nl("("..loader_path..")")
+dofile(loader_path) -- FIXME this pollutes stdout with filenames
+
+--[[doc--
+Depending on how the script is called, we change its behavior.
+For backwards compatibility, moving or symlinking the script to a
+file name starting with \fileent{mkluatexfontdb} will cause it to
+trigger a database update on every run.
+Running it as \fileent{fontdbutil} -- the new name -- will trigger an
+update upon request only.
+
+There are two naming conventions followed here: firstly that of
+utilities such as \fileent{mktexpk}, \fileent{mktexlsr} and the like,
+and secondly that of \fileent{fmtutil}.
+After support for querying the database was added, the latter appeared
+to be the more appropriate choice.
+--doc]]--
+
+config = config or { }
+local config = config
+config.luaotfload = config.luaotfload or { }
+
+do -- we don’t have file.basename and the like yet, so inline parser ftw
+ local C, P = lpeg.C, lpeg.P
+ local lpegmatch = lpeg.match
+ local slash = P"/"
+ local dot = P"."
+ local noslash = 1 - slash
+ local slashes = slash^1
+ local path = slashes^-1 * (noslash^1 * slashes)^1
+ local thename = (1 - slash - dot)^1
+ local extension = dot * (1 - slash - dot)^1
+ local p_basename = path^-1 * C(thename) * extension^-1 * P(-1)
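+    -- Illustration (hypothetical inputs, not executed here):
+    --   lpegmatch(p_basename, "/usr/local/bin/fontdbutil.lua") --> "fontdbutil"
+    --   lpegmatch(p_basename, "mkluatexfontdb")                --> "mkluatexfontdb"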
+
+ -- if stringfind(stringlower(arg[0]), "^fontdbutil") then
+ local self = lpegmatch(p_basename, stringlower(arg[0]))
+ if self == "fontdbutil" then
+ config.luaotfload.self = "fontdbutil"
+ else
+ config.luaotfload.self = "mkluatexfontdb"
+ end
+end
+
+config.lualibs = config.lualibs or { }
+config.lualibs.verbose = false
+config.lualibs.prefer_merged = true
+config.lualibs.load_extended = false
+
+require"lualibs"
+require"luaotfload-basics-gen.lua"
+require"luaotfload-override.lua" --- this populates the logs.* namespace
+require"luaotfload-database"
+require"alt_getopt"
+
+local version = "2.2" -- same version number as luaotfload
+local names = fonts.names
+
+local db_src_out = names.path.dir.."/"..names.path.basename
+local db_bin_out = file.replacesuffix(db_src_out, "luc")
+
+local help_messages = {
+ fontdbutil = [[
+
+Usage: %s [OPTION]...
+
+Operations on the LuaTeX font database.
+
+This tool is part of the luaotfload package. Valid options are:
+
+-------------------------------------------------------------------------------
+ VERBOSITY AND LOGGING
+
+ -q --quiet don't output anything
+ -v --verbose=LEVEL be more verbose (print the searched directories)
+ -vv print the loaded fonts
+ -vvv print all steps of directory searching
+ -V --version print version and exit
+ -h --help print this message
+
+-------------------------------------------------------------------------------
+ DATABASE
+
+ -u --update update the database
+ -f --force force re-indexing all fonts
+
+ --find="font name" query the database for a font name
+ -F --fuzzy look for approximate matches if --find fails
+ --limit=n limit display of fuzzy matches to <n>
+ (default: n = 1)
+ -i --info display font metadata
+
+ --log=stdout redirect log output to stdout
+
+The font database will be saved to
+ %s
+ %s
+
+]],
+ mkluatexfontdb = [[
+
+Usage: %s [OPTION]...
+
+Rebuild the LuaTeX font database.
+
+Valid options:
+ -f --force force re-indexing all fonts
+ -q --quiet don't output anything
+ -v --verbose=LEVEL be more verbose (print the searched directories)
+ -vv print the loaded fonts
+ -vvv print all steps of directory searching
+ -V --version print version and exit
+ -h --help print this message
+
+The font database will be saved to
+ %s
+ %s
+
+]],
+}
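+-- Typical invocations, for illustration only (the font name is arbitrary):
+--   fontdbutil --update --force
+--   fontdbutil --find="Iwona" --fuzzy --limit=5 --info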
+
+local help_msg = function ( )
+ local template = help_messages[config.luaotfload.self]
+                  or help_messages.fontdbutil
+ texiowrite_nl(stringformat(template, config.luaotfload.self, db_src_out, db_bin_out))
+end
+
+local version_msg = function ( )
+ texiowrite_nl(stringformat(
+ "%s version %s, database version %s.\n",
+ config.luaotfload.self, version, names.version))
+end
+
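+-- show_info_items prints one "key: value" line per entry of the table
+-- returned by fontloader.info(), e.g. (hypothetical values)
+-- "    fontname: FooSans-Regular".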
+local show_info_items = function (fontinfo)
+ local items = table.sortedkeys(fontinfo)
+ for n = 1, #items do
+ local item = items[n]
+ texiowrite_nl(stringformat(
+ [[ %11s: %s]], item, fontinfo[item]))
+ end
+end
+
+local show_font_info = function (filename)
+ local fullname = resolvers.findfile(filename)
+ if fullname then
+ local fontinfo = fontloader.info(fullname)
+ local nfonts = #fontinfo
+        if nfonts > 0 then -- TrueType collection (TTC)
+ logs.names_report(true, 0, "resolve",
+ [[%s is a font collection]], filename)
+ for n = 1, nfonts do
+ logs.names_report(true, 0, "resolve",
+ [[showing info for font no. %d]], n)
+ show_info_items(fontinfo[n])
+ end
+ else
+ show_info_items(fontinfo)
+ end
+ else
+ logs.names_report(true, 0, "resolve",
+ "font %s not found", filename)
+ end
+end
+
+--[[--
+Running the scripts triggers one or more actions that have to be
+executed in the correct order. To avoid duplication we track them in a
+set.
+--]]--
+
+local action_sequence = {
+ "loglevel", "help", "version", "generate", "query"
+}
+local action_pending = table.tohash(action_sequence, false)
+
+action_pending.loglevel = true --- always set the loglevel
+action_pending.generate = false --- this is the default action
+
+local actions = { } --- (jobspec -> (bool * bool)) list
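+--- Every action takes the jobspec and returns two booleans: the first
+--- reports success, the second tells main() whether to continue with
+--- the remaining actions (false for one-shot actions such as --help).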
+
+actions.loglevel = function (job)
+ logs.set_loglevel(job.log_level)
+ logs.names_report("log", 2, "util",
+ "setting log level", "%d", job.log_level)
+ return true, true
+end
+
+actions.version = function (job)
+ version_msg()
+ return true, false
+end
+
+actions.help = function (job)
+ help_msg()
+ return true, false
+end
+
+actions.generate = function (job)
+ local fontnames, savedname
+ fontnames = names.update(fontnames, job.force_reload)
+ logs.names_report("log", 0, "db",
+ "fonts in the database", "%i", #fontnames.mappings)
+ savedname = names.save(fontnames)
+ if savedname then --- FIXME have names.save return bool
+ return true, true
+ end
+ return false, false
+end
+
+actions.query = function (job)
+
+ local query = job.query
+ local tmpspec = {
+ name = query,
+ lookup = "name",
+ specification = "name:" .. query,
+ optsize = 0,
+ }
+
+ local foundname, _whatever, success =
+ fonts.names.resolve(nil, nil, tmpspec)
+
+ if success then
+ logs.names_report(false, 0,
+ "resolve", "Font “%s” found!", query)
+ logs.names_report(false, 0,
+ "resolve", "Resolved file name “%s”:", foundname)
+ if job.show_info then
+ show_font_info(foundname)
+ end
+ else
+ logs.names_report(false, 0,
+ "resolve", "Cannot find “%s”.", query)
+ if job.fuzzy == true then
+ logs.names_report(false, 2,
+ "resolve", "Looking for close matches, this may take a while ...")
+ local success = fonts.names.find_closest(query, job.fuzzy_limit)
+ end
+ end
+ return true, true
+end
+
+--[[--
+Command-line processing.
+The script relies on alt_getopt to process argv and analyzes its
+output.
+
+TODO: the extended lualibs provide the environment.* namespace, which
+could eliminate the dependency on alt_getopt.
+--]]--
+
+local process_cmdline = function ( ) -- unit -> jobspec
+ local result = { -- jobspec
+ force_reload = nil,
+ query = "",
+ log_level = 1,
+ }
+
+ if config.luaotfload.self == "mkluatexfontdb" then
+ action_pending["generate"] = true
+ end
+
+ local long_options = {
+ find = 1,
+ force = "f",
+ fuzzy = "F",
+ help = "h",
+ info = "i",
+ limit = 1,
+ log = 1,
+ quiet = "q",
+ update = "u",
+ verbose = 1 ,
+ version = "V",
+ }
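+    -- Note on the table above: a string value aliases the short option
+    -- with the same meaning, while a value of 1 is assumed to mark a
+    -- long-only option that expects an argument (alt_getopt convention).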
+
+ local short_options = "fFiquvVh"
+
+ local options, _, optarg =
+ alt_getopt.get_ordered_opts (arg, short_options, long_options)
+
+ local nopts = #options
+ for n=1, nopts do
+ local v = options[n]
+ if v == "q" then
+ result.log_level = 0
+ elseif v == "u" then
+ action_pending["generate"] = true
+ elseif v == "v" then
+ if result.log_level > 0 then
+ result.log_level = result.log_level + 1
+ else
+ result.log_level = 2
+ end
+ elseif v == "V" then
+ action_pending["version"] = true
+ elseif v == "h" then
+ action_pending["help"] = true
+ elseif v == "f" then
+ result.update = true
+ result.force_reload = 1
+ elseif v == "verbose" then
+ local lvl = optarg[n]
+ if lvl then
+ result.log_level = tonumber(lvl)
+ end
+ elseif v == "log" then
+ local str = optarg[n]
+ if str then
+ logs.set_logout(str)
+ end
+ elseif v == "find" then
+ action_pending["query"] = true
+ result.query = optarg[n]
+ elseif v == "F" then
+ result.fuzzy = true
+ elseif v == "limit" then
+ local lim = optarg[n]
+ if lim then
+ result.fuzzy_limit = tonumber(lim)
+ end
+ elseif v == "i" then
+ result.show_info = true
+ end
+ end
+ return result
+end
+
+local main = function ( ) -- unit -> int
+ local retval = 0
+ local job = process_cmdline()
+
+-- inspect(action_pending)
+-- inspect(job)
+
+ for i=1, #action_sequence do
+ local actionname = action_sequence[i]
+ local exit = false
+ if action_pending[actionname] then
+ logs.names_report("log", 3, "util", "preparing for task",
+ "%s", actionname)
+
+ local action = actions[actionname]
+ local success, continue = action(job)
+
+ if not success then
+ logs.names_report(false, 0, "util",
+ "could not finish task", "%s", actionname)
+ retval = -1
+ exit = true
+ elseif not continue then
+ logs.names_report(false, 3, "util",
+ "task completed, exiting", "%s", actionname)
+ exit = true
+ else
+ logs.names_report(false, 3, "util",
+ "task completed successfully", "%s", actionname)
+ end
+ end
+ if exit then break end
+ end
+
+ texiowrite_nl""
+ return retval
+end
+
+return main()
+
+-- vim:tw=71:sw=4:ts=4:expandtab
diff --git a/otfl-basics-gen.lua b/luaotfload-basics-gen.lua
index bdbc3cf..727086e 100644
--- a/otfl-basics-gen.lua
+++ b/luaotfload-basics-gen.lua
@@ -12,7 +12,8 @@ if context then
end
local dummyfunction = function() end
-local dummyreporter = function(c) return function(...) texio.write(c .. " : " .. string.format(...)) end end
+----- dummyreporter = function(c) return function(...) texio.write_nl(c .. " : " .. string.format(...)) end end
+local dummyreporter = function(c) return function(...) texio.write_nl(c .. " : " .. string.formatters(...)) end end
statistics = {
register = dummyfunction,
@@ -74,32 +75,42 @@ texconfig.kpse_init = true
resolvers = resolvers or { } -- no fancy file helpers used
local remapper = {
- otf = "opentype fonts",
- ttf = "truetype fonts",
- ttc = "truetype fonts",
- dfont = "truetype fonts", -- "truetype dictionary",
- cid = "cid maps",
- fea = "font feature files",
- pfa = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this!
- pfb = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this!
+ otf = "opentype fonts",
+ ttf = "truetype fonts",
+ ttc = "truetype fonts",
+ dfont = "truetype fonts", -- "truetype dictionary",
+ cid = "cid maps",
+ cidmap = "cid maps",
+ fea = "font feature files",
+ pfa = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this!
+ pfb = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this!
}
function resolvers.findfile(name,fileformat)
- name = string.gsub(name,"\\","\/")
- fileformat = fileformat and string.lower(fileformat)
- local found = kpse.find_file(name,(fileformat and fileformat ~= "" and (remapper[fileformat] or fileformat)) or file.extname(name,"tex"))
+ name = string.gsub(name,"\\","/")
+ if not fileformat or fileformat == "" then
+ fileformat = file.suffix(name)
+ if fileformat == "" then
+ fileformat = "tex"
+ end
+ end
+ fileformat = string.lower(fileformat)
+ fileformat = remapper[fileformat] or fileformat
+ local found = kpse.find_file(name,fileformat)
if not found or found == "" then
found = kpse.find_file(name,"other text files")
end
return found
end
-function resolvers.findbinfile(name,fileformat)
- if not fileformat or fileformat == "" then
- fileformat = file.extname(name) -- string.match(name,"%.([^%.]-)$")
- end
- return resolvers.findfile(name,(fileformat and remapper[fileformat]) or fileformat)
-end
+-- function resolvers.findbinfile(name,fileformat)
+-- if not fileformat or fileformat == "" then
+-- fileformat = file.suffix(name)
+-- end
+-- return resolvers.findfile(name,(fileformat and remapper[fileformat]) or fileformat)
+-- end
+
+resolvers.findbinfile = resolvers.findfile
function resolvers.resolve(s)
return s
@@ -206,15 +217,28 @@ function caches.loaddata(paths,name)
for i=1,#paths do
local data = false
local luaname, lucname = makefullname(paths[i],name)
- if lucname and lfs.isfile(lucname) then
- texio.write(string.format("(load: %s)",lucname))
+ if lucname and lfs.isfile(lucname) then -- maybe also check for size
+ texio.write(string.format("(load luc: %s)",lucname))
data = loadfile(lucname)
+ if data then
+ data = data()
+ end
+ if data then
+ return data
+ else
+ texio.write(string.format("(loading failed: %s)",lucname))
+ end
end
- if not data and luaname and lfs.isfile(luaname) then
- texio.write(string.format("(load: %s)",luaname))
+ if luaname and lfs.isfile(luaname) then
+ texio.write(string.format("(load lua: %s)",luaname))
data = loadfile(luaname)
+ if data then
+ data = data()
+ end
+ if data then
+ return data
+ end
end
- return data and data()
end
end
@@ -241,20 +265,23 @@ end
-- this) in which case one should limit the method to luac and enable support
-- for execution.
-caches.compilemethod = "luac" -- luac dump both
+caches.compilemethod = "both"
function caches.compile(data,luaname,lucname)
local done = false
if caches.compilemethod == "luac" or caches.compilemethod == "both" then
- local command = "-o " .. string.quoted(lucname) .. " -s " .. string.quoted(luaname)
- done = os.spawn("texluac " .. command) == 0
+ done = os.spawn("texluac -o " .. string.quoted(lucname) .. " -s " .. string.quoted(luaname)) == 0
end
if not done and (caches.compilemethod == "dump" or caches.compilemethod == "both") then
- local d = table.serialize(data,true)
+ local d = io.loaddata(luaname)
+ if not d or d == "" then
+ d = table.serialize(data,true) -- slow
+ end
if d and d ~= "" then
local f = io.open(lucname,'w')
if f then
- local s = loadstring(d)
+ local s
+ if _G["loadstring"] then s=loadstring(d) else s=load(d) end
f:write(string.dump(s))
f:close()
end
diff --git a/otfl-basics-nod.lua b/luaotfload-basics-nod.lua
index 151d98a..151d98a 100644
--- a/otfl-basics-nod.lua
+++ b/luaotfload-basics-nod.lua
diff --git a/otfl-blacklist.cnf b/luaotfload-blacklist.cnf
index 771649b..771649b 100644
--- a/otfl-blacklist.cnf
+++ b/luaotfload-blacklist.cnf
diff --git a/otfl-font-clr.lua b/luaotfload-colors.lua
index 9fe2916..3d8bfab 100644
--- a/otfl-font-clr.lua
+++ b/luaotfload-colors.lua
@@ -1,12 +1,20 @@
-if not modules then modules = { } end modules ['font-clr'] = {
+if not modules then modules = { } end modules ['luaotfload-colors'] = {
version = 1.001,
- comment = "companion to font-otf.lua (font color)",
+ comment = "companion to luaotfload.lua (font color)",
author = "Khaled Hosny and Elie Roux",
copyright = "Luaotfload Development Team",
license = "GPL"
}
-local format = string.format
+local newnode = node.new
+local nodetype = node.id
+local traverse_nodes = node.traverse
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+
+local stringformat = string.format
+local stringgsub = string.gsub
+local stringfind = string.find
local otffeatures = fonts.constructors.newfeatures("otf")
local ids = fonts.hashes.identifiers
@@ -21,9 +29,9 @@ local function setcolor(tfmdata,value)
if #value == 6 or #value == 8 then
sanitized = value
elseif #value == 7 then
- _, _, sanitized = value:find("(......)")
+ _, _, sanitized = stringfind(value, "(......)")
elseif #value > 8 then
- _, _, sanitized = value:find("(........)")
+ _, _, sanitized = stringfind(value, "(........)")
else
-- broken color code ignored, issue a warning?
end
@@ -46,9 +54,9 @@ registerotffeature {
local function hex2dec(hex,one)
if one then
- return format("%.1g", tonumber(hex, 16)/255)
+ return stringformat("%.1g", tonumber(hex, 16)/255)
else
- return format("%.3g", tonumber(hex, 16)/255)
+ return stringformat("%.3g", tonumber(hex, 16)/255)
end
end
@@ -59,17 +67,17 @@ local function pageresources(a)
if not res then
res = "/TransGs1<</ca 1/CA 1>>"
end
- res2 = format("/TransGs%s<</ca %s/CA %s>>", a, a, a)
- res = format("%s%s", res, res:find(res2) and "" or res2)
+ res2 = stringformat("/TransGs%s<</ca %s/CA %s>>", a, a, a)
+ res = stringformat("%s%s", res, stringfind(res, res2) and "" or res2)
end
local function hex_to_rgba(hex)
local r, g, b, a, push, pop, res3
if hex then
if #hex == 6 then
- _, _, r, g, b = hex:find('(..)(..)(..)')
+ _, _, r, g, b = stringfind(hex, '(..)(..)(..)')
elseif #hex == 8 then
- _, _, r, g, b, a = hex:find('(..)(..)(..)(..)')
+ _, _, r, g, b, a = stringfind(hex, '(..)(..)(..)(..)')
a = hex2dec(a,true)
pageresources(a)
end
@@ -80,24 +88,24 @@ local function hex_to_rgba(hex)
g = hex2dec(g)
b = hex2dec(b)
if a then
- push = format('/TransGs%g gs %s %s %s rg', a, r, g, b)
+ push = stringformat('/TransGs%g gs %s %s %s rg', a, r, g, b)
pop = '0 g /TransGs1 gs'
else
- push = format('%s %s %s rg', r, g, b)
+ push = stringformat('%s %s %s rg', r, g, b)
pop = '0 g'
end
return push, pop
end
-local glyph = node.id('glyph')
-local hlist = node.id('hlist')
-local vlist = node.id('vlist')
-local whatsit = node.id('whatsit')
-local pgi = node.id('page_insert')
-local sbox = node.id('sub_box')
+local glyph = nodetype('glyph')
+local hlist = nodetype('hlist')
+local vlist = nodetype('vlist')
+local whatsit = nodetype('whatsit')
+local pgi = nodetype('page_insert')
+local sbox = nodetype('sub_box')
local function lookup_next_color(head)
- for n in node.traverse(head) do
+ for n in traverse_nodes(head) do
if n.id == glyph then
if ids[n.font] and ids[n.font].properties and ids[n.font].properties.color then
return ids[n.font].properties.color
@@ -119,7 +127,7 @@ local function lookup_next_color(head)
end
local function node_colorize(head, current_color, next_color)
- for n in node.traverse(head) do
+ for n in traverse_nodes(head) do
if n.id == hlist or n.id == vlist or n.id == sbox then
local next_color_in = lookup_next_color(n.next) or next_color
n.list, current_color = node_colorize(n.list, current_color, next_color_in)
@@ -128,19 +136,19 @@ local function node_colorize(head, current_color, next_color)
if tfmdata and tfmdata.properties and tfmdata.properties.color then
if tfmdata.properties.color ~= current_color then
local pushcolor = hex_to_rgba(tfmdata.properties.color)
- local push = node.new(whatsit, 8)
+ local push = newnode(whatsit, 8)
push.mode = 1
push.data = pushcolor
- head = node.insert_before(head, n, push)
+ head = insert_node_before(head, n, push)
current_color = tfmdata.properties.color
end
local next_color_in = lookup_next_color (n.next) or next_color
if next_color_in ~= tfmdata.properties.color then
local _, popcolor = hex_to_rgba(tfmdata.properties.color)
- local pop = node.new(whatsit, 8)
+ local pop = newnode(whatsit, 8)
pop.mode = 1
pop.data = popcolor
- head = node.insert_after(head, n, pop)
+ head = insert_node_after(head, n, pop)
current_color = nil
end
end
@@ -154,11 +162,11 @@ local function font_colorize(head)
-- and remove it to avoid duplicating it later
if res then
local r = "/ExtGState<<"..res..">>"
- tex.pdfpageresources = tex.pdfpageresources:gsub(r, "")
+ tex.pdfpageresources = stringgsub(tex.pdfpageresources, r, "")
end
local h = node_colorize(head, nil, nil)
-- now append our page resources
- if res and res:find("%S") then -- test for non-empty string
+ if res and stringfind(res, "%S") then -- test for non-empty string
local r = "/ExtGState<<"..res..">>"
tex.pdfpageresources = tex.pdfpageresources..r
end
@@ -169,7 +177,11 @@ local color_callback_activated = 0
function add_color_callback()
if color_callback_activated == 0 then
- luatexbase.add_to_callback("pre_output_filter", font_colorize, "loaotfload.colorize")
+ luatexbase.add_to_callback(
+ "pre_output_filter", font_colorize, "luaotfload.colorize")
color_callback_activated = 1
end
end
+
+-- vim:tw=71:sw=4:ts=4:expandtab
+
diff --git a/luaotfload-database.lua b/luaotfload-database.lua
new file mode 100644
index 0000000..19b04db
--- /dev/null
+++ b/luaotfload-database.lua
@@ -0,0 +1,1049 @@
+if not modules then modules = { } end modules ['luaotfload-database'] = {
+ version = 2.2,
+ comment = "companion to luaotfload.lua",
+ author = "Khaled Hosny and Elie Roux",
+ copyright = "Luaotfload Development Team",
+ license = "GNU GPL v2"
+}
+
+--- TODO: if the specification is an absolute filename with a font not in the
+--- database, add the font to the database and load it. There is a small
+--- difficulty with the filenames of the TEXMF tree that are referenced as
+--- relative paths...
+
+--- Luatex builtins
+local load = load
+local next = next
+local pcall = pcall
+local require = require
+local tonumber = tonumber
+
+local iolines = io.lines
+local ioopen = io.open
+local kpseexpand_path = kpse.expand_path
+local kpseexpand_var = kpse.expand_var
+local kpselookup = kpse.lookup
+local kpsereadable_file = kpse.readable_file
+local mathabs = math.abs
+local mathmin = math.min
+local stringfind = string.find
+local stringformat = string.format
+local stringgmatch = string.gmatch
+local stringgsub = string.gsub
+local stringlower = string.lower
+local stringsub = string.sub
+local stringupper = string.upper
+local tableconcat = table.concat
+local tablecopy = table.copy
+local tablesort = table.sort
+local tabletofile = table.tofile
+local texiowrite_nl = texio.write_nl
+local utf8gsub = unicode.utf8.gsub
+local utf8lower = unicode.utf8.lower
+
+--- these come from Lualibs/Context
+local dirglob = dir.glob
+local dirmkdirs = dir.mkdirs
+local filebasename = file.basename
+local filecollapsepath = file.collapsepath or file.collapse_path
+local fileextname = file.extname
+local fileiswritable = file.iswritable
+local filejoin = file.join
+local filereplacesuffix = file.replacesuffix
+local filesplitpath = file.splitpath or file.split_path
+local stringis_empty = string.is_empty
+local stringsplit = string.split
+local stringstrip = string.strip
+local tableappend = table.append
+local tabletohash = table.tohash
+
+--- the font loader namespace is “fonts”, same as in Context
+fonts = fonts or { }
+fonts.names = fonts.names or { }
+
+local names = fonts.names
+
+names.version = 2.2
+names.data = nil
+names.path = {
+ basename = "luaotfload-names.lua",
+ dir = "",
+ path = "",
+}
+
+-- We use the cache.* of ConTeXt (see luat-basics-gen), we can
+-- use it safely (all checks and directory creations are already done). It
+-- uses TEXMFCACHE or TEXMFVAR as starting points.
+local writable_path = caches.getwritablepath("names","")
+if not writable_path then
+ error("Impossible to find a suitable writeable cache...")
+end
+names.path.dir = writable_path
+names.path.path = filejoin(writable_path, names.path.basename)
+
+
+---- <FIXME>
+---
+--- these lines load some binary module called “lualatex-platform”
+--- that doesn’t appear to build with Lua 5.2. I’m going ahead and
+--- disabling it for the time being until someone clarifies what it
+--- is supposed to do and whether we should care to fix it.
+---
+--local success = pcall(require, "luatexbase.modutils")
+--if success then
+-- success = pcall(luatexbase.require_module,
+-- "lualatex-platform", "2011/03/30")
+-- print(success)
+--end
+
+--local get_installed_fonts
+--if success then
+-- get_installed_fonts = lualatex.platform.get_installed_fonts
+--else
+-- function get_installed_fonts()
+-- end
+--end
+---- </FIXME>
+
+local get_installed_fonts = nil
+
+--[[doc--
+Auxiliary functions
+--doc]]--
+
+
+local report = logs.names_report
+
+local sanitize_string = function (str)
+ if str ~= nil then
+ return utf8gsub(utf8lower(str), "[^%a%d]", "")
+ end
+ return nil
+end
+
+local fontnames_init = function ( )
+ return {
+ mappings = { },
+ status = { },
+ version = names.version,
+ }
+end
+
+local make_name = function (path)
+ return filereplacesuffix(path, "lua"), filereplacesuffix(path, "luc")
+end
+
+--- When loading a lua file we try its binary complement first, which
+--- is assumed to be located at an identical path, carrying the suffix
+--- .luc.
+
+local code_cache = { }
+
+--- string -> (string * table)
+local load_lua_file = function (path)
+ local code = code_cache[path]
+ if code then return path, code() end
+
+ local foundname = filereplacesuffix(path, "luc")
+
+ local fh = ioopen(foundname, "rb") -- try bin first
+ if fh then
+ local chunk = fh:read"*all"
+ fh:close()
+ code = load(chunk, "b")
+ end
+
+ if not code then --- fall back to text file
+ foundname = filereplacesuffix(path, "lua")
+ fh = ioopen(foundname, "rb")
+ if fh then
+ local chunk = fh:read"*all"
+ fh:close()
+ code = load(chunk, "t")
+ end
+ end
+
+ if not code then return nil, nil end
+
+ code_cache[path] = code --- insert into memo
+ return foundname, code()
+end
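+
+--- Illustrative usage, assuming a database file exists at
+--- names.path.path: unless the chunk is served from the in-memory
+--- cache, the returned name is the .luc bytecode file if that one
+--- loaded, otherwise the .lua source; the second value is the
+--- resulting table.
+---
+---     local foundname, data = load_lua_file(names.path.path)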
+
+--- define locals in scope
+local find_closest
+local font_fullinfo
+local load_names
+local read_fonts_conf
+local reload_db
+local resolve
+local save_names
+local scan_external_dir
+local update_names
+
+load_names = function ( )
+ local foundname, data = load_lua_file(names.path.path)
+
+ if data then
+ report("info", 1, "db",
+ "Font names database loaded", "%s", foundname)
+ else
+ report("info", 0, "db",
+ [[Font names database not found, generating new one.
+ This can take several minutes; please be patient.]])
+ data = update_names(fontnames_init())
+ save_names(data)
+ end
+ return data
+end
+
+local fuzzy_limit = 1 --- display closest only
+
+local style_synonyms = { set = { } }
+do
+ style_synonyms.list = {
+ regular = { "normal", "roman",
+ "plain", "book",
+ "medium", },
+ --- TODO note from Élie Roux
+ --- boldregular was for old versions of Linux Libertine, is it still useful?
+ --- semibold is in new versions of Linux Libertine, but there is also a bold,
+ --- not sure it's useful here...
+ bold = { "demi", "demibold",
+ "semibold", "boldregular",},
+ italic = { "regularitalic", "normalitalic",
+ "oblique", "slanted", },
+ bolditalic = { "boldoblique", "boldslanted",
+ "demiitalic", "demioblique",
+ "demislanted", "demibolditalic",
+ "semibolditalic", },
+ }
+
+ for category, synonyms in next, style_synonyms.list do
+ style_synonyms.set[category] = tabletohash(synonyms, true)
+ end
+end
+
+--- state of the database
+local fonts_loaded = false
+local fonts_reloaded = false
+
+--[[doc--
+
+Luatex-fonts, the font-loader package luaotfload imports, comes with
+basic file location facilities (see luatex-fonts-syn.lua).
+However, the builtin functionality is too limited to be of more than
+basic use, which is why we supply our own resolver that accesses the
+font database created by the mkluatexfontdb script.
+
+--doc]]--
+
+
+---
+--- the request specification has the fields:
+---
+--- · features: table
+--- · normal: set of { ccmp clig itlc kern liga locl mark mkmk rlig }
+--- · ???
+--- · forced: string
+--- · lookup: "name" | "file"
+--- · method: string
+--- · name: string
+--- · resolved: string
+--- · size: int
+--- · specification: string (== <lookup> ":" <name>)
+--- · sub: string
+---
+--- the return value of “resolve” is the file name of the requested
+--- font
+---
+--- 'a -> 'a -> table -> (string * string | bool * bool)
+---
+--- note by phg: I added a third return value that indicates a
+--- successful lookup as this cannot be inferred from the other
+--- values.
+---
+---
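+--- A minimal specification for a plain name lookup might look like
+--- this (illustrative sketch; in normal operation the full table is
+--- handed over by the font definer):
+---
+---     local spec = {
+---         name          = "Iwona",
+---         lookup        = "name",
+---         specification = "name:Iwona",
+---         optsize       = 0,
+---     }
+---     local filename, subfont, success = resolve(nil, nil, spec)
+---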
+resolve = function (_,_,specification) -- the 1st two parameters are used by ConTeXt
+ local name = sanitize_string(specification.name)
+ local style = sanitize_string(specification.style) or "regular"
+
+ local size
+ if specification.optsize then
+ size = tonumber(specification.optsize)
+ elseif specification.size then
+ size = specification.size / 65536
+ end
+
+ if not fonts_loaded then
+ names.data = load_names()
+ fonts_loaded = true
+ end
+
+ local data = names.data
+ if type(data) == "table" then
+ local db_version, nms_version = data.version, names.version
+ if data.version ~= names.version then
+ report("log", 0, "db",
+ [[version mismatch; expected %4.3f, got %4.3f]],
+ nms_version, db_version
+ )
+ return reload_db(resolve, nil, nil, specification)
+ end
+ if data.mappings then
+ local found = { }
+ local synonym_set = style_synonyms.set
+ for _,face in next, data.mappings do
+ --- TODO we really should store those in dedicated
+ --- .sanitized field
+ local family = sanitize_string(face.names and face.names.family)
+ local subfamily = sanitize_string(face.names and face.names.subfamily)
+ local fullname = sanitize_string(face.names and face.names.fullname)
+ local psname = sanitize_string(face.names and face.names.psname)
+ local fontname = sanitize_string(face.fontname)
+ local pfullname = sanitize_string(face.fullname)
+ local optsize, dsnsize, maxsize, minsize
+ if #face.size > 0 then
+ optsize = face.size
+ dsnsize = optsize[1] and optsize[1] / 10
+ -- can be nil
+ maxsize = optsize[2] and optsize[2] / 10 or dsnsize
+ minsize = optsize[3] and optsize[3] / 10 or dsnsize
+ end
+ if name == family then
+ if subfamily == style then
+ if optsize then
+ if dsnsize == size
+ or (size > minsize and size <= maxsize) then
+ found[1] = face
+ break
+ else
+ found[#found+1] = face
+ end
+ else
+ found[1] = face
+ break
+ end
+ elseif synonym_set[style] and
+ synonym_set[style][subfamily] then
+ if optsize then
+ if dsnsize == size
+ or (size > minsize and size <= maxsize) then
+ found[1] = face
+ break
+ else
+ found[#found+1] = face
+ end
+ else
+ found[1] = face
+ break
+ end
+ elseif subfamily == "regular" or
+ synonym_set.regular[subfamily] then
+ found.fallback = face
+ end
+ else
+ if name == fullname
+ or name == pfullname
+ or name == fontname
+ or name == psname then
+ if optsize then
+ if dsnsize == size
+ or (size > minsize and size <= maxsize) then
+ found[1] = face
+ break
+ else
+ found[#found+1] = face
+ end
+ else
+ found[1] = face
+ break
+ end
+ end
+ end
+ end
+ if #found == 1 then
+ if kpselookup(found[1].filename[1]) then
+ report("log", 0, "resolve",
+ "font family='%s', subfamily='%s' found: %s",
+ name, style, found[1].filename[1]
+ )
+ return found[1].filename[1], found[1].filename[2], true
+ end
+ elseif #found > 1 then
+ -- we found matching font(s) but not in the requested optical
+ -- sizes, so we loop through the matches to find the one with
+ -- least difference from the requested size.
+ local closest
+ local least = math.huge -- initial value is infinity
+ for i,face in next, found do
+ local dsnsize = face.size[1]/10
+ local difference = mathabs(dsnsize-size)
+ if difference < least then
+ closest = face
+ least = difference
+ end
+ end
+ if kpselookup(closest.filename[1]) then
+ report("log", 0, "resolve",
+ "font family='%s', subfamily='%s' found: %s",
+ name, style, closest.filename[1]
+ )
+ return closest.filename[1], closest.filename[2], true
+ end
+ elseif found.fallback then
+ return found.fallback.filename[1], found.fallback.filename[2], true
+ end
+ --- no font found so far
+ if not fonts_reloaded then
+ --- last straw: try reloading the database
+ return reload_db(resolve, nil, nil, specification)
+ else
+ --- else, fallback to requested name
+ --- specification.name is empty with absolute paths, looks
+            --- like a bug in the specification parser <TODO> is it still
+            --- relevant? apparently not ...
+ return specification.name, false, false
+ end
+ end
+ else --- no db or outdated; reload names and retry
+ if not fonts_reloaded then
+ return reload_db(resolve, nil, nil, specification)
+        else --- unsuccessfully reloaded; bail
+ return specification.name, false, false
+ end
+ end
+end --- resolve()
+
+--- when reload is triggered we update the database
+--- and then re-run the caller with the arg list
+
+--- ('a -> 'a) -> 'a list -> 'a
+reload_db = function (caller, ...)
+ report("log", 1, "db", "reload initiated")
+ names.data = update_names()
+ save_names(names.data)
+ fonts_reloaded = true
+ return caller(...)
+end
+
+--- string -> string -> int
+local iterative_levenshtein = function (s1, s2)
+
+ local costs = { }
+ local len1, len2 = #s1, #s2
+
+ for i = 0, len1 do
+ local last = i
+ for j = 0, len2 do
+ if i == 0 then
+ costs[j] = j
+ else
+ if j > 0 then
+ local current = costs[j-1]
+ if stringsub(s1, i, i) ~= stringsub(s2, j, j) then
+ current = mathmin(current, last, costs[j]) + 1
+ end
+ costs[j-1] = last
+ last = current
+ end
+ end
+ end
+ if i > 0 then costs[len2] = last end
+ end
+
+ return costs[len2]--- lower right has the distance
+end
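+
+--- Worked example (standard edit distance): turning “kitten” into
+--- “sitting” takes three single-character edits, so
+--- iterative_levenshtein("kitten", "sitting") is expected to yield 3.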
+
+--- string -> int -> bool
+find_closest = function (name, limit)
+ local name = sanitize_string(name)
+ limit = limit or fuzzy_limit
+
+ if not fonts_loaded then
+ names.data = load_names()
+ fonts_loaded = true
+ end
+
+ local data = names.data
+
+ if type(data) == "table" then
+ local by_distance = { } --- (int, string list) dict
+ local distances = { } --- int list
+ local cached = { } --- (string, int) dict
+ local mappings = data.mappings
+ local n_fonts = #mappings
+
+ for n = 1, n_fonts do
+ local current = mappings[n]
+ local cnames = current.names
+ --[[
+                This is simplistic but surprisingly fast.
+                Matching is performed against the “family” name
+                of a db record. We then store its “fullname” at
+                its edit distance.
+ We should probably do some weighting over all the
+ font name categories as well as whatever agrep
+ does.
+ --]]
+ if cnames then
+ local fullname, family = cnames.fullname, cnames.family
+ family = sanitize_string(family)
+
+ local dist = cached[family]--- maybe already calculated
+ if not dist then
+ dist = iterative_levenshtein(name, family)
+ cached[family] = dist
+ end
+ local namelst = by_distance[dist]
+ if not namelst then --- first entry
+ namelst = { fullname }
+ distances[#distances+1] = dist
+ else --- append
+ namelst[#namelst+1] = fullname
+ end
+ by_distance[dist] = namelst
+ end
+ end
+
+ --- print the matches according to their distance
+ local n_distances = #distances
+ if n_distances > 0 then --- got some data
+ tablesort(distances)
+ limit = mathmin(n_distances, limit)
+ report(false, 1, "query",
+ "displaying %d distance levels", limit)
+
+ for i = 1, limit do
+ local dist = distances[i]
+ local namelst = by_distance[dist]
+ report(false, 0, "query",
+ "distance from “" .. name .. "”: " .. dist
+ .. "\n " .. tableconcat(namelst, "\n ")
+ )
+ end
+
+ return true
+ end
+ return false
+ else --- need reload
+ return reload_db(find_closest, name)
+ end
+ return false
+end --- find_closest()
+
+--[[doc--
+The data inside an Opentype font file can be quite heterogeneous.
+Thus in order to get the relevant information, parts of the original
+table as returned by the font file reader need to be relocated.
+--doc]]--
+font_fullinfo = function (filename, subfont, texmf)
+ local tfmdata = { }
+ local rawfont = fontloader.open(filename, subfont)
+ if not rawfont then
+ report("log", 1, "error", "failed to open %s", filename)
+ return
+ end
+ local metadata = fontloader.to_table(rawfont)
+ fontloader.close(rawfont)
+ collectgarbage("collect")
+ -- see http://www.microsoft.com/typography/OTSPEC/features_pt.htm#size
+ if metadata.fontstyle_name then
+ for _, name in next, metadata.fontstyle_name do
+ if name.lang == 1033 then --- I hate magic numbers
+ tfmdata.fontstyle_name = name.name
+ end
+ end
+ end
+ if metadata.names then
+ for _, namedata in next, metadata.names do
+ if namedata.lang == "English (US)" then
+ tfmdata.names = {
+ --- see
+ --- https://developer.apple.com/fonts/TTRefMan/RM06/Chap6name.html
+ fullname = namedata.names.compatfull
+ or namedata.names.fullname,
+ family = namedata.names.preffamilyname
+ or namedata.names.family,
+ subfamily= tfmdata.fontstyle_name
+ or namedata.names.prefmodifiers
+ or namedata.names.subfamily,
+ psname = namedata.names.postscriptname
+ }
+ end
+ end
+ else
+        -- no names table, probably a broken font
+        report("log", 1, "db", "broken font rejected", "%s", filename)
+ return
+ end
+ tfmdata.fontname = metadata.fontname
+ tfmdata.fullname = metadata.fullname
+ tfmdata.familyname = metadata.familyname
+ tfmdata.filename = {
+ texmf and filebasename(filename) or filename,
+ subfont
+ }
+ tfmdata.weight = metadata.pfminfo.weight
+ tfmdata.width = metadata.pfminfo.width
+ tfmdata.slant = metadata.italicangle
+ -- don't waste the space with zero values
+ tfmdata.size = {
+ metadata.design_size ~= 0 and metadata.design_size or nil,
+ metadata.design_range_top ~= 0 and metadata.design_range_top or nil,
+ metadata.design_range_bottom ~= 0 and metadata.design_range_bottom or nil,
+ }
+ return tfmdata
+end
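+
+--- Illustrative shape of the table font_fullinfo() returns (values
+--- invented for the example; the second filename element is the
+--- subfont index, false for single-font files, and only non-zero
+--- design sizes are kept, in decipoints as stored in the font):
+---
+---     {
+---       names      = { fullname  = "Iwona Regular",
+---                      family    = "Iwona",
+---                      subfamily = "Regular",
+---                      psname    = "Iwona-Regular", },
+---       fontname   = "Iwona-Regular",
+---       fullname   = "Iwona-Regular",
+---       familyname = "Iwona",
+---       filename   = { "Iwona-Regular.otf", false },
+---       weight     = 400,
+---       width      = 5,
+---       slant      = 0,
+---       size       = { 100 }, --- 10 pt design size
+---     }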
+
+local load_font = function (filename, fontnames, newfontnames, texmf)
+ local newmappings = newfontnames.mappings
+ local newstatus = newfontnames.status
+ local mappings = fontnames.mappings
+ local status = fontnames.status
+ local basename = filebasename(filename)
+ local basefile = texmf and basename or filename
+ if filename then
+ if names.blacklist[filename] or
+ names.blacklist[basename] then
+ report("log", 2, "db", "ignoring font", "%s", filename)
+ return
+ end
+ local timestamp, db_timestamp
+ db_timestamp = status[basefile] and status[basefile].timestamp
+ timestamp = lfs.attributes(filename, "modification")
+
+ local index_status = newstatus[basefile] or (not texmf and newstatus[basename])
+ if index_status and index_status.timestamp == timestamp then
+ -- already indexed this run
+ return
+ end
+
+ newstatus[basefile] = newstatus[basefile] or { }
+ newstatus[basefile].timestamp = timestamp
+ newstatus[basefile].index = newstatus[basefile].index or { }
+
+ if db_timestamp == timestamp and not newstatus[basefile].index[1] then
+ for _,v in next, status[basefile].index do
+ local index = #newstatus[basefile].index
+ newmappings[#newmappings+1] = mappings[v]
+ newstatus[basefile].index[index+1] = #newmappings
+ end
+ report("log", 1, "db", "font already indexed", "%s", basefile)
+ return
+ end
+ local info = fontloader.info(filename)
+ if info then
+ if type(info) == "table" and #info > 1 then
+ for i in next, info do
+ local fullinfo = font_fullinfo(filename, i-1, texmf)
+ if not fullinfo then
+ return
+ end
+ local index = newstatus[basefile].index[i]
+ if newstatus[basefile].index[i] then
+ index = newstatus[basefile].index[i]
+ else
+ index = #newmappings+1
+ end
+ newmappings[index] = fullinfo
+ newstatus[basefile].index[i] = index
+ end
+ else
+ local fullinfo = font_fullinfo(filename, false, texmf)
+ if not fullinfo then
+ return
+ end
+ local index
+ if newstatus[basefile].index[1] then
+ index = newstatus[basefile].index[1]
+ else
+ index = #newmappings+1
+ end
+ newmappings[index] = fullinfo
+ newstatus[basefile].index[1] = index
+ end
+ else
+ report("log", 1, "db", "failed to load", "%s", basefile)
+ end
+ end
+end
+
+local function path_normalize(path)
+ --[[
+ path normalization:
+ - a\b\c -> a/b/c
+ - a/../b -> b
+ - /cygdrive/a/b -> a:/b
+ - reading symlinks under non-Win32
+ - using kpse.readable_file on Win32
+ ]]
+ if os.type == "windows" or os.type == "msdos" or os.name == "cygwin" then
+ path = stringgsub(path, '\\', '/')
+ path = stringlower(path)
+ path = stringgsub(path, '^/cygdrive/(%a)/', '%1:/')
+ end
+ if os.type ~= "windows" and os.type ~= "msdos" then
+ local dest = lfs.readlink(path)
+ if dest then
+ if kpsereadable_file(dest) then
+ path = dest
+ elseif kpsereadable_file(filejoin(file.dirname(path), dest)) then
+ path = filejoin(file.dirname(path), dest)
+ else
+ -- broken symlink?
+ end
+ end
+ end
+ path = filecollapsepath(path)
+ return path
+end
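+
+--- Illustrative example: under Cygwin a path such as
+--- “/cygdrive/C/Windows/Fonts/Arial.TTF” comes out as
+--- “c:/windows/fonts/arial.ttf”; on Unix-like systems a symlink is
+--- replaced by its target whenever that target is readable.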
+
+fonts.path_normalize = path_normalize
+
+names.blacklist = { }
+
+local function read_blacklist()
+ local files = {
+ kpselookup("luaotfload-blacklist.cnf", {all=true, format="tex"})
+ }
+ local blacklist = names.blacklist
+ local whitelist = { }
+
+ if files and type(files) == "table" then
+ for _,v in next, files do
+ for line in iolines(v) do
+ line = stringstrip(line) -- to get rid of lines like " % foo"
+ local first_chr = stringsub(line, 1, 1) --- faster than find
+ if first_chr == "%" or stringis_empty(line) then
+ -- comment or empty line
+ else
+ --- this is highly inefficient
+ line = stringsplit(line, "%")[1]
+ line = stringstrip(line)
+ if stringsub(line, 1, 1) == "-" then
+ whitelist[stringsub(line, 2, -1)] = true
+ else
+ report("log", 2, "db", "blacklisted file", "%s", line)
+ blacklist[line] = true
+ end
+ end
+ end
+ end
+ end
+ for _,fontname in next, whitelist do
+ blacklist[fontname] = nil
+ end
+end
+
+local font_extensions = { "otf", "ttf", "ttc", "dfont" }
+local font_extensions_set = {}
+for key, value in next, font_extensions do
+ font_extensions_set[value] = true
+end
+
+--local installed_fonts_scanned = false --- ugh
+
+--- we already have scan_os_fonts, don’t we?
+
+--local function scan_installed_fonts(fontnames, newfontnames)
+-- --- Try to query and add font list from operating system.
+-- --- This uses the lualatex-platform module.
+-- --- <phg>what for? why can’t we do this in Lua?</phg>
+-- report("info", 0, "Scanning fonts known to operating system...")
+-- local fonts = get_installed_fonts()
+-- if fonts and #fonts > 0 then
+-- installed_fonts_scanned = true
+-- report("log", 2, "operating system fonts found", "%d", #fonts)
+-- for key, value in next, fonts do
+-- local file = value.path
+-- if file then
+-- local ext = fileextname(file)
+-- if ext and font_extensions_set[ext] then
+-- file = path_normalize(file)
+-- report("log", 1, "loading font", "%s", file)
+-- load_font(file, fontnames, newfontnames, false)
+-- end
+-- end
+-- end
+-- else
+-- report("log", 2, "Could not retrieve list of installed fonts")
+-- end
+--end
+
+local function scan_dir(dirname, fontnames, newfontnames, texmf)
+ --[[
+ This function scans a directory and populates the list of fonts
+ with all the fonts it finds.
+ - dirname is the name of the directory to scan
+    - fontnames and newfontnames are the font databases to fill
+ - texmf is a boolean saying if we are scanning a texmf directory
+ ]]
+ local list, found = { }, { }
+ local nbfound = 0
+ report("log", 2, "db", "scanning", "%s", dirname)
+ for _,i in next, font_extensions do
+ for _,ext in next, { i, stringupper(i) } do
+ found = dirglob(stringformat("%s/**.%s$", dirname, ext))
+ -- note that glob fails silently on broken symlinks, which happens
+ -- sometimes in TeX Live.
+ report("log", 2, "db",
+ "fonts found", "%s '%s' fonts found", #found, ext)
+ nbfound = nbfound + #found
+ tableappend(list, found)
+ end
+ end
+ report("log", 2, "db",
+ "fonts found", "%d fonts found in '%s'", nbfound, dirname)
+
+ for _,file in next, list do
+ file = path_normalize(file)
+ report("log", 1, "db",
+ "loading font", "%s", file)
+ load_font(file, fontnames, newfontnames, texmf)
+ end
+end
+
+local function scan_texmf_fonts(fontnames, newfontnames)
+ --[[
+ This function scans all fonts in the texmf tree, through kpathsea
+ variables OPENTYPEFONTS and TTFONTS of texmf.cnf
+ ]]
+ if stringis_empty(kpseexpand_path("$OSFONTDIR")) then
+ report("info", 1, "db", "Scanning TEXMF fonts...")
+ else
+ report("info", 1, "db", "Scanning TEXMF and OS fonts...")
+ end
+ local fontdirs = stringgsub(kpseexpand_path("$OPENTYPEFONTS"), "^%.", "")
+ fontdirs = fontdirs .. stringgsub(kpseexpand_path("$TTFONTS"), "^%.", "")
+ if not stringis_empty(fontdirs) then
+ for _,d in next, filesplitpath(fontdirs) do
+ scan_dir(d, fontnames, newfontnames, true)
+ end
+ end
+end
+
+--[[
+ For the OS fonts, there are several options:
+  - if OSFONTDIR is set (which is the case under Windows by default but
+    not on the other OSs), it is scanned at the same time as the texmf
+    tree, in scan_texmf_fonts.
+  - if not:
+    - under Windows and Mac OS X, we take a look at some hardcoded directories
+    - under Unix, we parse /etc/fonts/fonts.conf and scan the directories it lists
+
+  This means that if you have fonts in unusual directories, you need to add them
+  to OSFONTDIR.
+]]
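+
+--[[
+  Example (illustrative): pointing the scanner at additional font
+  directories by setting the kpathsea variable, either in the
+  environment or in texmf.cnf, before rebuilding the database:
+
+      OSFONTDIR = ~/fonts;/usr/local/share/fonts
+]]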
+
+--- (string -> tab -> tab -> tab)
+read_fonts_conf = function (path, results, passed_paths)
+ --[[
+    This function parses /etc/fonts/fonts.conf and returns all the
+    directories it finds.  The code is minimal; please report any
+    errors it may generate.
+
+ TODO fonts.conf are some kind of XML so in theory the following
+ is totally inappropriate. Maybe a future version of the
+ lualibs will include the lxml-* files from Context so we
+ can write something presentable instead.
+ ]]
+ local fh = ioopen(path)
+ passed_paths[#passed_paths+1] = path
+ passed_paths_set = tabletohash(passed_paths, true)
+ if not fh then
+ report("log", 2, "db", "cannot open file", "%s", path)
+ return results
+ end
+ local incomments = false
+ for line in fh:lines() do
+ while line and line ~= "" do
+ -- spaghetti code... hmmm...
+ if incomments then
+ local tmp = stringfind(line, '-->') --- wtf?
+ if tmp then
+ incomments = false
+ line = stringsub(line, tmp+3)
+ else
+ line = nil
+ end
+ else
+ local tmp = stringfind(line, '<!--')
+ local newline = line
+ if tmp then
+ -- for the analysis, we take everything that is before the
+ -- comment sign
+ newline = stringsub(line, 1, tmp-1)
+ -- and we loop again with the comment
+ incomments = true
+ line = stringsub(line, tmp+4)
+ else
+ -- if there is no comment start, the block after that will
+ -- end the analysis, we exit the while loop
+ line = nil
+ end
+ for dir in stringgmatch(newline, '<dir>([^<]+)</dir>') do
+ -- now we need to replace ~ by kpse.expand_path('~')
+ if stringsub(dir, 1, 1) == '~' then
+ dir = filejoin(kpseexpand_path('~'), stringsub(dir, 2))
+ end
+ -- we exclude paths with texmf in them, as they should be
+ -- found anyway
+ if not stringfind(dir, 'texmf') then
+ results[#results+1] = dir
+ end
+ end
+ for include in stringgmatch(newline, '<include[^<]*>([^<]+)</include>') do
+ -- include here can be four things: a directory or a file,
+ -- in absolute or relative path.
+ if stringsub(include, 1, 1) == '~' then
+ include = filejoin(kpseexpand_path('~'),stringsub(include, 2))
+ -- First if the path is relative, we make it absolute:
+ elseif not lfs.isfile(include) and not lfs.isdir(include) then
+ include = filejoin(file.dirname(path), include)
+ end
+ if lfs.isfile(include)
+ and kpsereadable_file(include)
+ and not passed_paths_set[include]
+ then
+ -- maybe we should prevent loops here?
+                        -- we exclude paths with texmf in them, as they should
+ -- be found otherwise
+ read_fonts_conf(include, results, passed_paths)
+ elseif lfs.isdir(include) then
+ for _,f in next, dirglob(filejoin(include, "*.conf")) do
+ if not passed_paths_set[f] then
+ read_fonts_conf(f, results, passed_paths)
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ fh:close()
+ return results
+end
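+
+--- Illustrative input: a fonts.conf fragment like
+---
+---     <dir>/usr/share/fonts</dir>
+---     <dir>~/.fonts</dir>
+---     <include ignore_missing="yes">conf.d</include>
+---
+--- contributes “/usr/share/fonts” and the expansion of “~/.fonts” to
+--- the results and then recurses into every *.conf file below conf.d
+--- (relative includes are resolved against the including file).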
+
+-- for testing purpose
+names.read_fonts_conf = read_fonts_conf
+
+--- TODO stuff those paths into some writable table
+local function get_os_dirs()
+ if os.name == 'macosx' then
+ return {
+ filejoin(kpseexpand_path('~'), "Library/Fonts"),
+ "/Library/Fonts",
+ "/System/Library/Fonts",
+ "/Network/Library/Fonts",
+ }
+ elseif os.type == "windows" or os.type == "msdos" or os.name == "cygwin" then
+ local windir = os.getenv("WINDIR")
+ return { filejoin(windir, 'Fonts') }
+ else
+ local passed_paths = {}
+ local os_dirs = {}
+        -- what about ~/.config/fontconfig/fonts.conf etc.?
+        -- Answer: they should be included by the others; please report if they are not
+ for _,p in next, {"/usr/local/etc/fonts/fonts.conf", "/etc/fonts/fonts.conf"} do
+ if lfs.isfile(p) then
+ read_fonts_conf(p, os_dirs, passed_paths)
+ end
+ end
+ return os_dirs
+ end
+ return {}
+end
+
+local function scan_os_fonts(fontnames, newfontnames)
+ --[[
+    This function scans the OS fonts through
+    - fontconfig for Unix (it reads fonts.conf and scans the directories listed)
+    - a static set of directories for Windows and Mac OS X
+ ]]
+ report("info", 1, "db", "Scanning OS fonts...")
+ report("info", 2, "db", "Searching in static system directories...")
+ for _,d in next, get_os_dirs() do
+ scan_dir(d, fontnames, newfontnames, false)
+ end
+end
+
+update_names = function (fontnames, force)
+ --[[
+    The main function; it scans everything
+ - fontnames is the final table to return
+ - force is whether we rebuild it from scratch or not
+ ]]
+ report("info", 1, "db", "Updating the font names database")
+
+ if force then
+ fontnames = fontnames_init()
+ else
+ if not fontnames then
+ fontnames = load_names()
+ end
+ if fontnames.version ~= names.version then
+ fontnames = fontnames_init()
+ report("log", 1, "db", "No font names database or old "
+ .. "one found; generating new one")
+ end
+ end
+ local newfontnames = fontnames_init()
+ read_blacklist()
+ --installed_fonts_scanned = false
+ --scan_installed_fonts(fontnames, newfontnames) --- see fixme above
+ scan_texmf_fonts(fontnames, newfontnames)
+ --if not installed_fonts_scanned
+ --and stringis_empty(kpseexpand_path("$OSFONTDIR"))
+ if stringis_empty(kpseexpand_path("$OSFONTDIR"))
+ then
+ scan_os_fonts(fontnames, newfontnames)
+ end
+ return newfontnames
+end
+
+save_names = function (fontnames)
+ local path = names.path.dir
+ if not lfs.isdir(path) then
+ dirmkdirs(path)
+ end
+ path = filejoin(path, names.path.basename)
+ if fileiswritable(path) then
+ local luaname, lucname = make_name(path)
+ tabletofile(luaname, fontnames, true)
+ caches.compile(fontnames,luaname,lucname)
+ report("info", 0, "db", "Font names database saved")
+ return path
+ else
+ report("info", 0, "db", "Failed to save names database")
+ return nil
+ end
+end
+
+scan_external_dir = function (dir)
+ local old_names, new_names
+ if fonts_loaded then
+ old_names = names.data
+ else
+ old_names = load_names()
+ fonts_loaded = true
+ end
+ new_names = tablecopy(old_names)
+ scan_dir(dir, old_names, new_names)
+ names.data = new_names
+end
+
+--- export functionality to the namespace “fonts.names”
+names.scan = scan_external_dir
+names.load = load_names
+names.update = update_names
+names.save = save_names
+
+names.resolve = resolve --- replace the resolver from luatex-fonts
+names.resolvespec = resolve
+names.find_closest = find_closest
+
+--- dummy required by luatex-fonts (cf. luatex-fonts-syn.lua)
+
+fonts.names.getfilename = function (askedname,suffix) return "" end
+
+-- vim:tw=71:sw=4:ts=4:expandtab
diff --git a/luaotfload-features.lua b/luaotfload-features.lua
new file mode 100644
index 0000000..0121ede
--- /dev/null
+++ b/luaotfload-features.lua
@@ -0,0 +1,575 @@
+if not modules then modules = { } end modules ["features"] = {
+ version = 1.000,
+ comment = "companion to luaotfload.lua",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local format, insert = string.format, table.insert
+local type, next = type, next
+local lpegmatch = lpeg.match
+
+---[[ begin included font-ltx.lua ]]
+--- this appears to be based in part on luatex-fonts-def.lua
+
+local fonts = fonts
+
+-- A bit of tuning for definitions.
+
+fonts.constructors.namemode = "specification" -- somehow latex needs this (changed name!) => will change into an overload
+
+-- tricky: we sort of bypass the parser and directly feed all into
+-- the sub parser
+
+function fonts.definers.getspecification(str)
+ return "", str, "", ":", str
+end
+
+local feature_list = { }
+
+local report = logs.names_report
+
+local stringlower = string.lower
+local stringsub = string.sub
+local stringgsub = string.gsub
+local stringfind = string.find
+local stringexplode = string.explode
+local stringis_empty = string.is_empty
+
+local supported = {
+ b = "bold",
+ i = "italic",
+ bi = "bolditalic",
+ aat = false,
+ icu = false,
+ gr = false,
+}
+
+--- this parses the optional flags after the slash
+--- the original behavior is that multiple slashes
+--- are valid but they might cancel prior settings
+--- example: {name:Antykwa Torunska/I/B} -> bold
+
+local isstyle = function (request)
+ request = stringlower(request)
+ request = stringexplode(request, "/")
+
+ for _,v in next, request do
+ local stylename = supported[v]
+ if stylename then
+ feature_list.style = stylename
+ elseif stringfind(v, "^s=") then
+ --- after all, we want everything after the second byte ...
+ local val = stringsub(v, 3)
+ feature_list.optsize = val
+ elseif stylename == false then
+ report("log", 0,
+ "load font", "unsupported font option: %s", v)
+ elseif not stringis_empty(v) then
+ feature_list.style = stringgsub(v, "[^%a%d]", "")
+ end
+ end
+end
+
+local defaults = {
+ dflt = {
+ "ccmp", "locl", "rlig", "liga", "clig",
+ "kern", "mark", "mkmk", 'itlc',
+ },
+ arab = {
+ "ccmp", "locl", "isol", "fina", "fin2",
+ "fin3", "medi", "med2", "init", "rlig",
+ "calt", "liga", "cswh", "mset", "curs",
+ "kern", "mark", "mkmk",
+ },
+ deva = {
+ "ccmp", "locl", "init", "nukt", "akhn",
+ "rphf", "blwf", "half", "pstf", "vatu",
+ "pres", "blws", "abvs", "psts", "haln",
+ "calt", "blwm", "abvm", "dist", "kern",
+ "mark", "mkmk",
+ },
+ khmr = {
+ "ccmp", "locl", "pref", "blwf", "abvf",
+ "pstf", "pres", "blws", "abvs", "psts",
+ "clig", "calt", "blwm", "abvm", "dist",
+ "kern", "mark", "mkmk",
+ },
+ thai = {
+ "ccmp", "locl", "liga", "kern", "mark",
+ "mkmk",
+ },
+ hang = {
+ "ccmp", "ljmo", "vjmo", "tjmo",
+ },
+}
+
+defaults.beng = defaults.deva
+defaults.guru = defaults.deva
+defaults.gujr = defaults.deva
+defaults.orya = defaults.deva
+defaults.taml = defaults.deva
+defaults.telu = defaults.deva
+defaults.knda = defaults.deva
+defaults.mlym = defaults.deva
+defaults.sinh = defaults.deva
+
+defaults.syrc = defaults.arab
+defaults.mong = defaults.arab
+defaults.nko = defaults.arab
+
+defaults.tibt = defaults.khmr
+
+defaults.lao = defaults.thai
+
+local function set_default_features(script)
+ local features
+ local script = script or "dflt"
+ report("log", 0, "load font",
+ "auto-selecting default features for script: %s",
+ script)
+ if defaults[script] then
+ features = defaults[script]
+ else
+ features = defaults["dflt"]
+ end
+ for _,v in next, features do
+ if feature_list[v] ~= false then
+ feature_list[v] = true
+ end
+ end
+end
+
+local function issome () feature_list.lookup = 'name' end
+local function isfile () feature_list.lookup = 'file' end
+local function isname () feature_list.lookup = 'name' end
+local function thename(s) feature_list.name = s end
+local function issub (v) feature_list.sub = v end
+local function istrue (s) feature_list[s] = true end
+local function isfalse(s) feature_list[s] = false end
+local function iskey (k,v) feature_list[k] = v end
+
+local P, S, R, C = lpeg.P, lpeg.S, lpeg.R, lpeg.C
+
+local spaces = P(" ")^0
+--local namespec = (1-S("/:("))^0 -- was: (1-S("/: ("))^0
+--[[phg-- this prevents matching of absolute paths as file names --]]--
+local namespec = (1-S("/:("))^1
+local filespec = (R("az", "AZ") * P(":"))^-1 * (1-S(":("))^1
+local stylespec = spaces * P("/") * (((1-P(":"))^0)/isstyle) * spaces
+local filename = (P("file:")/isfile * (filespec/thename)) + (P("[") * P(true)/isname * (((1-P("]"))^0)/thename) * P("]"))
+local fontname = (P("name:")/isname * (namespec/thename)) + P(true)/issome * (namespec/thename)
+local sometext = (R("az","AZ","09") + S("+-.,"))^1
+local truevalue = P("+") * spaces * (sometext/istrue)
+local falsevalue = P("-") * spaces * (sometext/isfalse)
+local keyvalue = P("+") + (C(sometext) * spaces * P("=") * spaces * C(sometext))/iskey
+local somevalue = sometext/istrue
+local subvalue = P("(") * (C(P(1-S("()"))^1)/issub) * P(")") -- for Kim
+local option = spaces * (keyvalue + falsevalue + truevalue + somevalue) * spaces
+local options = P(":") * spaces * (P(";")^0 * option)^0
+local pattern = (filename + fontname) * subvalue^0 * stylespec^0 * options^0
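+
+--- Illustrative parse: a request in the colon syntax such as
+---
+---     name:Linux Libertine O/I:+smcp;script=latn
+---
+--- sets lookup = "name" and name = "Linux Libertine O"; the slash
+--- modifier “I” selects the italic style, and the option list yields
+--- smcp = true plus script = "latn" in the feature list (a sketch;
+--- the exact behaviour is defined by the lpeg grammar above).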
+
+local function colonized(specification) -- xetex mode
+ feature_list = { }
+ lpeg.match(pattern,specification.specification)
+ set_default_features(feature_list.script)
+ if feature_list.style then
+ specification.style = feature_list.style
+ feature_list.style = nil
+ end
+ if feature_list.optsize then
+ specification.optsize = feature_list.optsize
+ feature_list.optsize = nil
+ end
+ if feature_list.name then
+ if resolvers.findfile(feature_list.name, "tfm") then
+ feature_list.lookup = "file"
+ feature_list.name = file.addsuffix(feature_list.name, "tfm")
+ elseif resolvers.findfile(feature_list.name, "ofm") then
+ feature_list.lookup = "file"
+ feature_list.name = file.addsuffix(feature_list.name, "ofm")
+ end
+
+ specification.name = feature_list.name
+ feature_list.name = nil
+ end
+ if feature_list.lookup then
+ specification.lookup = feature_list.lookup
+ feature_list.lookup = nil
+ end
+ if feature_list.sub then
+ specification.sub = feature_list.sub
+ feature_list.sub = nil
+ end
+ if not feature_list.mode then
+ -- if no mode is set, use our default
+ feature_list.mode = fonts.mode
+ end
+ specification.features.normal = fonts.handlers.otf.features.normalize(feature_list)
+ return specification
+end
+
+fonts.definers.registersplit(":",colonized,"cryptic")
+fonts.definers.registersplit("", colonized,"more cryptic") -- catches \font\text=[names]
+
+function fonts.definers.applypostprocessors(tfmdata)
+ local postprocessors = tfmdata.postprocessors
+ if postprocessors then
+ for i=1,#postprocessors do
+ local extrahash = postprocessors[i](tfmdata) -- after scaling etc
+ if type(extrahash) == "string" and extrahash ~= "" then
+ -- e.g. a reencoding needs this
+ extrahash = string.gsub(lower(extrahash),"[^a-z]","-")
+ tfmdata.properties.fullname = format("%s-%s",tfmdata.properties.fullname,extrahash)
+ end
+ end
+ end
+ return tfmdata
+end
+---[[ end included font-ltx.lua ]]
+
+--[[doc--
+This uses the code from luatex-fonts-merged (<- font-otc.lua) instead
+of the removed luaotfload-font-otc.lua.
+
+TODO find out how far we get setting features without these lines,
+relying on luatex-fonts only (it *does* handle features somehow, after
+all).
+--doc]]--
+
+-- we assume that the other otf stuff is loaded already
+
+---[[ begin snippet from font-otc.lua ]]
+local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
+local report_otf = logs.reporter("fonts","otf loading")
+
+local otf = fonts.handlers.otf
+local registerotffeature = otf.features.register
+local setmetatableindex = table.setmetatableindex
+
+-- In the userdata interface we can no longer tweak the loaded font as
+-- conveniently as before. For instance, instead of pushing extra data
+-- into the table using the original structure, we now have to operate on
+-- the mkiv representation. And as the fontloader interface is modelled
+-- after fontforge we cannot change that one too much either.
+
+local types = {
+ substitution = "gsub_single",
+ ligature = "gsub_ligature",
+ alternate = "gsub_alternate",
+}
+
+setmetatableindex(types, function(t,k) t[k] = k return k end) -- "key"
+
+local everywhere = { ["*"] = { ["*"] = true } } -- or: { ["*"] = { "*" } }
+local noflags = { }
+
+local function addfeature(data,feature,specifications)
+ local descriptions = data.descriptions
+ local resources = data.resources
+ local lookups = resources.lookups
+ local gsubfeatures = resources.features.gsub
+ if gsubfeatures and gsubfeatures[feature] then
+ -- already present
+ else
+ local sequences = resources.sequences
+ local fontfeatures = resources.features
+ local unicodes = resources.unicodes
+ local lookuptypes = resources.lookuptypes
+ local splitter = lpeg.splitter(" ",unicodes)
+ local done = 0
+ local skip = 0
+ if not specifications[1] then
+ -- so we accept a one entry specification
+ specifications = { specifications }
+ end
+ -- subtables are tables themselves but we also accept flattened singular subtables
+ for s=1,#specifications do
+ local specification = specifications[s]
+ local valid = specification.valid
+ if not valid or valid(data,specification,feature) then
+ local initialize = specification.initialize
+ if initialize then
+ -- when false is returned we initialize only once
+ specification.initialize = initialize(specification) and initialize or nil
+ end
+ local askedfeatures = specification.features or everywhere
+ local subtables = specification.subtables or { specification.data } or { }
+ local featuretype = types[specification.type or "substitution"]
+ local featureflags = specification.flags or noflags
+ local added = false
+ local featurename = format("ctx_%s_%s",feature,s)
+ local st = { }
+ for t=1,#subtables do
+ local list = subtables[t]
+ local full = format("%s_%s",featurename,t)
+ st[t] = full
+ if featuretype == "gsub_ligature" then
+ lookuptypes[full] = "ligature"
+ for code, ligature in next, list do
+ local unicode = tonumber(code) or unicodes[code]
+ local description = descriptions[unicode]
+ if description then
+ local slookups = description.slookups
+ if type(ligature) == "string" then
+ ligature = { lpegmatch(splitter,ligature) }
+ end
+ local present = true
+ for i=1,#ligature do
+ if not descriptions[ligature[i]] then
+ present = false
+ break
+ end
+ end
+ if present then
+ if slookups then
+ slookups[full] = ligature
+ else
+ description.slookups = { [full] = ligature }
+ end
+ done, added = done + 1, true
+ else
+ skip = skip + 1
+ end
+ end
+ end
+ elseif featuretype == "gsub_single" then
+ lookuptypes[full] = "substitution"
+ for code, replacement in next, list do
+ local unicode = tonumber(code) or unicodes[code]
+ local description = descriptions[unicode]
+ if description then
+ local slookups = description.slookups
+ replacement = tonumber(replacement) or unicodes[replacement]
+ if descriptions[replacement] then
+ if slookups then
+ slookups[full] = replacement
+ else
+ description.slookups = { [full] = replacement }
+ end
+ done, added = done + 1, true
+ end
+ end
+ end
+ end
+ end
+ if added then
+ -- script = { lang1, lang2, lang3 } or script = { lang1 = true, ... }
+ for k, v in next, askedfeatures do
+ if v[1] then
+ askedfeatures[k] = table.tohash(v)
+ end
+ end
+ sequences[#sequences+1] = {
+ chain = 0,
+ features = { [feature] = askedfeatures },
+ flags = featureflags,
+ name = featurename,
+ subtables = st,
+ type = featuretype,
+ }
+ -- register in metadata (merge as there can be a few)
+ if not gsubfeatures then
+ gsubfeatures = { }
+ fontfeatures.gsub = gsubfeatures
+ end
+ local k = gsubfeatures[feature]
+ if not k then
+ k = { }
+ gsubfeatures[feature] = k
+ end
+ for script, languages in next, askedfeatures do
+ local kk = k[script]
+ if not kk then
+ kk = { }
+ k[script] = kk
+ end
+ for language, value in next, languages do
+ kk[language] = value
+ end
+ end
+ end
+ end
+ end
+ if trace_loading then
+ report_otf("registering feature %a, affected glyphs %a, skipped glyphs %a",feature,done,skip)
+ end
+ end
+end
+
+otf.enhancers.addfeature = addfeature
+
+local extrafeatures = { }
+
+function otf.addfeature(name,specification)
+ extrafeatures[name] = specification
+end
+
+local function enhance(data,filename,raw)
+ for feature, specification in next, extrafeatures do
+ addfeature(data,feature,specification)
+ end
+end
+
+otf.enhancers.register("check extra features",enhance)
+
+---[[ end snippet from font-otc.lua ]]
+
+local tlig = {
+ {
+ type = "substitution",
+ features = everywhere,
+ data = {
+ [0x0022] = 0x201D, -- quotedblright
+ [0x0027] = 0x2019, -- quoteleft
+ [0x0060] = 0x2018, -- quoteright
+ },
+ flags = { },
+ },
+ {
+ type = "ligature",
+ features = everywhere,
+ data = {
+ [0x2013] = {0x002D, 0x002D}, -- endash
+ [0x2014] = {0x002D, 0x002D, 0x002D}, -- emdash
+ [0x201C] = {0x2018, 0x2018}, -- quotedblleft
+ [0x201D] = {0x2019, 0x2019}, -- quotedblright
+ [0x201E] = {0x002C, 0x002C}, -- quotedblbase
+ [0x00A1] = {0x0021, 0x2018}, -- exclamdown
+ [0x00BF] = {0x003F, 0x2018}, -- questiondown
+ },
+ flags = { },
+ },
+ {
+ type = "ligature",
+ features = everywhere,
+ data = {
+ [0x201C] = {0x0060, 0x0060}, -- quotedblleft
+ [0x201D] = {0x0027, 0x0027}, -- quotedblright
+ [0x00A1] = {0x0021, 0x0060}, -- exclamdown
+ [0x00BF] = {0x003F, 0x0060}, -- questiondown
+ },
+ flags = { },
+ },
+}
+
+otf.addfeature("tlig", tlig)
+otf.addfeature("trep", { }) -- empty, all in tlig now
+
+local anum_arabic = { --- these are the same as in font-otc
+ [0x0030] = 0x0660,
+ [0x0031] = 0x0661,
+ [0x0032] = 0x0662,
+ [0x0033] = 0x0663,
+ [0x0034] = 0x0664,
+ [0x0035] = 0x0665,
+ [0x0036] = 0x0666,
+ [0x0037] = 0x0667,
+ [0x0038] = 0x0668,
+ [0x0039] = 0x0669,
+}
+
+local anum_persian = {--- these are the same as in font-otc
+ [0x0030] = 0x06F0,
+ [0x0031] = 0x06F1,
+ [0x0032] = 0x06F2,
+ [0x0033] = 0x06F3,
+ [0x0034] = 0x06F4,
+ [0x0035] = 0x06F5,
+ [0x0036] = 0x06F6,
+ [0x0037] = 0x06F7,
+ [0x0038] = 0x06F8,
+ [0x0039] = 0x06F9,
+}
+
+local function valid(data)
+ local features = data.resources.features
+ if features then
+ for k, v in next, features do
+ for k, v in next, v do
+ if v.arab then
+ return true
+ end
+ end
+ end
+ end
+end
+
+local anum_specification = {
+ {
+ type = "substitution",
+ features = { arab = { far = true, urd = true, snd = true } },
+ data = anum_persian,
+ flags = { },
+ valid = valid,
+ },
+ {
+ type = "substitution",
+ features = { arab = { ["*"] = true } },
+ data = anum_arabic,
+ flags = { },
+ valid = valid,
+ },
+}
+
+--- below are the specifications as given in the removed font-otc.lua;
+--- the rest was identical to what this file had from the beginning.
+--- both make the “anum.tex” test pass anyway
+--
+--local anum_specification = {
+-- {
+-- type = "substitution",
+-- features = { arab = { urd = true, dflt = true } },
+-- data = anum_arabic,
+-- flags = noflags, -- { },
+-- valid = valid,
+-- },
+-- {
+-- type = "substitution",
+-- features = { arab = { urd = true } },
+-- data = anum_persian,
+-- flags = noflags, -- { },
+-- valid = valid,
+-- },
+--}
+--
+otf.addfeature("anum",anum_specification)
+
+registerotffeature {
+ name = 'anum',
+ description = 'arabic digits',
+}
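+
+--- selection sketch for the two blocks above (the language tags are
+--- hypothetical user input): with { arab = { urd = true } } the first
+--- block applies and "0" (0x0030) is replaced by 0x06F0 (extended
+--- arabic-indic zero); with { arab = { dflt = true } } only the wildcard
+--- block applies and "0" becomes 0x0660 (arabic-indic zero)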
+
+if characters.combined then
+
+ local tcom = { }
+
+ local function initialize()
+ characters.initialize()
+ for first, seconds in next, characters.combined do
+ for second, combination in next, seconds do
+ tcom[combination] = { first, second }
+ end
+ end
+ -- return false
+ end
+
+ local tcom_specification = {
+ type = "ligature",
+ features = everywhere,
+ data = tcom,
+ flags = noflags,
+ initialize = initialize,
+ }
+
+ otf.addfeature("tcom",tcom_specification)
+
+ registerotffeature {
+ name = 'tcom',
+ description = 'tex combinations',
+ }
+
+end
+
+-- vim:tw=71:sw=4:ts=4:expandtab
diff --git a/otfl-fonts-cbk.lua b/luaotfload-fonts-cbk.lua
index 9db94f6..9db94f6 100644
--- a/otfl-fonts-cbk.lua
+++ b/luaotfload-fonts-cbk.lua
diff --git a/luaotfload-fonts-def.lua b/luaotfload-fonts-def.lua
new file mode 100644
index 0000000..0c2f0db
--- /dev/null
+++ b/luaotfload-fonts-def.lua
@@ -0,0 +1,97 @@
+if not modules then modules = { } end modules ['luatex-font-def'] = {
+ version = 1.001,
+ comment = "companion to luatex-*.tex",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+
+local fonts = fonts
+
+-- A bit of tuning for definitions.
+
+fonts.constructors.namemode = "specification" -- somehow latex needs this (changed name!) => will change into an overload
+
+-- tricky: we sort of bypass the parser and directly feed all into
+-- the sub parser
+
+function fonts.definers.getspecification(str)
+ return "", str, "", ":", str
+end
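+
+-- e.g. (sketch): getspecification("file:Iwona-Regular:+liga") returns
+--   "", "file:Iwona-Regular:+liga", "", ":", "file:Iwona-Regular:+liga"
+-- i.e. the entire request string is handed to the sub parser below unchanged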
+
+-- the generic name parser (different from context!)
+
+local list = { }
+
+local function issome () list.lookup = 'name' end -- xetex mode prefers name (not in context!)
+local function isfile () list.lookup = 'file' end
+local function isname () list.lookup = 'name' end
+local function thename(s) list.name = s end
+local function issub (v) list.sub = v end
+local function iscrap (s) list.crap = string.lower(s) end
+local function iskey (k,v) list[k] = v end
+local function istrue (s) list[s] = true end
+local function isfalse(s) list[s] = false end
+
+local P, S, R, C = lpeg.P, lpeg.S, lpeg.R, lpeg.C
+
+local spaces = P(" ")^0
+local namespec = (1-S("/:("))^0 -- was: (1-S("/: ("))^0
+local crapspec = spaces * P("/") * (((1-P(":"))^0)/iscrap) * spaces
+local filename_1 = P("file:")/isfile * (namespec/thename)
+local filename_2 = P("[") * P(true)/isname * (((1-P("]"))^0)/thename) * P("]")
+local fontname_1 = P("name:")/isname * (namespec/thename)
+local fontname_2 = P(true)/issome * (namespec/thename)
+local sometext = (R("az","AZ","09") + S("+-."))^1
+local truevalue = P("+") * spaces * (sometext/istrue)
+local falsevalue = P("-") * spaces * (sometext/isfalse)
+local keyvalue = (C(sometext) * spaces * P("=") * spaces * C(sometext))/iskey
+local somevalue = sometext/istrue
+local subvalue = P("(") * (C(P(1-S("()"))^1)/issub) * P(")") -- for Kim
+local option = spaces * (keyvalue + falsevalue + truevalue + somevalue) * spaces
+local options = P(":") * spaces * (P(";")^0 * option)^0
+
+local pattern = (filename_1 + filename_2 + fontname_1 + fontname_2) * subvalue^0 * crapspec^0 * options^0
+
+local function colonized(specification) -- xetex mode
+ list = { }
+ lpeg.match(pattern,specification.specification)
+ list.crap = nil -- style not supported, maybe some day
+ if list.name then
+ specification.name = list.name
+ list.name = nil
+ end
+ if list.lookup then
+ specification.lookup = list.lookup
+ list.lookup = nil
+ end
+ if list.sub then
+ specification.sub = list.sub
+ list.sub = nil
+ end
+ specification.features.normal = fonts.handlers.otf.features.normalize(list)
+ return specification
+end
+
+fonts.definers.registersplit(":",colonized,"cryptic")
+fonts.definers.registersplit("", colonized,"more cryptic") -- catches \font\text=[names]
+
+function fonts.definers.applypostprocessors(tfmdata)
+ local postprocessors = tfmdata.postprocessors
+ if postprocessors then
+ for i=1,#postprocessors do
+ local extrahash = postprocessors[i](tfmdata) -- after scaling etc
+ if type(extrahash) == "string" and extrahash ~= "" then
+ -- e.g. a reencoding needs this
+ extrahash = string.gsub(lower(extrahash),"[^a-z]","-")
+ tfmdata.properties.fullname = format("%s-%s",tfmdata.properties.fullname,extrahash)
+ end
+ end
+ end
+ return tfmdata
+end
diff --git a/otfl-fonts-enc.lua b/luaotfload-fonts-enc.lua
index e20c3a0..e20c3a0 100644
--- a/otfl-fonts-enc.lua
+++ b/luaotfload-fonts-enc.lua
diff --git a/otfl-fonts-ext.lua b/luaotfload-fonts-ext.lua
index d8884cc..b60d045 100644
--- a/otfl-fonts-ext.lua
+++ b/luaotfload-fonts-ext.lua
@@ -18,18 +18,14 @@ local otffeatures = fonts.constructors.newfeatures("otf")
local function initializeitlc(tfmdata,value)
if value then
- -- the magic 40 and it formula come from Dohyun Kim
- local parameters = tfmdata.parameters
+        -- the magic 40 and its formula come from Dohyun Kim but we might need another guess
+ local parameters = tfmdata.parameters
local italicangle = parameters.italicangle
if italicangle and italicangle ~= 0 then
- local uwidth = (parameters.uwidth or 40)/2
- for unicode, d in next, tfmdata.descriptions do
- local it = d.boundingbox[3] - d.width + uwidth
- if it ~= 0 then
- d.italic = it
- end
- end
- tfmdata.properties.hasitalics = true
+ local properties = tfmdata.properties
+ local factor = tonumber(value) or 1
+ properties.hasitalics = true
+ properties.autoitalicamount = factor * (parameters.uwidth or 40)/2
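+            -- e.g. (sketch): with the default uwidth of 40 and value 1 this
+            -- yields an autoitalicamount of 1 * 40/2 = 20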
end
end
end
diff --git a/otfl-fonts-lua.lua b/luaotfload-fonts-lua.lua
index ec3fe38..ec3fe38 100644
--- a/otfl-fonts-lua.lua
+++ b/luaotfload-fonts-lua.lua
diff --git a/otfl-fonts-tfm.lua b/luaotfload-fonts-tfm.lua
index b9bb1bd..b9bb1bd 100644
--- a/otfl-fonts-tfm.lua
+++ b/luaotfload-fonts-tfm.lua
diff --git a/luaotfload-lib-dir.lua b/luaotfload-lib-dir.lua
new file mode 100644
index 0000000..00cda38
--- /dev/null
+++ b/luaotfload-lib-dir.lua
@@ -0,0 +1,449 @@
+if not modules then modules = { } end modules ['l-dir'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- dir.expandname will be merged with cleanpath and collapsepath
+
+local type, select = type, select
+local find, gmatch, match, gsub = string.find, string.gmatch, string.match, string.gsub
+local concat, insert, remove = table.concat, table.insert, table.remove
+local lpegmatch = lpeg.match
+
+local P, S, R, C, Cc, Cs, Ct, Cv, V = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Cv, lpeg.V
+
+dir = dir or { }
+local dir = dir
+local lfs = lfs
+
+local attributes = lfs.attributes
+local walkdir = lfs.dir
+local isdir = lfs.isdir
+local isfile = lfs.isfile
+local currentdir = lfs.currentdir
+local chdir = lfs.chdir
+
+-- in case we load outside luatex
+
+if not isdir then
+ function isdir(name)
+ local a = attributes(name)
+ return a and a.mode == "directory"
+ end
+ lfs.isdir = isdir
+end
+
+if not isfile then
+ function isfile(name)
+ local a = attributes(name)
+ return a and a.mode == "file"
+ end
+ lfs.isfile = isfile
+end
+
+-- handy
+
+function dir.current()
+ return (gsub(currentdir(),"\\","/"))
+end
+
+-- optimizing for no find (*) does not save time
+
+--~ local function globpattern(path,patt,recurse,action) -- fails in recent luatex due to some change in lfs
+--~ local ok, scanner
+--~ if path == "/" then
+--~ ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
+--~ else
+--~ ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
+--~ end
+--~ if ok and type(scanner) == "function" then
+--~ if not find(path,"/$") then path = path .. '/' end
+--~ for name in scanner do
+--~ local full = path .. name
+--~ local mode = attributes(full,'mode')
+--~ if mode == 'file' then
+--~ if find(full,patt) then
+--~ action(full)
+--~ end
+--~ elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
+--~ globpattern(full,patt,recurse,action)
+--~ end
+--~ end
+--~ end
+--~ end
+
+local lfsisdir = isdir
+
+local function isdir(path)
+ path = gsub(path,"[/\\]+$","")
+ return lfsisdir(path)
+end
+
+lfs.isdir = isdir
+
+local function globpattern(path,patt,recurse,action)
+ if path == "/" then
+ path = path .. "."
+ elseif not find(path,"/$") then
+ path = path .. '/'
+ end
+ if isdir(path) then -- lfs.isdir does not like trailing /
+ for name in walkdir(path) do -- lfs.dir accepts trailing /
+ local full = path .. name
+ local mode = attributes(full,'mode')
+ if mode == 'file' then
+ if find(full,patt) then
+ action(full)
+ end
+ elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
+ globpattern(full,patt,recurse,action)
+ end
+ end
+ end
+end
+
+dir.globpattern = globpattern
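+
+--~ usage sketch (hypothetical path and pattern):
+--~ globpattern("/tmp/","%.log$",true,print)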
+
+local function collectpattern(path,patt,recurse,result)
+ local ok, scanner
+ result = result or { }
+ if path == "/" then
+ ok, scanner, first = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
+ else
+ ok, scanner, first = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
+ end
+ if ok and type(scanner) == "function" then
+ if not find(path,"/$") then path = path .. '/' end
+ for name in scanner, first do
+ local full = path .. name
+ local attr = attributes(full)
+ local mode = attr.mode
+ if mode == 'file' then
+ if find(full,patt) then
+ result[name] = attr
+ end
+ elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
+ attr.list = collectpattern(full,patt,recurse)
+ result[name] = attr
+ end
+ end
+ end
+ return result
+end
+
+dir.collectpattern = collectpattern
+
+local pattern = Ct {
+ [1] = (C(P(".") + P("/")^1) + C(R("az","AZ") * P(":") * P("/")^0) + Cc("./")) * V(2) * V(3),
+ [2] = C(((1-S("*?/"))^0 * P("/"))^0),
+ [3] = C(P(1)^0)
+}
+
+local filter = Cs ( (
+ P("**") / ".*" +
+ P("*") / "[^/]*" +
+ P("?") / "[^/]" +
+ P(".") / "%%." +
+ P("+") / "%%+" +
+ P("-") / "%%-" +
+ P(1)
+)^0 )
+
+local function glob(str,t)
+ if type(t) == "function" then
+ if type(str) == "table" then
+ for s=1,#str do
+ glob(str[s],t)
+ end
+ elseif isfile(str) then
+ t(str)
+ else
+ local split = lpegmatch(pattern,str) -- we could use the file splitter
+ if split then
+ local root, path, base = split[1], split[2], split[3]
+ local recurse = find(base,"%*%*")
+ local start = root .. path
+ local result = lpegmatch(filter,start .. base)
+ globpattern(start,result,recurse,t)
+ end
+ end
+ else
+ if type(str) == "table" then
+ local t = t or { }
+ for s=1,#str do
+ glob(str[s],t)
+ end
+ return t
+ elseif isfile(str) then
+ if t then
+ t[#t+1] = str
+ return t
+ else
+ return { str }
+ end
+ else
+ local split = lpegmatch(pattern,str) -- we could use the file splitter
+ if split then
+ local t = t or { }
+ local action = action or function(name) t[#t+1] = name end
+ local root, path, base = split[1], split[2], split[3]
+ local recurse = find(base,"%*%*")
+ local start = root .. path
+ local result = lpegmatch(filter,start .. base)
+ globpattern(start,result,recurse,action)
+ return t
+ else
+ return { }
+ end
+ end
+ end
+end
+
+dir.glob = glob
+
+--~ list = dir.glob("**/*.tif")
+--~ list = dir.glob("/**/*.tif")
+--~ list = dir.glob("./**/*.tif")
+--~ list = dir.glob("oeps/**/*.tif")
+--~ list = dir.glob("/oeps/**/*.tif")
+
+local function globfiles(path,recurse,func,files) -- func == pattern or function
+ if type(func) == "string" then
+ local s = func
+ func = function(name) return find(name,s) end
+ end
+ files = files or { }
+ local noffiles = #files
+ for name in walkdir(path) do
+ if find(name,"^%.") then
+ --- skip
+ else
+ local mode = attributes(name,'mode')
+ if mode == "directory" then
+ if recurse then
+ globfiles(path .. "/" .. name,recurse,func,files)
+ end
+ elseif mode == "file" then
+ if not func or func(name) then
+ noffiles = noffiles + 1
+ files[noffiles] = path .. "/" .. name
+ end
+ end
+ end
+ end
+ return files
+end
+
+dir.globfiles = globfiles
+
+-- t = dir.glob("c:/data/develop/context/sources/**/????-*.tex")
+-- t = dir.glob("c:/data/develop/tex/texmf/**/*.tex")
+-- t = dir.glob("c:/data/develop/context/texmf/**/*.tex")
+-- t = dir.glob("f:/minimal/tex/**/*")
+-- print(dir.ls("f:/minimal/tex/**/*"))
+-- print(dir.ls("*.tex"))
+
+function dir.ls(pattern)
+ return concat(glob(pattern),"\n")
+end
+
+--~ mkdirs("temp")
+--~ mkdirs("a/b/c")
+--~ mkdirs(".","/a/b/c")
+--~ mkdirs("a","b","c")
+
+local make_indeed = true -- false
+
+local onwindows = os.type == "windows" or find(os.getenv("PATH"),";")
+
+if onwindows then
+
+ function dir.mkdirs(...)
+ local str, pth = "", ""
+ for i=1,select("#",...) do
+ local s = select(i,...)
+ if s == "" then
+ -- skip
+ elseif str == "" then
+ str = s
+ else
+ str = str .. "/" .. s
+ end
+ end
+ local first, middle, last
+ local drive = false
+ first, middle, last = match(str,"^(//)(//*)(.*)$")
+ if first then
+ -- empty network path == local path
+ else
+ first, last = match(str,"^(//)/*(.-)$")
+ if first then
+ middle, last = match(str,"([^/]+)/+(.-)$")
+ if middle then
+ pth = "//" .. middle
+ else
+ pth = "//" .. last
+ last = ""
+ end
+ else
+ first, middle, last = match(str,"^([a-zA-Z]:)(/*)(.-)$")
+ if first then
+ pth, drive = first .. middle, true
+ else
+ middle, last = match(str,"^(/*)(.-)$")
+ if not middle then
+ last = str
+ end
+ end
+ end
+ end
+ for s in gmatch(last,"[^/]+") do
+ if pth == "" then
+ pth = s
+ elseif drive then
+ pth, drive = pth .. s, false
+ else
+ pth = pth .. "/" .. s
+ end
+ if make_indeed and not isdir(pth) then
+ lfs.mkdir(pth)
+ end
+ end
+ return pth, (isdir(pth) == true)
+ end
+
+ --~ print(dir.mkdirs("","","a","c"))
+ --~ print(dir.mkdirs("a"))
+ --~ print(dir.mkdirs("a:"))
+ --~ print(dir.mkdirs("a:/b/c"))
+ --~ print(dir.mkdirs("a:b/c"))
+ --~ print(dir.mkdirs("a:/bbb/c"))
+ --~ print(dir.mkdirs("/a/b/c"))
+ --~ print(dir.mkdirs("/aaa/b/c"))
+ --~ print(dir.mkdirs("//a/b/c"))
+ --~ print(dir.mkdirs("///a/b/c"))
+ --~ print(dir.mkdirs("a/bbb//ccc/"))
+
+else
+
+ function dir.mkdirs(...)
+ local str, pth = "", ""
+ for i=1,select("#",...) do
+ local s = select(i,...)
+ if s and s ~= "" then -- we catch nil and false
+ if str ~= "" then
+ str = str .. "/" .. s
+ else
+ str = s
+ end
+ end
+ end
+ str = gsub(str,"/+","/")
+ if find(str,"^/") then
+ pth = "/"
+ for s in gmatch(str,"[^/]+") do
+ local first = (pth == "/")
+ if first then
+ pth = pth .. s
+ else
+ pth = pth .. "/" .. s
+ end
+ if make_indeed and not first and not isdir(pth) then
+ lfs.mkdir(pth)
+ end
+ end
+ else
+ pth = "."
+ for s in gmatch(str,"[^/]+") do
+ pth = pth .. "/" .. s
+ if make_indeed and not isdir(pth) then
+ lfs.mkdir(pth)
+ end
+ end
+ end
+ return pth, (isdir(pth) == true)
+ end
+
+ --~ print(dir.mkdirs("","","a","c"))
+ --~ print(dir.mkdirs("a"))
+ --~ print(dir.mkdirs("/a/b/c"))
+ --~ print(dir.mkdirs("/aaa/b/c"))
+ --~ print(dir.mkdirs("//a/b/c"))
+ --~ print(dir.mkdirs("///a/b/c"))
+ --~ print(dir.mkdirs("a/bbb//ccc/"))
+
+end
+
+dir.makedirs = dir.mkdirs
+
+-- we can only define it here as it uses dir.current
+
+if onwindows then
+
+ function dir.expandname(str) -- will be merged with cleanpath and collapsepath
+ local first, nothing, last = match(str,"^(//)(//*)(.*)$")
+ if first then
+ first = dir.current() .. "/" -- dir.current sanitizes
+ end
+ if not first then
+ first, last = match(str,"^(//)/*(.*)$")
+ end
+ if not first then
+ first, last = match(str,"^([a-zA-Z]:)(.*)$")
+ if first and not find(last,"^/") then
+ local d = currentdir()
+ if chdir(first) then
+ first = dir.current()
+ end
+ chdir(d)
+ end
+ end
+ if not first then
+ first, last = dir.current(), str
+ end
+ last = gsub(last,"//","/")
+ last = gsub(last,"/%./","/")
+ last = gsub(last,"^/*","")
+ first = gsub(first,"/*$","")
+ if last == "" or last == "." then
+ return first
+ else
+ return first .. "/" .. last
+ end
+ end
+
+else
+
+ function dir.expandname(str) -- will be merged with cleanpath and collapsepath
+ if not find(str,"^/") then
+ str = currentdir() .. "/" .. str
+ end
+ str = gsub(str,"//","/")
+ str = gsub(str,"/%./","/")
+ str = gsub(str,"(.)/%.$","%1")
+ return str
+ end
+
+end
+
+file.expandname = dir.expandname -- for convenience
+
+local stack = { }
+
+function dir.push(newdir)
+ insert(stack,currentdir())
+ if newdir and newdir ~= "" then
+ chdir(newdir)
+ end
+end
+
+function dir.pop()
+ local d = remove(stack)
+ if d then
+ chdir(d)
+ end
+ return d
+end
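+
+--~ usage sketch:
+--~ dir.push("/tmp")  -- remember the current directory, then chdir to /tmp
+--~ dir.pop()         -- change back to the remembered directory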
diff --git a/luaotfload-loaders.lua b/luaotfload-loaders.lua
new file mode 100644
index 0000000..8ab6b29
--- /dev/null
+++ b/luaotfload-loaders.lua
@@ -0,0 +1,24 @@
+local fonts = fonts
+
+---
+--- opentype reader (from font-otf.lua):
+--- (spec : table) -> (suffix : string) -> (format : string) -> (font : table)
+---
+
+local pfb_reader = function (specification)
+ return readers.opentype(specification,"pfb","type1")
+end
+
+local pfa_reader = function (specification)
+ return readers.opentype(specification,"pfa","type1")
+end
+
+fonts.formats.pfb = "type1"
+fonts.readers.pfb = pfb_reader
+fonts.handlers.pfb = { } --- empty, as with tfm
+
+fonts.formats.pfa = "type1"
+fonts.readers.pfa = pfa_reader
+fonts.handlers.pfa = { }
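+
+-- dispatch sketch: a font request that resolves to e.g. "foo.pfb" ends up
+-- calling fonts.readers.pfb(specification), which in turn delegates to
+-- readers.opentype(specification,"pfb","type1")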
+
+-- vim:tw=71:sw=2:ts=2:expandtab
diff --git a/luaotfload-merged.lua b/luaotfload-merged.lua
new file mode 100644
index 0000000..314305a
--- /dev/null
+++ b/luaotfload-merged.lua
@@ -0,0 +1,11041 @@
+-- merged file : luatex-fonts-merged.lua
+-- parent file : luatex-fonts.lua
+-- merge date : 04/20/13 13:33:53
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['l-lua']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local major,minor=string.match(_VERSION,"^[^%d]+(%d+)%.(%d+).*$")
+_MAJORVERSION=tonumber(major) or 5
+_MINORVERSION=tonumber(minor) or 1
+_LUAVERSION=_MAJORVERSION+_MINORVERSION/10
+if not lpeg then
+ lpeg=require("lpeg")
+end
+if loadstring then
+ local loadnormal=load
+ function load(first,...)
+ if type(first)=="string" then
+ return loadstring(first,...)
+ else
+ return loadnormal(first,...)
+ end
+ end
+else
+ loadstring=load
+end
+if not ipairs then
+ local function iterate(a,i)
+ i=i+1
+ local v=a[i]
+ if v~=nil then
+ return i,v
+ end
+ end
+ function ipairs(a)
+ return iterate,a,0
+ end
+end
+if not pairs then
+ function pairs(t)
+ return next,t
+ end
+end
+if not table.unpack then
+ table.unpack=_G.unpack
+elseif not unpack then
+ _G.unpack=table.unpack
+end
+if not package.loaders then
+ package.loaders=package.searchers
+end
+local print,select,tostring=print,select,tostring
+local inspectors={}
+function setinspector(inspector)
+ inspectors[#inspectors+1]=inspector
+end
+function inspect(...)
+ for s=1,select("#",...) do
+ local value=select(s,...)
+ local done=false
+ for i=1,#inspectors do
+ done=inspectors[i](value)
+ if done then
+ break
+ end
+ end
+ if not done then
+ print(tostring(value))
+ end
+ end
+end
+local dummy=function() end
+function optionalrequire(...)
+ local ok,result=xpcall(require,dummy,...)
+ if ok then
+ return result
+ end
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['l-lpeg']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+lpeg=require("lpeg")
+local type,next,tostring=type,next,tostring
+local byte,char,gmatch,format=string.byte,string.char,string.gmatch,string.format
+local floor=math.floor
+local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt
+local lpegtype,lpegmatch,lpegprint=lpeg.type,lpeg.match,lpeg.print
+setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
+lpeg.patterns=lpeg.patterns or {}
+local patterns=lpeg.patterns
+local anything=P(1)
+local endofstring=P(-1)
+local alwaysmatched=P(true)
+patterns.anything=anything
+patterns.endofstring=endofstring
+patterns.beginofstring=alwaysmatched
+patterns.alwaysmatched=alwaysmatched
+local digit,sign=R('09'),S('+-')
+local cr,lf,crlf=P("\r"),P("\n"),P("\r\n")
+local newline=crlf+S("\r\n")
+local escaped=P("\\")*anything
+local squote=P("'")
+local dquote=P('"')
+local space=P(" ")
+local utfbom_32_be=P('\000\000\254\255')
+local utfbom_32_le=P('\255\254\000\000')
+local utfbom_16_be=P('\255\254')
+local utfbom_16_le=P('\254\255')
+local utfbom_8=P('\239\187\191')
+local utfbom=utfbom_32_be+utfbom_32_le+utfbom_16_be+utfbom_16_le+utfbom_8
+local utftype=utfbom_32_be*Cc("utf-32-be")+utfbom_32_le*Cc("utf-32-le")+utfbom_16_be*Cc("utf-16-be")+utfbom_16_le*Cc("utf-16-le")+utfbom_8*Cc("utf-8")+alwaysmatched*Cc("utf-8")
+local utfoffset=utfbom_32_be*Cc(4)+utfbom_32_le*Cc(4)+utfbom_16_be*Cc(2)+utfbom_16_le*Cc(2)+utfbom_8*Cc(3)+Cc(0)
+local utf8next=R("\128\191")
+patterns.utf8one=R("\000\127")
+patterns.utf8two=R("\194\223")*utf8next
+patterns.utf8three=R("\224\239")*utf8next*utf8next
+patterns.utf8four=R("\240\244")*utf8next*utf8next*utf8next
+patterns.utfbom=utfbom
+patterns.utftype=utftype
+patterns.utfoffset=utfoffset
+local utf8char=patterns.utf8one+patterns.utf8two+patterns.utf8three+patterns.utf8four
+local validutf8char=utf8char^0*endofstring*Cc(true)+Cc(false)
+local utf8character=P(1)*R("\128\191")^0
+patterns.utf8=utf8char
+patterns.utf8char=utf8char
+patterns.utf8character=utf8character
+patterns.validutf8=validutf8char
+patterns.validutf8char=validutf8char
+local eol=S("\n\r")
+local spacer=S(" \t\f\v")
+local whitespace=eol+spacer
+local nonspacer=1-spacer
+local nonwhitespace=1-whitespace
+patterns.eol=eol
+patterns.spacer=spacer
+patterns.whitespace=whitespace
+patterns.nonspacer=nonspacer
+patterns.nonwhitespace=nonwhitespace
+local stripper=spacer^0*C((spacer^0*nonspacer^1)^0)
+local collapser=Cs(spacer^0/""*nonspacer^0*((spacer^0/" "*nonspacer^1)^0))
+patterns.stripper=stripper
+patterns.collapser=collapser
+patterns.digit=digit
+patterns.sign=sign
+patterns.cardinal=sign^0*digit^1
+patterns.integer=sign^0*digit^1
+patterns.unsigned=digit^0*P('.')*digit^1
+patterns.float=sign^0*patterns.unsigned
+patterns.cunsigned=digit^0*P(',')*digit^1
+patterns.cfloat=sign^0*patterns.cunsigned
+patterns.number=patterns.float+patterns.integer
+patterns.cnumber=patterns.cfloat+patterns.integer
+patterns.oct=P("0")*R("07")^1
+patterns.octal=patterns.oct
+patterns.HEX=P("0x")*R("09","AF")^1
+patterns.hex=P("0x")*R("09","af")^1
+patterns.hexadecimal=P("0x")*R("09","AF","af")^1
+patterns.lowercase=R("az")
+patterns.uppercase=R("AZ")
+patterns.letter=patterns.lowercase+patterns.uppercase
+patterns.space=space
+patterns.tab=P("\t")
+patterns.spaceortab=patterns.space+patterns.tab
+patterns.newline=newline
+patterns.emptyline=newline^1
+patterns.equal=P("=")
+patterns.comma=P(",")
+patterns.commaspacer=P(",")*spacer^0
+patterns.period=P(".")
+patterns.colon=P(":")
+patterns.semicolon=P(";")
+patterns.underscore=P("_")
+patterns.escaped=escaped
+patterns.squote=squote
+patterns.dquote=dquote
+patterns.nosquote=(escaped+(1-squote))^0
+patterns.nodquote=(escaped+(1-dquote))^0
+patterns.unsingle=(squote/"")*patterns.nosquote*(squote/"")
+patterns.undouble=(dquote/"")*patterns.nodquote*(dquote/"")
+patterns.unquoted=patterns.undouble+patterns.unsingle
+patterns.unspacer=((patterns.spacer^1)/"")^0
+patterns.singlequoted=squote*patterns.nosquote*squote
+patterns.doublequoted=dquote*patterns.nodquote*dquote
+patterns.quoted=patterns.doublequoted+patterns.singlequoted
+patterns.propername=R("AZ","az","__")*R("09","AZ","az","__")^0*P(-1)
+patterns.somecontent=(anything-newline-space)^1
+patterns.beginline=#(1-newline)
+patterns.longtostring=Cs(whitespace^0/""*nonwhitespace^0*((whitespace^0/" "*(patterns.quoted+nonwhitespace)^1)^0))
+local function anywhere(pattern)
+ return P { P(pattern)+1*V(1) }
+end
+lpeg.anywhere=anywhere
+function lpeg.instringchecker(p)
+ p=anywhere(p)
+ return function(str)
+ return lpegmatch(p,str) and true or false
+ end
+end
+function lpeg.splitter(pattern,action)
+ return (((1-P(pattern))^1)/action+1)^0
+end
+function lpeg.tsplitter(pattern,action)
+ return Ct((((1-P(pattern))^1)/action+1)^0)
+end
+local splitters_s,splitters_m,splitters_t={},{},{}
+local function splitat(separator,single)
+ local splitter=(single and splitters_s[separator]) or splitters_m[separator]
+ if not splitter then
+ separator=P(separator)
+ local other=C((1-separator)^0)
+ if single then
+ local any=anything
+ splitter=other*(separator*C(any^0)+"")
+ splitters_s[separator]=splitter
+ else
+ splitter=other*(separator*other)^0
+ splitters_m[separator]=splitter
+ end
+ end
+ return splitter
+end
+local function tsplitat(separator)
+ local splitter=splitters_t[separator]
+ if not splitter then
+ splitter=Ct(splitat(separator))
+ splitters_t[separator]=splitter
+ end
+ return splitter
+end
+lpeg.splitat=splitat
+lpeg.tsplitat=tsplitat
+function string.splitup(str,separator)
+ if not separator then
+ separator=","
+ end
+ return lpegmatch(splitters_m[separator] or splitat(separator),str)
+end
+local cache={}
+function lpeg.split(separator,str)
+ local c=cache[separator]
+ if not c then
+ c=tsplitat(separator)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
+end
+function string.split(str,separator)
+ if separator then
+ local c=cache[separator]
+ if not c then
+ c=tsplitat(separator)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
+ else
+ return { str }
+ end
+end
+local spacing=patterns.spacer^0*newline
+local empty=spacing*Cc("")
+local nonempty=Cs((1-spacing)^1)*spacing^-1
+local content=(empty+nonempty)^1
+patterns.textline=content
+local linesplitter=tsplitat(newline)
+patterns.linesplitter=linesplitter
+function string.splitlines(str)
+ return lpegmatch(linesplitter,str)
+end
+local cache={}
+function lpeg.checkedsplit(separator,str)
+ local c=cache[separator]
+ if not c then
+ separator=P(separator)
+ local other=C((1-separator)^1)
+ c=Ct(separator^0*other*(separator^1*other)^0)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
+end
+function string.checkedsplit(str,separator)
+ local c=cache[separator]
+ if not c then
+ separator=P(separator)
+ local other=C((1-separator)^1)
+ c=Ct(separator^0*other*(separator^1*other)^0)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
+end
+local function f2(s) local c1,c2=byte(s,1,2) return c1*64+c2-12416 end
+local function f3(s) local c1,c2,c3=byte(s,1,3) return (c1*64+c2)*64+c3-925824 end
+local function f4(s) local c1,c2,c3,c4=byte(s,1,4) return ((c1*64+c2)*64+c3)*64+c4-63447168 end
+local utf8byte=patterns.utf8one/byte+patterns.utf8two/f2+patterns.utf8three/f3+patterns.utf8four/f4
+patterns.utf8byte=utf8byte
+local cache={}
+function lpeg.stripper(str)
+ if type(str)=="string" then
+ local s=cache[str]
+ if not s then
+ s=Cs(((S(str)^1)/""+1)^0)
+ cache[str]=s
+ end
+ return s
+ else
+ return Cs(((str^1)/""+1)^0)
+ end
+end
+local cache={}
+function lpeg.keeper(str)
+ if type(str)=="string" then
+ local s=cache[str]
+ if not s then
+ s=Cs((((1-S(str))^1)/""+1)^0)
+ cache[str]=s
+ end
+ return s
+ else
+ return Cs((((1-str)^1)/""+1)^0)
+ end
+end
+function lpeg.frontstripper(str)
+ return (P(str)+P(true))*Cs(anything^0)
+end
+function lpeg.endstripper(str)
+ return Cs((1-P(str)*endofstring)^0)
+end
+function lpeg.replacer(one,two,makefunction,isutf)
+ local pattern
+ local u=isutf and utf8char or 1
+ if type(one)=="table" then
+ local no=#one
+ local p=P(false)
+ if no==0 then
+ for k,v in next,one do
+ p=p+P(k)/v
+ end
+ pattern=Cs((p+u)^0)
+ elseif no==1 then
+ local o=one[1]
+ one,two=P(o[1]),o[2]
+ pattern=Cs((one/two+u)^0)
+ else
+ for i=1,no do
+ local o=one[i]
+ p=p+P(o[1])/o[2]
+ end
+ pattern=Cs((p+u)^0)
+ end
+ else
+ pattern=Cs((P(one)/(two or "")+u)^0)
+ end
+ if makefunction then
+ return function(str)
+ return lpegmatch(pattern,str)
+ end
+ else
+ return pattern
+ end
+end
+function lpeg.finder(lst,makefunction)
+ local pattern
+ if type(lst)=="table" then
+ pattern=P(false)
+ if #lst==0 then
+ for k,v in next,lst do
+ pattern=pattern+P(k)
+ end
+ else
+ for i=1,#lst do
+ pattern=pattern+P(lst[i])
+ end
+ end
+ else
+ pattern=P(lst)
+ end
+ pattern=(1-pattern)^0*pattern
+ if makefunction then
+ return function(str)
+ return lpegmatch(pattern,str)
+ end
+ else
+ return pattern
+ end
+end
+local splitters_f,splitters_s={},{}
+function lpeg.firstofsplit(separator)
+ local splitter=splitters_f[separator]
+ if not splitter then
+ separator=P(separator)
+ splitter=C((1-separator)^0)
+ splitters_f[separator]=splitter
+ end
+ return splitter
+end
+function lpeg.secondofsplit(separator)
+ local splitter=splitters_s[separator]
+ if not splitter then
+ separator=P(separator)
+ splitter=(1-separator)^0*separator*C(anything^0)
+ splitters_s[separator]=splitter
+ end
+ return splitter
+end
+function lpeg.balancer(left,right)
+ left,right=P(left),P(right)
+ return P { left*((1-left-right)+V(1))^0*right }
+end
+local nany=utf8char/""
+function lpeg.counter(pattern)
+ pattern=Cs((P(pattern)/" "+nany)^0)
+ return function(str)
+ return #lpegmatch(pattern,str)
+ end
+end
+utf=utf or (unicode and unicode.utf8) or {}
+local utfcharacters=utf and utf.characters or string.utfcharacters
+local utfgmatch=utf and utf.gmatch
+local utfchar=utf and utf.char
+lpeg.UP=lpeg.P
+if utfcharacters then
+ function lpeg.US(str)
+ local p=P(false)
+ for uc in utfcharacters(str) do
+ p=p+P(uc)
+ end
+ return p
+ end
+elseif utfgmatch then
+ function lpeg.US(str)
+ local p=P(false)
+ for uc in utfgmatch(str,".") do
+ p=p+P(uc)
+ end
+ return p
+ end
+else
+ function lpeg.US(str)
+ local p=P(false)
+ local f=function(uc)
+ p=p+P(uc)
+ end
+ lpegmatch((utf8char/f)^0,str)
+ return p
+ end
+end
+local range=utf8byte*utf8byte+Cc(false)
+function lpeg.UR(str,more)
+ local first,last
+ if type(str)=="number" then
+ first=str
+ last=more or first
+ else
+ first,last=lpegmatch(range,str)
+ if not last then
+ return P(str)
+ end
+ end
+ if first==last then
+ return P(str)
+ elseif utfchar and (last-first<8) then
+ local p=P(false)
+ for i=first,last do
+ p=p+P(utfchar(i))
+ end
+ return p
+ else
+ local f=function(b)
+ return b>=first and b<=last
+ end
+ return utf8byte/f
+ end
+end
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p)=="pattern"
+end
+function lpeg.oneof(list,...)
+ if type(list)~="table" then
+ list={ list,... }
+ end
+ local p=P(list[1])
+ for l=2,#list do
+ p=p+P(list[l])
+ end
+ return p
+end
+local sort=table.sort
+local function copyindexed(old)
+ local new={}
+ for i=1,#old do
+ new[i]=old
+ end
+ return new
+end
+local function sortedkeys(tab)
+ local keys,s={},0
+ for key,_ in next,tab do
+ s=s+1
+ keys[s]=key
+ end
+ sort(keys)
+ return keys
+end
+function lpeg.append(list,pp,delayed,checked)
+ local p=pp
+ if #list>0 then
+ local keys=copyindexed(list)
+ sort(keys)
+ for i=#keys,1,-1 do
+ local k=keys[i]
+ if p then
+ p=P(k)+p
+ else
+ p=P(k)
+ end
+ end
+ elseif delayed then
+ local keys=sortedkeys(list)
+ if p then
+ for i=1,#keys,1 do
+ local k=keys[i]
+ local v=list[k]
+ p=P(k)/list+p
+ end
+ else
+ for i=1,#keys do
+ local k=keys[i]
+ local v=list[k]
+ if p then
+ p=P(k)+p
+ else
+ p=P(k)
+ end
+ end
+ if p then
+ p=p/list
+ end
+ end
+ elseif checked then
+ local keys=sortedkeys(list)
+ for i=1,#keys do
+ local k=keys[i]
+ local v=list[k]
+ if p then
+ if k==v then
+ p=P(k)+p
+ else
+ p=P(k)/v+p
+ end
+ else
+ if k==v then
+ p=P(k)
+ else
+ p=P(k)/v
+ end
+ end
+ end
+ else
+ local keys=sortedkeys(list)
+ for i=1,#keys do
+ local k=keys[i]
+ local v=list[k]
+ if p then
+ p=P(k)/v+p
+ else
+ p=P(k)/v
+ end
+ end
+ end
+ return p
+end
+local function make(t)
+ local p
+ local keys=sortedkeys(t)
+ for i=1,#keys do
+ local k=keys[i]
+ local v=t[k]
+ if not p then
+ if next(v) then
+ p=P(k)*make(v)
+ else
+ p=P(k)
+ end
+ else
+ if next(v) then
+ p=p+P(k)*make(v)
+ else
+ p=p+P(k)
+ end
+ end
+ end
+ return p
+end
+function lpeg.utfchartabletopattern(list)
+ local tree={}
+ for i=1,#list do
+ local t=tree
+ for c in gmatch(list[i],".") do
+ if not t[c] then
+ t[c]={}
+ end
+ t=t[c]
+ end
+ end
+ return make(tree)
+end
+patterns.containseol=lpeg.finder(eol)
+local function nextstep(n,step,result)
+ local m=n%step
+ local d=floor(n/step)
+ if d>0 then
+ local v=V(tostring(step))
+ local s=result.start
+ for i=1,d do
+ if s then
+ s=v*s
+ else
+ s=v
+ end
+ end
+ result.start=s
+ end
+ if step>1 and result.start then
+ local v=V(tostring(step/2))
+ result[tostring(step)]=v*v
+ end
+ if step>0 then
+ return nextstep(m,step/2,result)
+ else
+ return result
+ end
+end
+function lpeg.times(pattern,n)
+ return P(nextstep(n,2^16,{ "start",["1"]=pattern }))
+end
+local digit=R("09")
+local period=P(".")
+local zero=P("0")
+local trailingzeros=zero^0*-digit
+local case_1=period*trailingzeros/""
+local case_2=period*(digit-trailingzeros)^1*(trailingzeros/"")
+local number=digit^1*(case_1+case_2)
+local stripper=Cs((number+1)^0)
+lpeg.patterns.stripzeros=stripper
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['l-functions']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+functions=functions or {}
+function functions.dummy() end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['l-string']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local string=string
+local sub,gmatch,format,char,byte,rep,lower=string.sub,string.gmatch,string.format,string.char,string.byte,string.rep,string.lower
+local lpegmatch,patterns=lpeg.match,lpeg.patterns
+local P,S,C,Ct,Cc,Cs=lpeg.P,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cc,lpeg.Cs
+local unquoted=patterns.squote*C(patterns.nosquote)*patterns.squote+patterns.dquote*C(patterns.nodquote)*patterns.dquote
+function string.unquoted(str)
+ return lpegmatch(unquoted,str) or str
+end
+function string.quoted(str)
+ return format("%q",str)
+end
+function string.count(str,pattern)
+ local n=0
+ for _ in gmatch(str,pattern) do
+ n=n+1
+ end
+ return n
+end
+function string.limit(str,n,sentinel)
+ if #str>n then
+ sentinel=sentinel or "..."
+ return sub(str,1,(n-#sentinel))..sentinel
+ else
+ return str
+ end
+end
+local stripper=patterns.stripper
+local collapser=patterns.collapser
+local longtostring=patterns.longtostring
+function string.strip(str)
+ return lpegmatch(stripper,str) or ""
+end
+function string.collapsespaces(str)
+ return lpegmatch(collapser,str) or ""
+end
+function string.longtostring(str)
+ return lpegmatch(longtostring,str) or ""
+end
+local pattern=P(" ")^0*P(-1)
+function string.is_empty(str)
+ if str=="" then
+ return true
+ else
+ return lpegmatch(pattern,str) and true or false
+ end
+end
+local anything=patterns.anything
+local allescapes=Cc("%")*S(".-+%?()[]*")
+local someescapes=Cc("%")*S(".-+%()[]")
+local matchescapes=Cc(".")*S("*?")
+local pattern_a=Cs ((allescapes+anything )^0 )
+local pattern_b=Cs ((someescapes+matchescapes+anything )^0 )
+local pattern_c=Cs (Cc("^")*(someescapes+matchescapes+anything )^0*Cc("$") )
+function string.escapedpattern(str,simple)
+ return lpegmatch(simple and pattern_b or pattern_a,str)
+end
+function string.topattern(str,lowercase,strict)
+ if str=="" or type(str)~="string" then
+ return ".*"
+ elseif strict then
+ str=lpegmatch(pattern_c,str)
+ else
+ str=lpegmatch(pattern_b,str)
+ end
+ if lowercase then
+ return lower(str)
+ else
+ return str
+ end
+end
+function string.valid(str,default)
+ return (type(str)=="string" and str~="" and str) or default or nil
+end
+string.itself=function(s) return s end
+local pattern=Ct(C(1)^0)
+function string.totable(str)
+ return lpegmatch(pattern,str)
+end
+local replacer=lpeg.replacer("@","%%")
+function string.tformat(fmt,...)
+ return format(lpegmatch(replacer,fmt),...)
+end
+string.quote=string.quoted
+string.unquote=string.unquoted
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['l-table']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type,next,tostring,tonumber,ipairs,select=type,next,tostring,tonumber,ipairs,select
+local table,string=table,string
+local concat,sort,insert,remove=table.concat,table.sort,table.insert,table.remove
+local format,lower,dump=string.format,string.lower,string.dump
+local getmetatable,setmetatable=getmetatable,setmetatable
+local getinfo=debug.getinfo
+local lpegmatch,patterns=lpeg.match,lpeg.patterns
+local floor=math.floor
+local stripper=patterns.stripper
+function table.strip(tab)
+ local lst,l={},0
+ for i=1,#tab do
+ local s=lpegmatch(stripper,tab[i]) or ""
+ if s=="" then
+ else
+ l=l+1
+ lst[l]=s
+ end
+ end
+ return lst
+end
+function table.keys(t)
+ if t then
+ local keys,k={},0
+ for key,_ in next,t do
+ k=k+1
+ keys[k]=key
+ end
+ return keys
+ else
+ return {}
+ end
+end
+local function compare(a,b)
+ local ta,tb=type(a),type(b)
+ if ta==tb then
+ return a<b
+ else
+ return tostring(a)<tostring(b)
+ end
+end
+local function sortedkeys(tab)
+ if tab then
+ local srt,category,s={},0,0
+ for key,_ in next,tab do
+ s=s+1
+ srt[s]=key
+ if category==3 then
+ else
+ local tkey=type(key)
+ if tkey=="string" then
+ category=(category==2 and 3) or 1
+ elseif tkey=="number" then
+ category=(category==1 and 3) or 2
+ else
+ category=3
+ end
+ end
+ end
+ if category==0 or category==3 then
+ sort(srt,compare)
+ else
+ sort(srt)
+ end
+ return srt
+ else
+ return {}
+ end
+end
+local function sortedhashkeys(tab,cmp)
+ if tab then
+ local srt,s={},0
+ for key,_ in next,tab do
+ if key then
+ s=s+1
+ srt[s]=key
+ end
+ end
+ sort(srt,cmp)
+ return srt
+ else
+ return {}
+ end
+end
+function table.allkeys(t)
+ local keys={}
+ for k,v in next,t do
+ for k,v in next,v do
+ keys[k]=true
+ end
+ end
+ return sortedkeys(keys)
+end
+table.sortedkeys=sortedkeys
+table.sortedhashkeys=sortedhashkeys
+local function nothing() end
+local function sortedhash(t,cmp)
+ if t then
+ local s
+ if cmp then
+ s=sortedhashkeys(t,function(a,b) return cmp(t,a,b) end)
+ else
+ s=sortedkeys(t)
+ end
+ local n=0
+ local function kv(s)
+ n=n+1
+ local k=s[n]
+ return k,t[k]
+ end
+ return kv,s
+ else
+ return nothing
+ end
+end
+table.sortedhash=sortedhash
+table.sortedpairs=sortedhash
+function table.append(t,list)
+ local n=#t
+ for i=1,#list do
+ n=n+1
+ t[n]=list[i]
+ end
+ return t
+end
+function table.prepend(t,list)
+ local nl=#list
+ local nt=nl+#t
+ for i=#t,1,-1 do
+ t[nt]=t[i]
+ nt=nt-1
+ end
+ for i=1,#list do
+ t[i]=list[i]
+ end
+ return t
+end
+function table.merge(t,...)
+ t=t or {}
+ for i=1,select("#",...) do
+ for k,v in next,(select(i,...)) do
+ t[k]=v
+ end
+ end
+ return t
+end
+function table.merged(...)
+ local t={}
+ for i=1,select("#",...) do
+ for k,v in next,(select(i,...)) do
+ t[k]=v
+ end
+ end
+ return t
+end
+function table.imerge(t,...)
+ local nt=#t
+ for i=1,select("#",...) do
+ local nst=select(i,...)
+ for j=1,#nst do
+ nt=nt+1
+ t[nt]=nst[j]
+ end
+ end
+ return t
+end
+function table.imerged(...)
+ local tmp,ntmp={},0
+ for i=1,select("#",...) do
+ local nst=select(i,...)
+ for j=1,#nst do
+ ntmp=ntmp+1
+ tmp[ntmp]=nst[j]
+ end
+ end
+ return tmp
+end
+local function fastcopy(old,metatabletoo)
+ if old then
+ local new={}
+ for k,v in next,old do
+ if type(v)=="table" then
+ new[k]=fastcopy(v,metatabletoo)
+ else
+ new[k]=v
+ end
+ end
+ if metatabletoo then
+ local mt=getmetatable(old)
+ if mt then
+ setmetatable(new,mt)
+ end
+ end
+ return new
+ else
+ return {}
+ end
+end
+local function copy(t,tables)
+ tables=tables or {}
+ local tcopy={}
+ if not tables[t] then
+ tables[t]=tcopy
+ end
+ for i,v in next,t do
+ if type(i)=="table" then
+ if tables[i] then
+ i=tables[i]
+ else
+ i=copy(i,tables)
+ end
+ end
+ if type(v)~="table" then
+ tcopy[i]=v
+ elseif tables[v] then
+ tcopy[i]=tables[v]
+ else
+ tcopy[i]=copy(v,tables)
+ end
+ end
+ local mt=getmetatable(t)
+ if mt then
+ setmetatable(tcopy,mt)
+ end
+ return tcopy
+end
+table.fastcopy=fastcopy
+table.copy=copy
+function table.derive(parent)
+ local child={}
+ if parent then
+ setmetatable(child,{ __index=parent })
+ end
+ return child
+end
+function table.tohash(t,value)
+ local h={}
+ if t then
+ if value==nil then value=true end
+ for _,v in next,t do
+ h[v]=value
+ end
+ end
+ return h
+end
+function table.fromhash(t)
+ local hsh,h={},0
+ for k,v in next,t do
+ if v then
+ h=h+1
+ hsh[h]=k
+ end
+ end
+ return hsh
+end
+local noquotes,hexify,handle,reduce,compact,inline,functions
+local reserved=table.tohash {
+ 'and','break','do','else','elseif','end','false','for','function','if',
+ 'in','local','nil','not','or','repeat','return','then','true','until','while',
+}
+local function simple_table(t)
+ if #t>0 then
+ local n=0
+ for _,v in next,t do
+ n=n+1
+ end
+ if n==#t then
+ local tt,nt={},0
+ for i=1,#t do
+ local v=t[i]
+ local tv=type(v)
+ if tv=="number" then
+ nt=nt+1
+ if hexify then
+ tt[nt]=format("0x%04X",v)
+ else
+ tt[nt]=tostring(v)
+ end
+ elseif tv=="boolean" then
+ nt=nt+1
+ tt[nt]=tostring(v)
+ elseif tv=="string" then
+ nt=nt+1
+ tt[nt]=format("%q",v)
+ else
+ tt=nil
+ break
+ end
+ end
+ return tt
+ end
+ end
+ return nil
+end
+local propername=patterns.propername
+local function dummy() end
+local function do_serialize(root,name,depth,level,indexed)
+ if level>0 then
+ depth=depth.." "
+ if indexed then
+ handle(format("%s{",depth))
+ else
+ local tn=type(name)
+ if tn=="number" then
+ if hexify then
+ handle(format("%s[0x%04X]={",depth,name))
+ else
+ handle(format("%s[%s]={",depth,name))
+ end
+ elseif tn=="string" then
+ if noquotes and not reserved[name] and lpegmatch(propername,name) then
+ handle(format("%s%s={",depth,name))
+ else
+ handle(format("%s[%q]={",depth,name))
+ end
+ elseif tn=="boolean" then
+ handle(format("%s[%s]={",depth,tostring(name)))
+ else
+ handle(format("%s{",depth))
+ end
+ end
+ end
+ if root and next(root) then
+ local first,last=nil,0
+ if compact then
+ last=#root
+ for k=1,last do
+ if root[k]==nil then
+ last=k-1
+ break
+ end
+ end
+ if last>0 then
+ first=1
+ end
+ end
+ local sk=sortedkeys(root)
+ for i=1,#sk do
+ local k=sk[i]
+ local v=root[k]
+ local t,tk=type(v),type(k)
+ if compact and first and tk=="number" and k>=first and k<=last then
+ if t=="number" then
+ if hexify then
+ handle(format("%s 0x%04X,",depth,v))
+ else
+ handle(format("%s %s,",depth,v))
+ end
+ elseif t=="string" then
+ if reduce and tonumber(v) then
+ handle(format("%s %s,",depth,v))
+ else
+ handle(format("%s %q,",depth,v))
+ end
+ elseif t=="table" then
+ if not next(v) then
+ handle(format("%s {},",depth))
+ elseif inline then
+ local st=simple_table(v)
+ if st then
+ handle(format("%s { %s },",depth,concat(st,", ")))
+ else
+ do_serialize(v,k,depth,level+1,true)
+ end
+ else
+ do_serialize(v,k,depth,level+1,true)
+ end
+ elseif t=="boolean" then
+ handle(format("%s %s,",depth,tostring(v)))
+ elseif t=="function" then
+ if functions then
+ handle(format('%s load(%q),',depth,dump(v)))
+ else
+ handle(format('%s "function",',depth))
+ end
+ else
+ handle(format("%s %q,",depth,tostring(v)))
+ end
+ elseif k=="__p__" then
+ if false then
+ handle(format("%s __p__=nil,",depth))
+ end
+ elseif t=="number" then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=0x%04X,",depth,k,v))
+ else
+ handle(format("%s [%s]=%s,",depth,k,v))
+ end
+ elseif tk=="boolean" then
+ if hexify then
+ handle(format("%s [%s]=0x%04X,",depth,tostring(k),v))
+ else
+ handle(format("%s [%s]=%s,",depth,tostring(k),v))
+ end
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ if hexify then
+ handle(format("%s %s=0x%04X,",depth,k,v))
+ else
+ handle(format("%s %s=%s,",depth,k,v))
+ end
+ else
+ if hexify then
+ handle(format("%s [%q]=0x%04X,",depth,k,v))
+ else
+ handle(format("%s [%q]=%s,",depth,k,v))
+ end
+ end
+ elseif t=="string" then
+ if reduce and tonumber(v) then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=%s,",depth,k,v))
+ else
+ handle(format("%s [%s]=%s,",depth,k,v))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%s,",depth,tostring(k),v))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%s,",depth,k,v))
+ else
+ handle(format("%s [%q]=%s,",depth,k,v))
+ end
+ else
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=%q,",depth,k,v))
+ else
+ handle(format("%s [%s]=%q,",depth,k,v))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%q,",depth,tostring(k),v))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%q,",depth,k,v))
+ else
+ handle(format("%s [%q]=%q,",depth,k,v))
+ end
+ end
+ elseif t=="table" then
+ if not next(v) then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]={},",depth,k))
+ else
+ handle(format("%s [%s]={},",depth,k))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]={},",depth,tostring(k)))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s={},",depth,k))
+ else
+ handle(format("%s [%q]={},",depth,k))
+ end
+ elseif inline then
+ local st=simple_table(v)
+ if st then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]={ %s },",depth,k,concat(st,", ")))
+ else
+ handle(format("%s [%s]={ %s },",depth,k,concat(st,", ")))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]={ %s },",depth,tostring(k),concat(st,", ")))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s={ %s },",depth,k,concat(st,", ")))
+ else
+ handle(format("%s [%q]={ %s },",depth,k,concat(st,", ")))
+ end
+ else
+ do_serialize(v,k,depth,level+1)
+ end
+ else
+ do_serialize(v,k,depth,level+1)
+ end
+ elseif t=="boolean" then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=%s,",depth,k,tostring(v)))
+ else
+ handle(format("%s [%s]=%s,",depth,k,tostring(v)))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%s,",depth,tostring(k),tostring(v)))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%s,",depth,k,tostring(v)))
+ else
+ handle(format("%s [%q]=%s,",depth,k,tostring(v)))
+ end
+ elseif t=="function" then
+ if functions then
+ local f=getinfo(v).what=="C" and dump(dummy) or dump(v)
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=load(%q),",depth,k,f))
+ else
+ handle(format("%s [%s]=load(%q),",depth,k,f))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=load(%q),",depth,tostring(k),f))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=load(%q),",depth,k,f))
+ else
+ handle(format("%s [%q]=load(%q),",depth,k,f))
+ end
+ end
+ else
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=%q,",depth,k,tostring(v)))
+ else
+ handle(format("%s [%s]=%q,",depth,k,tostring(v)))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%q,",depth,tostring(k),tostring(v)))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%q,",depth,k,tostring(v)))
+ else
+ handle(format("%s [%q]=%q,",depth,k,tostring(v)))
+ end
+ end
+ end
+ end
+ if level>0 then
+ handle(format("%s},",depth))
+ end
+end
+local function serialize(_handle,root,name,specification)
+ local tname=type(name)
+ if type(specification)=="table" then
+ noquotes=specification.noquotes
+ hexify=specification.hexify
+ handle=_handle or specification.handle or print
+ reduce=specification.reduce or false
+ functions=specification.functions
+ compact=specification.compact
+ inline=specification.inline and compact
+ if functions==nil then
+ functions=true
+ end
+ if compact==nil then
+ compact=true
+ end
+ if inline==nil then
+ inline=compact
+ end
+ else
+ noquotes=false
+ hexify=false
+ handle=_handle or print
+ reduce=false
+ compact=true
+ inline=true
+ functions=true
+ end
+ if tname=="string" then
+ if name=="return" then
+ handle("return {")
+ else
+ handle(name.."={")
+ end
+ elseif tname=="number" then
+ if hexify then
+ handle(format("[0x%04X]={",name))
+ else
+ handle("["..name.."]={")
+ end
+ elseif tname=="boolean" then
+ if name then
+ handle("return {")
+ else
+ handle("{")
+ end
+ else
+ handle("t={")
+ end
+ if root then
+ if getmetatable(root) then
+ local dummy=root._w_h_a_t_e_v_e_r_
+ root._w_h_a_t_e_v_e_r_=nil
+ end
+ if next(root) then
+ do_serialize(root,name,"",0)
+ end
+ end
+ handle("}")
+end
+function table.serialize(root,name,specification)
+ local t,n={},0
+ local function flush(s)
+ n=n+1
+ t[n]=s
+ end
+ serialize(flush,root,name,specification)
+ return concat(t,"\n")
+end
+table.tohandle=serialize
+local maxtab=2*1024
+function table.tofile(filename,root,name,specification)
+ local f=io.open(filename,'w')
+ if f then
+ if maxtab>1 then
+ local t,n={},0
+ local function flush(s)
+ n=n+1
+ t[n]=s
+ if n>maxtab then
+ f:write(concat(t,"\n"),"\n")
+ t,n={},0
+ end
+ end
+ serialize(flush,root,name,specification)
+ f:write(concat(t,"\n"),"\n")
+ else
+ local function flush(s)
+ f:write(s,"\n")
+ end
+ serialize(flush,root,name,specification)
+ end
+ f:close()
+ io.flush()
+ end
+end
+local function flattened(t,f,depth)
+ if f==nil then
+ f={}
+ depth=0xFFFF
+ elseif tonumber(f) then
+ depth=f
+ f={}
+ elseif not depth then
+ depth=0xFFFF
+ end
+ for k,v in next,t do
+ if type(k)~="number" then
+ if depth>0 and type(v)=="table" then
+ flattened(v,f,depth-1)
+ else
+ f[#f+1]=v
+ end
+ end
+ end
+ for k=1,#t do
+ local v=t[k]
+ if depth>0 and type(v)=="table" then
+ flattened(v,f,depth-1)
+ else
+ f[#f+1]=v
+ end
+ end
+ return f
+end
+table.flattened=flattened
+local function unnest(t,f)
+ if not f then
+ f={}
+ end
+ for i=1,#t do
+ local v=t[i]
+ if type(v)=="table" then
+ if type(v[1])=="table" then
+ unnest(v,f)
+ else
+ f[#f+1]=v
+ end
+ else
+ f[#f+1]=v
+ end
+ end
+ return f
+end
+function table.unnest(t)
+ return unnest(t)
+end
+local function are_equal(a,b,n,m)
+ if a and b and #a==#b then
+ n=n or 1
+ m=m or #a
+ for i=n,m do
+ local ai,bi=a[i],b[i]
+ if ai==bi then
+ elseif type(ai)=="table" and type(bi)=="table" then
+ if not are_equal(ai,bi) then
+ return false
+ end
+ else
+ return false
+ end
+ end
+ return true
+ else
+ return false
+ end
+end
+local function identical(a,b)
+ for ka,va in next,a do
+ local vb=b[ka]
+ if va==vb then
+ elseif type(va)=="table" and type(vb)=="table" then
+ if not identical(va,vb) then
+ return false
+ end
+ else
+ return false
+ end
+ end
+ return true
+end
+table.identical=identical
+table.are_equal=are_equal
+function table.compact(t)
+ if t then
+ for k,v in next,t do
+ if not next(v) then
+ t[k]=nil
+ end
+ end
+ end
+end
+function table.contains(t,v)
+ if t then
+ for i=1,#t do
+ if t[i]==v then
+ return i
+ end
+ end
+ end
+ return false
+end
+function table.count(t)
+ local n=0
+ for k,v in next,t do
+ n=n+1
+ end
+ return n
+end
+function table.swapped(t,s)
+ local n={}
+ if s then
+ for k,v in next,s do
+ n[k]=v
+ end
+ end
+ for k,v in next,t do
+ n[v]=k
+ end
+ return n
+end
+function table.mirrored(t)
+ local n={}
+ for k,v in next,t do
+ n[v]=k
+ n[k]=v
+ end
+ return n
+end
+function table.reversed(t)
+ if t then
+ local tt,tn={},#t
+ if tn>0 then
+ local ttn=0
+ for i=tn,1,-1 do
+ ttn=ttn+1
+ tt[ttn]=t[i]
+ end
+ end
+ return tt
+ end
+end
+function table.reverse(t)
+ if t then
+ local n=#t
+ for i=1,floor(n/2) do
+ local j=n-i+1
+ t[i],t[j]=t[j],t[i]
+ end
+ return t
+ end
+end
+function table.sequenced(t,sep,simple)
+ if not t then
+ return ""
+ end
+ local n=#t
+ local s={}
+ if n>0 then
+ for i=1,n do
+ s[i]=tostring(t[i])
+ end
+ else
+ n=0
+ for k,v in sortedhash(t) do
+ if simple then
+ if v==true then
+ n=n+1
+ s[n]=k
+ elseif v and v~="" then
+ n=n+1
+ s[n]=k.."="..tostring(v)
+ end
+ else
+ n=n+1
+ s[n]=k.."="..tostring(v)
+ end
+ end
+ end
+ return concat(s,sep or " | ")
+end
+function table.print(t,...)
+ if type(t)~="table" then
+ print(tostring(t))
+ else
+ serialize(print,t,...)
+ end
+end
+setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end)
+function table.sub(t,i,j)
+ return { unpack(t,i,j) }
+end
+function table.is_empty(t)
+ return not t or not next(t)
+end
+function table.has_one_entry(t)
+ return t and not next(t,next(t))
+end
+function table.loweredkeys(t)
+ local l={}
+ for k,v in next,t do
+ l[lower(k)]=v
+ end
+ return l
+end
+function table.unique(old)
+ local hash={}
+ local new={}
+ local n=0
+ for i=1,#old do
+ local oi=old[i]
+ if not hash[oi] then
+ n=n+1
+ new[n]=oi
+ hash[oi]=true
+ end
+ end
+ return new
+end
+function table.sorted(t,...)
+ sort(t,...)
+ return t
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['l-io']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local io=io
+local byte,find,gsub,format=string.byte,string.find,string.gsub,string.format
+local concat=table.concat
+local floor=math.floor
+local type=type
+if string.find(os.getenv("PATH"),";") then
+ io.fileseparator,io.pathseparator="\\",";"
+else
+ io.fileseparator,io.pathseparator="/",":"
+end
+local function readall(f)
+ return f:read("*all")
+end
+local function readall(f)
+ local size=f:seek("end")
+ if size==0 then
+ return ""
+ elseif size<1024*1024 then
+ f:seek("set",0)
+ return f:read('*all')
+ else
+ local done=f:seek("set",0)
+ if size<1024*1024 then
+ step=1024*1024
+ elseif size>16*1024*1024 then
+ step=16*1024*1024
+ else
+ step=floor(size/(1024*1024))*1024*1024/8
+ end
+ local data={}
+ while true do
+ local r=f:read(step)
+ if not r then
+ return concat(data)
+ else
+ data[#data+1]=r
+ end
+ end
+ end
+end
+io.readall=readall
+function io.loaddata(filename,textmode)
+ local f=io.open(filename,(textmode and 'r') or 'rb')
+ if f then
+ local data=readall(f)
+ f:close()
+ if #data>0 then
+ return data
+ end
+ end
+end
+function io.savedata(filename,data,joiner)
+ local f=io.open(filename,"wb")
+ if f then
+ if type(data)=="table" then
+ f:write(concat(data,joiner or ""))
+ elseif type(data)=="function" then
+ data(f)
+ else
+ f:write(data or "")
+ end
+ f:close()
+ io.flush()
+ return true
+ else
+ return false
+ end
+end
+function io.loadlines(filename,n)
+ local f=io.open(filename,'r')
+ if not f then
+ elseif n then
+ local lines={}
+ for i=1,n do
+ local line=f:read("*lines")
+ if line then
+ lines[#lines+1]=line
+ else
+ break
+ end
+ end
+ f:close()
+ lines=concat(lines,"\n")
+ if #lines>0 then
+ return lines
+ end
+ else
+ local line=f:read("*line") or ""
+ f:close()
+ if #line>0 then
+ return line
+ end
+ end
+end
+function io.loadchunk(filename,n)
+ local f=io.open(filename,'rb')
+ if f then
+ local data=f:read(n or 1024)
+ f:close()
+ if #data>0 then
+ return data
+ end
+ end
+end
+function io.exists(filename)
+ local f=io.open(filename)
+ if f==nil then
+ return false
+ else
+ f:close()
+ return true
+ end
+end
+function io.size(filename)
+ local f=io.open(filename)
+ if f==nil then
+ return 0
+ else
+ local s=f:seek("end")
+ f:close()
+ return s
+ end
+end
+function io.noflines(f)
+ if type(f)=="string" then
+ local f=io.open(filename)
+ if f then
+ local n=f and io.noflines(f) or 0
+ f:close()
+ return n
+ else
+ return 0
+ end
+ else
+ local n=0
+ for _ in f:lines() do
+ n=n+1
+ end
+ f:seek('set',0)
+ return n
+ end
+end
+local nextchar={
+ [ 4]=function(f)
+ return f:read(1,1,1,1)
+ end,
+ [ 2]=function(f)
+ return f:read(1,1)
+ end,
+ [ 1]=function(f)
+ return f:read(1)
+ end,
+ [-2]=function(f)
+ local a,b=f:read(1,1)
+ return b,a
+ end,
+ [-4]=function(f)
+ local a,b,c,d=f:read(1,1,1,1)
+ return d,c,b,a
+ end
+}
+function io.characters(f,n)
+ if f then
+ return nextchar[n or 1],f
+ end
+end
+local nextbyte={
+ [4]=function(f)
+ local a,b,c,d=f:read(1,1,1,1)
+ if d then
+ return byte(a),byte(b),byte(c),byte(d)
+ end
+ end,
+ [3]=function(f)
+ local a,b,c=f:read(1,1,1)
+ if b then
+ return byte(a),byte(b),byte(c)
+ end
+ end,
+ [2]=function(f)
+ local a,b=f:read(1,1)
+ if b then
+ return byte(a),byte(b)
+ end
+ end,
+ [1]=function (f)
+ local a=f:read(1)
+ if a then
+ return byte(a)
+ end
+ end,
+ [-2]=function (f)
+ local a,b=f:read(1,1)
+ if b then
+ return byte(b),byte(a)
+ end
+ end,
+ [-3]=function(f)
+ local a,b,c=f:read(1,1,1)
+ if b then
+ return byte(c),byte(b),byte(a)
+ end
+ end,
+ [-4]=function(f)
+ local a,b,c,d=f:read(1,1,1,1)
+ if d then
+ return byte(d),byte(c),byte(b),byte(a)
+ end
+ end
+}
+function io.bytes(f,n)
+ if f then
+ return nextbyte[n or 1],f
+ else
+ return nil,nil
+ end
+end
+function io.ask(question,default,options)
+ while true do
+ io.write(question)
+ if options then
+ io.write(format(" [%s]",concat(options,"|")))
+ end
+ if default then
+ io.write(format(" [%s]",default))
+ end
+ io.write(format(" "))
+ io.flush()
+ local answer=io.read()
+ answer=gsub(answer,"^%s*(.*)%s*$","%1")
+ if answer=="" and default then
+ return default
+ elseif not options then
+ return answer
+ else
+ for k=1,#options do
+ if options[k]==answer then
+ return answer
+ end
+ end
+ local pattern="^"..answer
+ for k=1,#options do
+ local v=options[k]
+ if find(v,pattern) then
+ return v
+ end
+ end
+ end
+ end
+end
+local function readnumber(f,n,m)
+ if m then
+ f:seek("set",n)
+ n=m
+ end
+ if n==1 then
+ return byte(f:read(1))
+ elseif n==2 then
+ local a,b=byte(f:read(2),1,2)
+ return 256*a+b
+ elseif n==3 then
+ local a,b,c=byte(f:read(3),1,3)
+ return 256*256*a+256*b+c
+ elseif n==4 then
+ local a,b,c,d=byte(f:read(4),1,4)
+ return 256*256*256*a+256*256*b+256*c+d
+ elseif n==8 then
+ local a,b=readnumber(f,4),readnumber(f,4)
+ return 256*a+b
+ elseif n==12 then
+ local a,b,c=readnumber(f,4),readnumber(f,4),readnumber(f,4)
+ return 256*256*a+256*b+c
+ elseif n==-2 then
+ local b,a=byte(f:read(2),1,2)
+ return 256*a+b
+ elseif n==-3 then
+ local c,b,a=byte(f:read(3),1,3)
+ return 256*256*a+256*b+c
+ elseif n==-4 then
+ local d,c,b,a=byte(f:read(4),1,4)
+ return 256*256*256*a+256*256*b+256*c+d
+ elseif n==-8 then
+ local h,g,f,e,d,c,b,a=byte(f:read(8),1,8)
+ return 256*256*256*256*256*256*256*a+256*256*256*256*256*256*b+256*256*256*256*256*c+256*256*256*256*d+256*256*256*e+256*256*f+256*g+h
+ else
+ return 0
+ end
+end
+io.readnumber=readnumber
+function io.readstring(f,n,m)
+ if m then
+ f:seek("set",n)
+ n=m
+ end
+ local str=gsub(f:read(n),"\000","")
+ return str
+end
+if not io.i_limiter then function io.i_limiter() end end
+if not io.o_limiter then function io.o_limiter() end end
+
+end -- closure
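+-- Illustrative use of the io helpers above (a sketch; "sample.txt" and
+-- "sample.bin" are made-up names, not files shipped with luaotfload):
+--
+--   if io.exists("sample.txt") then
+--     local data = io.loaddata("sample.txt")     -- whole file, binary mode
+--     io.savedata("sample-copy.txt", data)       -- write it back out
+--   end
+--   local f = io.open("sample.bin","rb")
+--   local u16 = f and io.readnumber(f,2)         -- big-endian 16-bit integer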
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['l-file']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+file=file or {}
+local file=file
+if not lfs then
+ lfs=optionalrequire("lfs")
+end
+if not lfs then
+ lfs={
+ getcurrentdir=function()
+ return "."
+ end,
+ attributes=function()
+ return nil
+ end,
+ isfile=function(name)
+ local f=io.open(name,'rb')
+ if f then
+ f:close()
+ return true
+ end
+ end,
+ isdir=function(name)
+ print("you need to load lfs")
+ return false
+ end
+ }
+elseif not lfs.isfile then
+ local attributes=lfs.attributes
+ function lfs.isdir(name)
+ return attributes(name,"mode")=="directory"
+ end
+ function lfs.isfile(name)
+ return attributes(name,"mode")=="file"
+ end
+end
+local insert,concat=table.insert,table.concat
+local match,find=string.match,string.find
+local lpegmatch=lpeg.match
+local getcurrentdir,attributes=lfs.currentdir,lfs.attributes
+local checkedsplit=string.checkedsplit
+local P,R,S,C,Cs,Cp,Cc,Ct=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Cs,lpeg.Cp,lpeg.Cc,lpeg.Ct
+local colon=P(":")
+local period=P(".")
+local periods=P("..")
+local fwslash=P("/")
+local bwslash=P("\\")
+local slashes=S("\\/")
+local noperiod=1-period
+local noslashes=1-slashes
+local name=noperiod^1
+local suffix=period/""*(1-period-slashes)^1*-1
+local pattern=C((1-(slashes^1*noslashes^1*-1))^1)*P(1)
+local function pathpart(name,default)
+ return name and lpegmatch(pattern,name) or default or ""
+end
+local pattern=(noslashes^0*slashes)^1*C(noslashes^1)*-1
+local function basename(name)
+ return name and lpegmatch(pattern,name) or name
+end
+local pattern=(noslashes^0*slashes^1)^0*Cs((1-suffix)^1)*suffix^0
+local function nameonly(name)
+ return name and lpegmatch(pattern,name) or name
+end
+local pattern=(noslashes^0*slashes)^0*(noperiod^1*period)^1*C(noperiod^1)*-1
+local function suffixonly(name)
+ return name and lpegmatch(pattern,name) or ""
+end
+file.pathpart=pathpart
+file.basename=basename
+file.nameonly=nameonly
+file.suffixonly=suffixonly
+file.suffix=suffixonly
+file.dirname=pathpart
+file.extname=suffixonly
+local drive=C(R("az","AZ"))*colon
+local path=C((noslashes^0*slashes)^0)
+local suffix=period*C(P(1-period)^0*P(-1))
+local base=C((1-suffix)^0)
+local rest=C(P(1)^0)
+drive=drive+Cc("")
+path=path+Cc("")
+base=base+Cc("")
+suffix=suffix+Cc("")
+local pattern_a=drive*path*base*suffix
+local pattern_b=path*base*suffix
+local pattern_c=C(drive*path)*C(base*suffix)
+local pattern_d=path*rest
+function file.splitname(str,splitdrive)
+ if not str then
+ elseif splitdrive then
+ return lpegmatch(pattern_a,str)
+ else
+ return lpegmatch(pattern_b,str)
+ end
+end
+function file.splitbase(str)
+ return str and lpegmatch(pattern_d,str)
+end
+function file.nametotable(str,splitdrive)
+ if str then
+ local path,drive,subpath,name,base,suffix=lpegmatch(pattern_c,str)
+ if splitdrive then
+ return {
+ path=path,
+ drive=drive,
+ subpath=subpath,
+ name=name,
+ base=base,
+ suffix=suffix,
+ }
+ else
+ return {
+ path=path,
+ name=name,
+ base=base,
+ suffix=suffix,
+ }
+ end
+ end
+end
+local pattern=Cs(((period*(1-period-slashes)^1*-1)/""+1)^1)
+function file.removesuffix(name)
+ return name and lpegmatch(pattern,name)
+end
+local suffix=period/""*(1-period-slashes)^1*-1
+local pattern=Cs((noslashes^0*slashes^1)^0*((1-suffix)^1))*Cs(suffix)
+function file.addsuffix(filename,suffix,criterium)
+ if not filename or not suffix or suffix=="" then
+ return filename
+ elseif criterium==true then
+ return filename.."."..suffix
+ elseif not criterium then
+ local n,s=lpegmatch(pattern,filename)
+ if not s or s=="" then
+ return filename.."."..suffix
+ else
+ return filename
+ end
+ else
+ local n,s=lpegmatch(pattern,filename)
+ if s and s~="" then
+ local t=type(criterium)
+ if t=="table" then
+ for i=1,#criterium do
+ if s==criterium[i] then
+ return filename
+ end
+ end
+ elseif t=="string" then
+ if s==criterium then
+ return filename
+ end
+ end
+ end
+ return (n or filename).."."..suffix
+ end
+end
+local suffix=period*(1-period-slashes)^1*-1
+local pattern=Cs((1-suffix)^0)
+function file.replacesuffix(name,suffix)
+ if name and suffix and suffix~="" then
+ return lpegmatch(pattern,name).."."..suffix
+ else
+ return name
+ end
+end
+local reslasher=lpeg.replacer(P("\\"),"/")
+function file.reslash(str)
+ return str and lpegmatch(reslasher,str)
+end
+function file.is_writable(name)
+ if not name then
+ elseif lfs.isdir(name) then
+ name=name.."/m_t_x_t_e_s_t.tmp"
+ local f=io.open(name,"wb")
+ if f then
+ f:close()
+ os.remove(name)
+ return true
+ end
+ elseif lfs.isfile(name) then
+ local f=io.open(name,"ab")
+ if f then
+ f:close()
+ return true
+ end
+ else
+ local f=io.open(name,"ab")
+ if f then
+ f:close()
+ os.remove(name)
+ return true
+ end
+ end
+ return false
+end
+local readable=P("r")*Cc(true)
+function file.is_readable(name)
+ if name then
+ local a=attributes(name)
+ return a and lpegmatch(readable,a.permissions) or false
+ else
+ return false
+ end
+end
+file.isreadable=file.is_readable
+file.iswritable=file.is_writable
+function file.size(name)
+ if name then
+ local a=attributes(name)
+ return a and a.size or 0
+ else
+ return 0
+ end
+end
+function file.splitpath(str,separator)
+ return str and checkedsplit(lpegmatch(reslasher,str),separator or io.pathseparator)
+end
+function file.joinpath(tab,separator)
+ return tab and concat(tab,separator or io.pathseparator)
+end
+local stripper=Cs(P(fwslash)^0/""*reslasher)
+local isnetwork=fwslash*fwslash*(1-fwslash)+(1-fwslash-colon)^1*colon
+local isroot=fwslash^1*-1
+local hasroot=fwslash^1
+local deslasher=lpeg.replacer(S("\\/")^1,"/")
+function file.join(...)
+ local lst={... }
+ local one=lst[1]
+ if lpegmatch(isnetwork,one) then
+ local two=lpegmatch(deslasher,concat(lst,"/",2))
+ return one.."/"..two
+ elseif lpegmatch(isroot,one) then
+ local two=lpegmatch(deslasher,concat(lst,"/",2))
+ if lpegmatch(hasroot,two) then
+ return two
+ else
+ return "/"..two
+ end
+ elseif one=="" then
+ return lpegmatch(stripper,concat(lst,"/",2))
+ else
+ return lpegmatch(deslasher,concat(lst,"/"))
+ end
+end
+local drivespec=R("az","AZ")^1*colon
+local anchors=fwslash+drivespec
+local untouched=periods+(1-period)^1*P(-1)
+local splitstarter=(Cs(drivespec*(bwslash/"/"+fwslash)^0)+Cc(false))*Ct(lpeg.splitat(S("/\\")^1))
+local absolute=fwslash
+function file.collapsepath(str,anchor)
+ if not str then
+ return
+ end
+ if anchor==true and not lpegmatch(anchors,str) then
+ str=getcurrentdir().."/"..str
+ end
+ if str=="" or str=="." then
+ return "."
+ elseif lpegmatch(untouched,str) then
+ return lpegmatch(reslasher,str)
+ end
+ local starter,oldelements=lpegmatch(splitstarter,str)
+ local newelements={}
+ local i=#oldelements
+ while i>0 do
+ local element=oldelements[i]
+ if element=='.' then
+ elseif element=='..' then
+ local n=i-1
+ while n>0 do
+ local element=oldelements[n]
+ if element~='..' and element~='.' then
+ oldelements[n]='.'
+ break
+ else
+ n=n-1
+ end
+ end
+ if n<1 then
+ insert(newelements,1,'..')
+ end
+ elseif element~="" then
+ insert(newelements,1,element)
+ end
+ i=i-1
+ end
+ if #newelements==0 then
+ return starter or "."
+ elseif starter then
+ return starter..concat(newelements,'/')
+ elseif lpegmatch(absolute,str) then
+ return "/"..concat(newelements,'/')
+ else
+ newelements=concat(newelements,'/')
+ if anchor=="." and find(str,"^%./") then
+ return "./"..newelements
+ else
+ return newelements
+ end
+ end
+end
+local validchars=R("az","09","AZ","--","..")
+local pattern_a=Cs((validchars+P(1)/"-")^1)
+local whatever=P("-")^0/""
+local pattern_b=Cs(whatever*(1-whatever*-1)^1)
+function file.robustname(str,strict)
+ if str then
+ str=lpegmatch(pattern_a,str) or str
+ if strict then
+ return lpegmatch(pattern_b,str) or str
+ else
+ return str
+ end
+ end
+end
+file.readdata=io.loaddata
+file.savedata=io.savedata
+function file.copy(oldname,newname)
+ if oldname and newname then
+ local data=io.loaddata(oldname)
+ if data and data~="" then
+ file.savedata(newname,data)
+ end
+ end
+end
+local letter=R("az","AZ")+S("_-+")
+local separator=P("://")
+local qualified=period^0*fwslash+letter*colon+letter^1*separator+letter^1*fwslash
+local rootbased=fwslash+letter*colon
+lpeg.patterns.qualified=qualified
+lpeg.patterns.rootbased=rootbased
+function file.is_qualified_path(filename)
+ return filename and lpegmatch(qualified,filename)~=nil
+end
+function file.is_rootbased_path(filename)
+ return filename and lpegmatch(rootbased,filename)~=nil
+end
+function file.strip(name,dir)
+ if name then
+ local b,a=match(name,"^(.-)"..dir.."(.*)$")
+ return a~="" and a or name
+ end
+end
+
+end -- closure
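+-- Illustrative use of the file name helpers above (a sketch with made-up paths):
+--
+--   file.nameonly("/tmp/foo.lua")        --> "foo"
+--   file.suffixonly("/tmp/foo.lua")      --> "lua"
+--   file.addsuffix("foo","lua")          --> "foo.lua"
+--   file.join("a","b","c.lua")           --> "a/b/c.lua"
+--   file.collapsepath("a/b/../c.lua")    --> "a/c.lua"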
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['l-boolean']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type,tonumber=type,tonumber
+boolean=boolean or {}
+local boolean=boolean
+function boolean.tonumber(b)
+ if b then return 1 else return 0 end
+end
+function toboolean(str,tolerant)
+ if str==nil then
+ return false
+ elseif str==false then
+ return false
+ elseif str==true then
+ return true
+ elseif str=="true" then
+ return true
+ elseif str=="false" then
+ return false
+ elseif not tolerant then
+ return false
+ elseif str==0 then
+ return false
+ elseif (tonumber(str) or 0)>0 then
+ return true
+ else
+ return str=="yes" or str=="on" or str=="t"
+ end
+end
+string.toboolean=toboolean
+function string.booleanstring(str)
+ if str=="0" then
+ return false
+ elseif str=="1" then
+ return true
+ elseif str=="" then
+ return false
+ elseif str=="false" then
+ return false
+ elseif str=="true" then
+ return true
+ elseif (tonumber(str) or 0)>0 then
+ return true
+ else
+ return str=="yes" or str=="on" or str=="t"
+ end
+end
+function string.is_boolean(str,default)
+ if type(str)=="string" then
+ if str=="true" or str=="yes" or str=="on" or str=="t" then
+ return true
+ elseif str=="false" or str=="no" or str=="off" or str=="f" then
+ return false
+ end
+ end
+ return default
+end
+
+end -- closure
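+-- Illustrative use of the boolean helpers above (a sketch):
+--
+--   toboolean("yes",true)          --> true  (tolerant mode accepts yes/on/t)
+--   string.is_boolean("off")       --> false
+--   string.is_boolean("maybe",42)  --> 42    (default when undecidable)
+--   boolean.tonumber(true)         --> 1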
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['l-math']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local floor,sin,cos,tan=math.floor,math.sin,math.cos,math.tan
+if not math.round then
+ function math.round(x) return floor(x+0.5) end
+end
+if not math.div then
+ function math.div(n,m) return floor(n/m) end
+end
+if not math.mod then
+ function math.mod(n,m) return n%m end
+end
+local pipi=2*math.pi/360
+if not math.sind then
+ function math.sind(d) return sin(d*pipi) end
+ function math.cosd(d) return cos(d*pipi) end
+ function math.tand(d) return tan(d*pipi) end
+end
+if not math.odd then
+ function math.odd (n) return n%2~=0 end
+ function math.even(n) return n%2==0 end
+end
+
+end -- closure
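+-- Illustrative use of the math additions above (a sketch):
+--
+--   math.round(1.5)   --> 2
+--   math.div(7,2)     --> 3
+--   math.sind(90)     --> 1 (sine of an angle given in degrees)
+--   math.odd(3)       --> true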
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['util-str']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+utilities=utilities or {}
+utilities.strings=utilities.strings or {}
+local strings=utilities.strings
+local format,gsub,rep,sub=string.format,string.gsub,string.rep,string.sub
+local load,dump=load,string.dump
+local tonumber,type,tostring=tonumber,type,tostring
+local unpack,concat=table.unpack,table.concat
+local P,V,C,S,R,Ct,Cs,Cp,Carg,Cc=lpeg.P,lpeg.V,lpeg.C,lpeg.S,lpeg.R,lpeg.Ct,lpeg.Cs,lpeg.Cp,lpeg.Carg,lpeg.Cc
+local patterns,lpegmatch=lpeg.patterns,lpeg.match
+local utfchar,utfbyte=utf.char,utf.byte
+local loadstripped=_LUAVERSION<5.2 and load or function(str)
+ return load(dump(load(str),true))
+end
+if not number then number={} end
+local stripper=patterns.stripzeros
+local function points(n)
+ return (not n or n==0) and "0pt" or lpegmatch(stripper,format("%.5fpt",n/65536))
+end
+local function basepoints(n)
+ return (not n or n==0) and "0bp" or lpegmatch(stripper,format("%.5fbp",n*(7200/7227)/65536))
+end
+number.points=points
+number.basepoints=basepoints
+local rubish=patterns.spaceortab^0*patterns.newline
+local anyrubish=patterns.spaceortab+patterns.newline
+local anything=patterns.anything
+local stripped=(patterns.spaceortab^1/"")*patterns.newline
+local leading=rubish^0/""
+local trailing=(anyrubish^1*patterns.endofstring)/""
+local redundant=rubish^3/"\n"
+local pattern=Cs(leading*(trailing+redundant+stripped+anything)^0)
+function strings.collapsecrlf(str)
+ return lpegmatch(pattern,str)
+end
+local repeaters={}
+function strings.newrepeater(str,offset)
+ offset=offset or 0
+ local s=repeaters[str]
+ if not s then
+ s={}
+ repeaters[str]=s
+ end
+ local t=s[offset]
+ if t then
+ return t
+ end
+ t={}
+ setmetatable(t,{ __index=function(t,k)
+ if not k then
+ return ""
+ end
+ local n=k+offset
+ local s=n>0 and rep(str,n) or ""
+ t[k]=s
+ return s
+ end })
+ s[offset]=t
+ return t
+end
+local extra,tab,start=0,0,4
+local nspaces=strings.newrepeater(" ")
+string.nspaces=nspaces
+local pattern=Carg(1)/function(t)
+ extra,tab,start=0,t or 7,1
+ end*Cs((
+ Cp()*patterns.tab/function(position)
+ local current=(position-start+1)+extra
+ local spaces=tab-(current-1)%tab
+ if spaces>0 then
+ extra=extra+spaces-1
+ return nspaces[spaces]
+ else
+ return ""
+ end
+ end+patterns.newline*Cp()/function(position)
+ extra,start=0,position
+ end+patterns.anything
+ )^1)
+function strings.tabtospace(str,tab)
+ return lpegmatch(pattern,str,1,tab or 7)
+end
+function strings.striplong(str)
+ str=gsub(str,"^%s*","")
+ str=gsub(str,"[\n\r]+ *","\n")
+ return str
+end
+function strings.nice(str)
+ str=gsub(str,"[:%-+_]+"," ")
+ return str
+end
+local n=0
+local sequenced=table.sequenced
+function string.autodouble(s,sep)
+ if s==nil then
+ return '""'
+ end
+ local t=type(s)
+ if t=="number" then
+ return tostring(s)
+ end
+ if t=="table" then
+ return ('"'..sequenced(s,sep or ",")..'"')
+ end
+ return ('"'..tostring(s)..'"')
+end
+function string.autosingle(s,sep)
+ if s==nil then
+ return "''"
+ end
+ local t=type(s)
+ if t=="number" then
+ return tostring(s)
+ end
+ if t=="table" then
+ return ("'"..sequenced(s,sep or ",").."'")
+ end
+ return ("'"..tostring(s).."'")
+end
+local tracedchars={}
+string.tracedchars=tracedchars
+strings.tracers=tracedchars
+function string.tracedchar(b)
+ if type(b)=="number" then
+ return tracedchars[b] or (utfchar(b).." (U+"..format('%05X',b)..")")
+ else
+ local c=utfbyte(b)
+ return tracedchars[c] or (b.." (U+"..format('%05X',c)..")")
+ end
+end
+function number.signed(i)
+ if i>0 then
+ return "+",i
+ else
+ return "-",-i
+ end
+end
+local preamble=[[
+local type = type
+local tostring = tostring
+local tonumber = tonumber
+local format = string.format
+local concat = table.concat
+local signed = number.signed
+local points = number.points
+local basepoints = number.basepoints
+local utfchar = utf.char
+local utfbyte = utf.byte
+local lpegmatch = lpeg.match
+local nspaces = string.nspaces
+local tracedchar = string.tracedchar
+local autosingle = string.autosingle
+local autodouble = string.autodouble
+local sequenced = table.sequenced
+]]
+local template=[[
+%s
+%s
+return function(%s) return %s end
+]]
+local arguments={ "a1" }
+setmetatable(arguments,{ __index=function(t,k)
+ local v=t[k-1]..",a"..k
+ t[k]=v
+ return v
+ end
+})
+local prefix_any=C((S("+- .")+R("09"))^0)
+local prefix_tab=C((1-R("az","AZ","09","%%"))^0)
+local format_s=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("format('%%%ss',a%s)",f,n)
+ else
+ return format("(a%s or '')",n)
+ end
+end
+local format_S=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("format('%%%ss',tostring(a%s))",f,n)
+ else
+ return format("tostring(a%s)",n)
+ end
+end
+local format_q=function()
+ n=n+1
+ return format("(a%s and format('%%q',a%s) or '')",n,n)
+end
+local format_Q=function()
+ n=n+1
+ return format("format('%%q',tostring(a%s))",n)
+end
+local format_i=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("format('%%%si',a%s)",f,n)
+ else
+ return format("a%s",n)
+ end
+end
+local format_d=format_i
+local format_I=function(f)
+ n=n+1
+ return format("format('%%s%%%si',signed(a%s))",f,n)
+end
+local format_f=function(f)
+ n=n+1
+ return format("format('%%%sf',a%s)",f,n)
+end
+local format_g=function(f)
+ n=n+1
+ return format("format('%%%sg',a%s)",f,n)
+end
+local format_G=function(f)
+ n=n+1
+ return format("format('%%%sG',a%s)",f,n)
+end
+local format_e=function(f)
+ n=n+1
+ return format("format('%%%se',a%s)",f,n)
+end
+local format_E=function(f)
+ n=n+1
+ return format("format('%%%sE',a%s)",f,n)
+end
+local format_x=function(f)
+ n=n+1
+ return format("format('%%%sx',a%s)",f,n)
+end
+local format_X=function(f)
+ n=n+1
+ return format("format('%%%sX',a%s)",f,n)
+end
+local format_o=function(f)
+ n=n+1
+ return format("format('%%%so',a%s)",f,n)
+end
+local format_c=function()
+ n=n+1
+ return format("utfchar(a%s)",n)
+end
+local format_C=function()
+ n=n+1
+ return format("tracedchar(a%s)",n)
+end
+local format_r=function(f)
+ n=n+1
+ return format("format('%%%s.0f',a%s)",f,n)
+end
+local format_h=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('0x%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_H=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('0x%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_u=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('u+%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_U=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('U+%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_p=function()
+ n=n+1
+ return format("points(a%s)",n)
+end
+local format_b=function()
+ n=n+1
+ return format("basepoints(a%s)",n)
+end
+local format_t=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("concat(a%s,%q)",n,f)
+ else
+ return format("concat(a%s)",n)
+ end
+end
+local format_T=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("sequenced(a%s,%q)",n,f)
+ else
+ return format("sequenced(a%s)",n)
+ end
+end
+local format_l=function()
+ n=n+1
+ return format("(a%s and 'true' or 'false')",n)
+end
+local format_L=function()
+ n=n+1
+ return format("(a%s and 'TRUE' or 'FALSE')",n)
+end
+local format_N=function()
+ n=n+1
+ return format("tostring(tonumber(a%s) or a%s)",n,n)
+end
+local format_a=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("autosingle(a%s,%q)",n,f)
+ else
+ return format("autosingle(a%s)",n)
+ end
+end
+local format_A=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("autodouble(a%s,%q)",n,f)
+ else
+ return format("autodouble(a%s)",n)
+ end
+end
+local format_w=function(f)
+ n=n+1
+ f=tonumber(f)
+ if f then
+ return format("nspaces[%s+a%s]",f,n)
+ else
+ return format("nspaces[a%s]",n)
+ end
+end
+local format_W=function(f)
+ return format("nspaces[%s]",tonumber(f) or 0)
+end
+local format_rest=function(s)
+ return format("%q",s)
+end
+local format_extension=function(extensions,f,name)
+ local extension=extensions[name] or "tostring(%s)"
+ local f=tonumber(f) or 1
+ if f==0 then
+ return extension
+ elseif f==1 then
+ n=n+1
+ local a="a"..n
+ return format(extension,a,a)
+ elseif f<0 then
+ local a="a"..(n+f+1)
+ return format(extension,a,a)
+ else
+ local t={}
+ for i=1,f do
+ n=n+1
+ t[#t+1]="a"..n
+ end
+ return format(extension,unpack(t))
+ end
+end
+local builder=Cs { "start",
+ start=(
+ (
+ P("%")/""*(
+ V("!")
++V("s")+V("q")+V("i")+V("d")+V("f")+V("g")+V("G")+V("e")+V("E")+V("x")+V("X")+V("o")
++V("c")+V("C")+V("S")
++V("Q")
++V("N")
++V("r")+V("h")+V("H")+V("u")+V("U")+V("p")+V("b")+V("t")+V("T")+V("l")+V("L")+V("I")+V("h")
++V("w")
++V("W")
++V("a")
++V("A")
++V("*")
+ )+V("*")
+ )*(P(-1)+Carg(1))
+ )^0,
+ ["s"]=(prefix_any*P("s"))/format_s,
+ ["q"]=(prefix_any*P("q"))/format_q,
+ ["i"]=(prefix_any*P("i"))/format_i,
+ ["d"]=(prefix_any*P("d"))/format_d,
+ ["f"]=(prefix_any*P("f"))/format_f,
+ ["g"]=(prefix_any*P("g"))/format_g,
+ ["G"]=(prefix_any*P("G"))/format_G,
+ ["e"]=(prefix_any*P("e"))/format_e,
+ ["E"]=(prefix_any*P("E"))/format_E,
+ ["x"]=(prefix_any*P("x"))/format_x,
+ ["X"]=(prefix_any*P("X"))/format_X,
+ ["o"]=(prefix_any*P("o"))/format_o,
+ ["S"]=(prefix_any*P("S"))/format_S,
+ ["Q"]=(prefix_any*P("Q"))/format_S,
+ ["N"]=(prefix_any*P("N"))/format_N,
+ ["c"]=(prefix_any*P("c"))/format_c,
+ ["C"]=(prefix_any*P("C"))/format_C,
+ ["r"]=(prefix_any*P("r"))/format_r,
+ ["h"]=(prefix_any*P("h"))/format_h,
+ ["H"]=(prefix_any*P("H"))/format_H,
+ ["u"]=(prefix_any*P("u"))/format_u,
+ ["U"]=(prefix_any*P("U"))/format_U,
+ ["p"]=(prefix_any*P("p"))/format_p,
+ ["b"]=(prefix_any*P("b"))/format_b,
+ ["t"]=(prefix_tab*P("t"))/format_t,
+ ["T"]=(prefix_tab*P("T"))/format_T,
+ ["l"]=(prefix_tab*P("l"))/format_l,
+ ["L"]=(prefix_tab*P("L"))/format_L,
+ ["I"]=(prefix_any*P("I"))/format_I,
+ ["w"]=(prefix_any*P("w"))/format_w,
+ ["W"]=(prefix_any*P("W"))/format_W,
+ ["a"]=(prefix_any*P("a"))/format_a,
+ ["A"]=(prefix_any*P("A"))/format_A,
+ ["*"]=Cs(((1-P("%"))^1+P("%%")/"%%%%")^1)/format_rest,
+ ["!"]=Carg(2)*prefix_any*P("!")*C((1-P("!"))^1)*P("!")/format_extension,
+}
+local direct=Cs (
+ P("%")/""*Cc([[local format = string.format return function(str) return format("%]])*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*Cc([[",str) end]])*P(-1)
+ )
+local function make(t,str)
+ local f
+ local p
+ local p=lpegmatch(direct,str)
+ if p then
+ f=loadstripped(p)()
+ else
+ n=0
+ p=lpegmatch(builder,str,1,"..",t._extensions_)
+ if n>0 then
+ p=format(template,preamble,t._preamble_,arguments[n],p)
+ f=loadstripped(p)()
+ else
+ f=function() return str end
+ end
+ end
+ t[str]=f
+ return f
+end
+local function use(t,fmt,...)
+ return t[fmt](...)
+end
+strings.formatters={}
+function strings.formatters.new()
+ local t={ _extensions_={},_preamble_="",_type_="formatter" }
+ setmetatable(t,{ __index=make,__call=use })
+ return t
+end
+local formatters=strings.formatters.new()
+string.formatters=formatters
+string.formatter=function(str,...) return formatters[str](...) end
+local function add(t,name,template,preamble)
+ if type(t)=="table" and t._type_=="formatter" then
+ t._extensions_[name]=template or "%s"
+ if preamble then
+ t._preamble_=preamble.."\n"..t._preamble_
+ end
+ end
+end
+strings.formatters.add=add
+lpeg.patterns.xmlescape=Cs((P("<")/"&lt;"+P(">")/"&gt;"+P("&")/"&amp;"+P('"')/"&quot;"+P(1))^0)
+lpeg.patterns.texescape=Cs((C(S("#$%\\{}"))/"\\%1"+P(1))^0)
+add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]])
+add(formatters,"tex",[[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]])
+
+end -- closure
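+-- Illustrative use of the formatter framework above (a sketch; the format
+-- strings and values are made up):
+--
+--   local f = string.formatters
+--   f["%s = %p"]("width",65536)         --> "width = 1pt"     (%p prints scaled points)
+--   f["%s = %!xml!"]("tag","<a>")       --> "tag = &lt;a&gt;" (the xml extension added below)
+--   string.formatter("%t",{ "a","b" })  --> "ab"              (%t concatenates a table)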
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luat-basics-gen']={
+ version=1.100,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local dummyfunction=function() end
+local dummyreporter=function(c) return function(...) texio.write_nl(c.." : "..string.formatters(...)) end end
+statistics={
+ register=dummyfunction,
+ starttiming=dummyfunction,
+ stoptiming=dummyfunction,
+ elapsedtime=nil,
+}
+directives={
+ register=dummyfunction,
+ enable=dummyfunction,
+ disable=dummyfunction,
+}
+trackers={
+ register=dummyfunction,
+ enable=dummyfunction,
+ disable=dummyfunction,
+}
+experiments={
+ register=dummyfunction,
+ enable=dummyfunction,
+ disable=dummyfunction,
+}
+storage={
+ register=dummyfunction,
+ shared={},
+}
+logs={
+ new=dummyreporter,
+ reporter=dummyreporter,
+ messenger=dummyreporter,
+ report=dummyfunction,
+}
+callbacks={
+ register=function(n,f) return callback.register(n,f) end,
+}
+utilities={
+ storage={
+ allocate=function(t) return t or {} end,
+ mark=function(t) return t or {} end,
+ },
+}
+characters=characters or {
+ data={}
+}
+texconfig.kpse_init=true
+resolvers=resolvers or {}
+local remapper={
+ otf="opentype fonts",
+ ttf="truetype fonts",
+ ttc="truetype fonts",
+ dfont="truetype fonts",
+ cid="cid maps",
+ cidmap="cid maps",
+ fea="font feature files",
+ pfa="type1 fonts",
+ pfb="type1 fonts",
+}
+function resolvers.findfile(name,fileformat)
+ name=string.gsub(name,"\\","/")
+ if not fileformat or fileformat=="" then
+ fileformat=file.suffix(name)
+ if fileformat=="" then
+ fileformat="tex"
+ end
+ end
+ fileformat=string.lower(fileformat)
+ fileformat=remapper[fileformat] or fileformat
+ local found=kpse.find_file(name,fileformat)
+ if not found or found=="" then
+ found=kpse.find_file(name,"other text files")
+ end
+ return found
+end
+resolvers.findbinfile=resolvers.findfile
+function resolvers.resolve(s)
+ return s
+end
+function resolvers.unresolve(s)
+ return s
+end
+caches={}
+local writable,readables=nil,{}
+if not caches.namespace or caches.namespace=="" or caches.namespace=="context" then
+ caches.namespace='generic'
+end
+do
+ local cachepaths=kpse.expand_path('$TEXMFCACHE') or ""
+ if cachepaths=="" then
+ cachepaths=kpse.expand_path('$TEXMFVAR')
+ end
+ if cachepaths=="" then
+ cachepaths=kpse.expand_path('$VARTEXMF')
+ end
+ if cachepaths=="" then
+ cachepaths="."
+ end
+ cachepaths=string.split(cachepaths,os.type=="windows" and ";" or ":")
+ for i=1,#cachepaths do
+ if file.is_writable(cachepaths[i]) then
+ writable=file.join(cachepaths[i],"luatex-cache")
+ lfs.mkdir(writable)
+ writable=file.join(writable,caches.namespace)
+ lfs.mkdir(writable)
+ break
+ end
+ end
+ for i=1,#cachepaths do
+ if file.is_readable(cachepaths[i]) then
+ readables[#readables+1]=file.join(cachepaths[i],"luatex-cache",caches.namespace)
+ end
+ end
+ if not writable then
+  texio.write_nl("quitting: fix your writable cache path")
+ os.exit()
+ elseif #readables==0 then
+  texio.write_nl("quitting: fix your readable cache path")
+ os.exit()
+ elseif #readables==1 and readables[1]==writable then
+ texio.write(string.format("(using cache: %s)",writable))
+ else
+ texio.write(string.format("(using write cache: %s)",writable))
+ texio.write(string.format("(using read cache: %s)",table.concat(readables," ")))
+ end
+end
+function caches.getwritablepath(category,subcategory)
+ local path=file.join(writable,category)
+ lfs.mkdir(path)
+ path=file.join(path,subcategory)
+ lfs.mkdir(path)
+ return path
+end
+function caches.getreadablepaths(category,subcategory)
+ local t={}
+ for i=1,#readables do
+ t[i]=file.join(readables[i],category,subcategory)
+ end
+ return t
+end
+local function makefullname(path,name)
+ if path and path~="" then
+ name="temp-"..name
+ return file.addsuffix(file.join(path,name),"lua"),file.addsuffix(file.join(path,name),"luc")
+ end
+end
+function caches.is_writable(path,name)
+ local fullname=makefullname(path,name)
+ return fullname and file.is_writable(fullname)
+end
+function caches.loaddata(paths,name)
+ for i=1,#paths do
+ local data=false
+ local luaname,lucname=makefullname(paths[i],name)
+ if lucname and lfs.isfile(lucname) then
+ texio.write(string.format("(load luc: %s)",lucname))
+ data=loadfile(lucname)
+ if data then
+ data=data()
+ end
+ if data then
+ return data
+ else
+ texio.write(string.format("(loading failed: %s)",lucname))
+ end
+ end
+ if luaname and lfs.isfile(luaname) then
+ texio.write(string.format("(load lua: %s)",luaname))
+ data=loadfile(luaname)
+ if data then
+ data=data()
+ end
+ if data then
+ return data
+ end
+ end
+ end
+end
+function caches.savedata(path,name,data)
+ local luaname,lucname=makefullname(path,name)
+ if luaname then
+ texio.write(string.format("(save: %s)",luaname))
+ table.tofile(luaname,data,true,{ reduce=true })
+ if lucname and type(caches.compile)=="function" then
+ os.remove(lucname)
+ texio.write(string.format("(save: %s)",lucname))
+ caches.compile(data,luaname,lucname)
+ end
+ end
+end
+caches.compilemethod="both"
+function caches.compile(data,luaname,lucname)
+ local done=false
+ if caches.compilemethod=="luac" or caches.compilemethod=="both" then
+ done=os.spawn("texluac -o "..string.quoted(lucname).." -s "..string.quoted(luaname))==0
+ end
+ if not done and (caches.compilemethod=="dump" or caches.compilemethod=="both") then
+ local d=io.loaddata(luaname)
+ if not d or d=="" then
+ d=table.serialize(data,true)
+ end
+ if d and d~="" then
+ local f=io.open(lucname,'w')
+ if f then
+ local s=loadstring(d)
+ if s then
+ f:write(string.dump(s,true))
+ end
+ f:close()
+ end
+ end
+ end
+end
+function table.setmetatableindex(t,f)
+ setmetatable(t,{ __index=f })
+end
+
+end -- closure
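+-- Illustrative use of the generic resolver/cache stubs above (a sketch; the
+-- font file name is an assumption and the cache location depends on TEXMFCACHE):
+--
+--   resolvers.findfile("lmroman10-regular.otf")          -- kpse lookup as "opentype fonts"
+--   local path = caches.getwritablepath("fonts","otf")   -- .../luatex-cache/generic/fonts/otf
+--   caches.savedata(path,"demo",{ hello="world" })       -- writes demo.lua plus a compiled .luc copy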
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['data-con']={
+ version=1.100,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local format,lower,gsub=string.format,string.lower,string.gsub
+local trace_cache=false trackers.register("resolvers.cache",function(v) trace_cache=v end)
+local trace_containers=false trackers.register("resolvers.containers",function(v) trace_containers=v end)
+local trace_storage=false trackers.register("resolvers.storage",function(v) trace_storage=v end)
+containers=containers or {}
+local containers=containers
+containers.usecache=true
+local report_containers=logs.reporter("resolvers","containers")
+local allocated={}
+local mt={
+ __index=function(t,k)
+ if k=="writable" then
+ local writable=caches.getwritablepath(t.category,t.subcategory) or { "." }
+ t.writable=writable
+ return writable
+ elseif k=="readables" then
+ local readables=caches.getreadablepaths(t.category,t.subcategory) or { "." }
+ t.readables=readables
+ return readables
+ end
+ end,
+ __storage__=true
+}
+function containers.define(category,subcategory,version,enabled)
+ if category and subcategory then
+ local c=allocated[category]
+ if not c then
+ c={}
+ allocated[category]=c
+ end
+ local s=c[subcategory]
+ if not s then
+ s={
+ category=category,
+ subcategory=subcategory,
+ storage={},
+ enabled=enabled,
+ version=version or math.pi,
+ trace=false,
+ }
+ setmetatable(s,mt)
+ c[subcategory]=s
+ end
+ return s
+ end
+end
+function containers.is_usable(container,name)
+ return container.enabled and caches and caches.is_writable(container.writable,name)
+end
+function containers.is_valid(container,name)
+ if name and name~="" then
+ local storage=container.storage[name]
+ return storage and storage.cache_version==container.version
+ else
+ return false
+ end
+end
+function containers.read(container,name)
+ local storage=container.storage
+ local stored=storage[name]
+ if not stored and container.enabled and caches and containers.usecache then
+ stored=caches.loaddata(container.readables,name)
+ if stored and stored.cache_version==container.version then
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","load",container.subcategory,name)
+ end
+ else
+ stored=nil
+ end
+ storage[name]=stored
+ elseif stored then
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","reuse",container.subcategory,name)
+ end
+ end
+ return stored
+end
+function containers.write(container,name,data)
+ if data then
+ data.cache_version=container.version
+ if container.enabled and caches then
+ local unique,shared=data.unique,data.shared
+ data.unique,data.shared=nil,nil
+ caches.savedata(container.writable,name,data)
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","save",container.subcategory,name)
+ end
+ data.unique,data.shared=unique,shared
+ end
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","store",container.subcategory,name)
+ end
+ container.storage[name]=data
+ end
+ return data
+end
+function containers.content(container,name)
+ return container.storage[name]
+end
+function containers.cleanname(name)
+ return (gsub(lower(name),"[^%w%d]+","-"))
+end
+
+end -- closure
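+-- Illustrative use of the container layer above (a sketch; the category and
+-- key names are made up):
+--
+--   local c = containers.define("fonts","demo",1.001,true)
+--   containers.write(c,"somekey",{ foo="bar" })   -- tags the data with cache_version and saves it
+--   local d = containers.read(c,"somekey")        -- from memory, else from the readable cache paths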
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-fonts-nod']={
+ version=1.001,
+ comment="companion to luatex-fonts.lua",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+if tex.attribute[0]~=0 then
+ texio.write_nl("log","!")
+ texio.write_nl("log","! Attribute 0 is reserved for ConTeXt's font feature management and has to be")
+ texio.write_nl("log","! set to zero. Also, some attributes in the range 1-255 are used for special")
+ texio.write_nl("log","! purposes so setting them at the TeX end might break the font handler.")
+ texio.write_nl("log","!")
+ tex.attribute[0]=0
+end
+attributes=attributes or {}
+attributes.unsetvalue=-0x7FFFFFFF
+local numbers,last={},127
+attributes.private=attributes.private or function(name)
+ local number=numbers[name]
+ if not number then
+ if last<255 then
+ last=last+1
+ end
+ number=last
+ numbers[name]=number
+ end
+ return number
+end
+nodes={}
+nodes.pool={}
+nodes.handlers={}
+local nodecodes={} for k,v in next,node.types () do nodecodes[string.gsub(v,"_","")]=k end
+local whatcodes={} for k,v in next,node.whatsits() do whatcodes[string.gsub(v,"_","")]=k end
+local glyphcodes={ [0]="character","glyph","ligature","ghost","left","right" }
+nodes.nodecodes=nodecodes
+nodes.whatcodes=whatcodes
+nodes.whatsitcodes=whatcodes
+nodes.glyphcodes=glyphcodes
+local free_node=node.free
+local remove_node=node.remove
+local new_node=node.new
+local traverse_id=node.traverse_id
+local math_code=nodecodes.math
+nodes.handlers.protectglyphs=node.protect_glyphs
+nodes.handlers.unprotectglyphs=node.unprotect_glyphs
+function nodes.remove(head,current,free_too)
+ local t=current
+ head,current=remove_node(head,current)
+ if t then
+ if free_too then
+ free_node(t)
+ t=nil
+ else
+ t.next,t.prev=nil,nil
+ end
+ end
+ return head,current,t
+end
+function nodes.delete(head,current)
+ return nodes.remove(head,current,true)
+end
+nodes.before=node.insert_before
+nodes.after=node.insert_after
+function nodes.pool.kern(k)
+ local n=new_node("kern",1)
+ n.kern=k
+ return n
+end
+function nodes.endofmath(n)
+ for n in traverse_id(math_code,n.next) do
+ return n
+ end
+end
+
+end -- closure
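+-- Illustrative use of the minimal node helpers above (a sketch):
+--
+--   local attr = attributes.private("myfeature")  -- allocates a number in the 128..255 range
+--   local k = nodes.pool.kern(65536)              -- a 1pt kern node
+--   head = nodes.delete(head,current)             -- remove and free a node from a list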
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-ini']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local allocate=utilities.storage.allocate
+local report_defining=logs.reporter("fonts","defining")
+fonts=fonts or {}
+local fonts=fonts
+fonts.hashes={ identifiers=allocate() }
+fonts.tables=fonts.tables or {}
+fonts.helpers=fonts.helpers or {}
+fonts.tracers=fonts.tracers or {}
+fonts.specifiers=fonts.specifiers or {}
+fonts.analyzers={}
+fonts.readers={}
+fonts.definers={ methods={} }
+fonts.loggers={ register=function() end }
+fontloader.totable=fontloader.to_table
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-con']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local next,tostring,rawget=next,tostring,rawget
+local format,match,lower,gsub=string.format,string.match,string.lower,string.gsub
+local utfbyte=utf.byte
+local sort,insert,concat,sortedkeys,serialize,fastcopy=table.sort,table.insert,table.concat,table.sortedkeys,table.serialize,table.fastcopy
+local derivetable=table.derive
+local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end)
+local trace_scaling=false trackers.register("fonts.scaling",function(v) trace_scaling=v end)
+local report_defining=logs.reporter("fonts","defining")
+local fonts=fonts
+local constructors=fonts.constructors or {}
+fonts.constructors=constructors
+local handlers=fonts.handlers or {}
+fonts.handlers=handlers
+local allocate=utilities.storage.allocate
+local setmetatableindex=table.setmetatableindex
+constructors.dontembed=allocate()
+constructors.autocleanup=true
+constructors.namemode="fullpath"
+constructors.version=1.01
+constructors.cache=containers.define("fonts","constructors",constructors.version,false)
+constructors.privateoffset=0xF0000
+constructors.keys={
+ properties={
+ encodingbytes="number",
+ embedding="number",
+ cidinfo={},
+ format="string",
+ fontname="string",
+ fullname="string",
+ filename="filename",
+ psname="string",
+ name="string",
+ virtualized="boolean",
+ hasitalics="boolean",
+ autoitalicamount="basepoints",
+ nostackmath="boolean",
+ noglyphnames="boolean",
+ mode="string",
+ hasmath="boolean",
+ mathitalics="boolean",
+ textitalics="boolean",
+ finalized="boolean",
+ },
+ parameters={
+ mathsize="number",
+ scriptpercentage="float",
+ scriptscriptpercentage="float",
+ units="cardinal",
+ designsize="scaledpoints",
+ expansion={
+ stretch="integerscale",
+ shrink="integerscale",
+ step="integerscale",
+ auto="boolean",
+ },
+ protrusion={
+ auto="boolean",
+ },
+ slantfactor="float",
+ extendfactor="float",
+ factor="float",
+ hfactor="float",
+ vfactor="float",
+ size="scaledpoints",
+ units="scaledpoints",
+ scaledpoints="scaledpoints",
+ slantperpoint="scaledpoints",
+ spacing={
+ width="scaledpoints",
+ stretch="scaledpoints",
+ shrink="scaledpoints",
+ extra="scaledpoints",
+ },
+ xheight="scaledpoints",
+ quad="scaledpoints",
+ ascender="scaledpoints",
+ descender="scaledpoints",
+ synonyms={
+ space="spacing.width",
+ spacestretch="spacing.stretch",
+ spaceshrink="spacing.shrink",
+ extraspace="spacing.extra",
+ x_height="xheight",
+ space_stretch="spacing.stretch",
+ space_shrink="spacing.shrink",
+ extra_space="spacing.extra",
+ em="quad",
+ ex="xheight",
+ slant="slantperpoint",
+ },
+ },
+ description={
+ width="basepoints",
+ height="basepoints",
+ depth="basepoints",
+ boundingbox={},
+ },
+ character={
+ width="scaledpoints",
+ height="scaledpoints",
+ depth="scaledpoints",
+ italic="scaledpoints",
+ },
+}
+local designsizes=allocate()
+constructors.designsizes=designsizes
+local loadedfonts=allocate()
+constructors.loadedfonts=loadedfonts
+local factors={
+ pt=65536.0,
+ bp=65781.8,
+}
+function constructors.setfactor(f)
+ constructors.factor=factors[f or 'pt'] or factors.pt
+end
+constructors.setfactor()
+function constructors.scaled(scaledpoints,designsize)
+ if scaledpoints<0 then
+ if designsize then
+ local factor=constructors.factor
+ if designsize>factor then
+ return (- scaledpoints/1000)*designsize
+ else
+ return (- scaledpoints/1000)*designsize*factor
+ end
+ else
+   return (- scaledpoints/1000)*10*constructors.factor
+ end
+ else
+ return scaledpoints
+ end
+end
+function constructors.cleanuptable(tfmdata)
+ if constructors.autocleanup and tfmdata.properties.virtualized then
+ for k,v in next,tfmdata.characters do
+ if v.commands then v.commands=nil end
+ end
+ end
+end
+function constructors.calculatescale(tfmdata,scaledpoints)
+ local parameters=tfmdata.parameters
+ if scaledpoints<0 then
+ scaledpoints=(- scaledpoints/1000)*(tfmdata.designsize or parameters.designsize)
+ end
+ return scaledpoints,scaledpoints/(parameters.units or 1000)
+end
+local unscaled={
+ ScriptPercentScaleDown=true,
+ ScriptScriptPercentScaleDown=true,
+ RadicalDegreeBottomRaisePercent=true
+}
+function constructors.assignmathparameters(target,original)
+ local mathparameters=original.mathparameters
+ if mathparameters and next(mathparameters) then
+ local targetparameters=target.parameters
+ local targetproperties=target.properties
+ local targetmathparameters={}
+ local factor=targetproperties.math_is_scaled and 1 or targetparameters.factor
+ for name,value in next,mathparameters do
+ if unscaled[name] then
+ targetmathparameters[name]=value
+ else
+ targetmathparameters[name]=value*factor
+ end
+ end
+ if not targetmathparameters.FractionDelimiterSize then
+ targetmathparameters.FractionDelimiterSize=1.01*targetparameters.size
+ end
+ if not mathparameters.FractionDelimiterDisplayStyleSize then
+ targetmathparameters.FractionDelimiterDisplayStyleSize=2.40*targetparameters.size
+ end
+ target.mathparameters=targetmathparameters
+ end
+end
+function constructors.beforecopyingcharacters(target,original)
+end
+function constructors.aftercopyingcharacters(target,original)
+end
+function constructors.enhanceparameters(parameters)
+ local xheight=parameters.x_height
+ local quad=parameters.quad
+ local space=parameters.space
+ local stretch=parameters.space_stretch
+ local shrink=parameters.space_shrink
+ local extra=parameters.extra_space
+ local slant=parameters.slant
+ parameters.xheight=xheight
+ parameters.spacestretch=stretch
+ parameters.spaceshrink=shrink
+ parameters.extraspace=extra
+ parameters.em=quad
+ parameters.ex=xheight
+ parameters.slantperpoint=slant
+ parameters.spacing={
+ width=space,
+ stretch=stretch,
+ shrink=shrink,
+ extra=extra,
+ }
+end
+function constructors.scale(tfmdata,specification)
+ local target={}
+ if tonumber(specification) then
+ specification={ size=specification }
+ end
+ local scaledpoints=specification.size
+ local relativeid=specification.relativeid
+ local properties=tfmdata.properties or {}
+ local goodies=tfmdata.goodies or {}
+ local resources=tfmdata.resources or {}
+ local descriptions=tfmdata.descriptions or {}
+ local characters=tfmdata.characters or {}
+ local changed=tfmdata.changed or {}
+ local shared=tfmdata.shared or {}
+ local parameters=tfmdata.parameters or {}
+ local mathparameters=tfmdata.mathparameters or {}
+ local targetcharacters={}
+ local targetdescriptions=derivetable(descriptions)
+ local targetparameters=derivetable(parameters)
+ local targetproperties=derivetable(properties)
+ local targetgoodies=goodies
+ target.characters=targetcharacters
+ target.descriptions=targetdescriptions
+ target.parameters=targetparameters
+ target.properties=targetproperties
+ target.goodies=targetgoodies
+ target.shared=shared
+ target.resources=resources
+ target.unscaled=tfmdata
+ local mathsize=tonumber(specification.mathsize) or 0
+ local textsize=tonumber(specification.textsize) or scaledpoints
+ local forcedsize=tonumber(parameters.mathsize ) or 0
+ local extrafactor=tonumber(specification.factor ) or 1
+ if (mathsize==2 or forcedsize==2) and parameters.scriptpercentage then
+ scaledpoints=parameters.scriptpercentage*textsize/100
+ elseif (mathsize==3 or forcedsize==3) and parameters.scriptscriptpercentage then
+ scaledpoints=parameters.scriptscriptpercentage*textsize/100
+ elseif forcedsize>1000 then
+ scaledpoints=forcedsize
+ end
+ targetparameters.mathsize=mathsize
+ targetparameters.textsize=textsize
+ targetparameters.forcedsize=forcedsize
+ targetparameters.extrafactor=extrafactor
+ local tounicode=resources.tounicode
+ local defaultwidth=resources.defaultwidth or 0
+ local defaultheight=resources.defaultheight or 0
+ local defaultdepth=resources.defaultdepth or 0
+ local units=parameters.units or 1000
+ if target.fonts then
+ target.fonts=fastcopy(target.fonts)
+ end
+ targetproperties.language=properties.language or "dflt"
+ targetproperties.script=properties.script or "dflt"
+ targetproperties.mode=properties.mode or "base"
+ local askedscaledpoints=scaledpoints
+ local scaledpoints,delta=constructors.calculatescale(tfmdata,scaledpoints)
+ local hdelta=delta
+ local vdelta=delta
+ target.designsize=parameters.designsize
+ target.units_per_em=units
+ local direction=properties.direction or tfmdata.direction or 0
+ target.direction=direction
+ properties.direction=direction
+ target.size=scaledpoints
+ target.encodingbytes=properties.encodingbytes or 1
+ target.embedding=properties.embedding or "subset"
+ target.tounicode=1
+ target.cidinfo=properties.cidinfo
+ target.format=properties.format
+ local fontname=properties.fontname or tfmdata.fontname
+ local fullname=properties.fullname or tfmdata.fullname
+ local filename=properties.filename or tfmdata.filename
+ local psname=properties.psname or tfmdata.psname
+ local name=properties.name or tfmdata.name
+ if not psname or psname=="" then
+ psname=fontname or (fullname and fonts.names.cleanname(fullname))
+ end
+ target.fontname=fontname
+ target.fullname=fullname
+ target.filename=filename
+ target.psname=psname
+ target.name=name
+ properties.fontname=fontname
+ properties.fullname=fullname
+ properties.filename=filename
+ properties.psname=psname
+ properties.name=name
+ local expansion=parameters.expansion
+ if expansion then
+ target.stretch=expansion.stretch
+ target.shrink=expansion.shrink
+ target.step=expansion.step
+ target.auto_expand=expansion.auto
+ end
+ local protrusion=parameters.protrusion
+ if protrusion then
+ target.auto_protrude=protrusion.auto
+ end
+ local extendfactor=parameters.extendfactor or 0
+ if extendfactor~=0 and extendfactor~=1 then
+ hdelta=hdelta*extendfactor
+ target.extend=extendfactor*1000
+ else
+ target.extend=1000
+ end
+ local slantfactor=parameters.slantfactor or 0
+ if slantfactor~=0 then
+ target.slant=slantfactor*1000
+ else
+ target.slant=0
+ end
+ targetparameters.factor=delta
+ targetparameters.hfactor=hdelta
+ targetparameters.vfactor=vdelta
+ targetparameters.size=scaledpoints
+ targetparameters.units=units
+ targetparameters.scaledpoints=askedscaledpoints
+ local isvirtual=properties.virtualized or tfmdata.type=="virtual"
+ local hasquality=target.auto_expand or target.auto_protrude
+ local hasitalics=properties.hasitalics
+ local autoitalicamount=properties.autoitalicamount
+ local stackmath=not properties.nostackmath
+ local nonames=properties.noglyphnames
+ local nodemode=properties.mode=="node"
+ if changed and not next(changed) then
+ changed=false
+ end
+ target.type=isvirtual and "virtual" or "real"
+ target.postprocessors=tfmdata.postprocessors
+ local targetslant=(parameters.slant or parameters[1] or 0)
+ local targetspace=(parameters.space or parameters[2] or 0)*hdelta
+ local targetspace_stretch=(parameters.space_stretch or parameters[3] or 0)*hdelta
+ local targetspace_shrink=(parameters.space_shrink or parameters[4] or 0)*hdelta
+ local targetx_height=(parameters.x_height or parameters[5] or 0)*vdelta
+ local targetquad=(parameters.quad or parameters[6] or 0)*hdelta
+ local targetextra_space=(parameters.extra_space or parameters[7] or 0)*hdelta
+ targetparameters.slant=targetslant
+ targetparameters.space=targetspace
+ targetparameters.space_stretch=targetspace_stretch
+ targetparameters.space_shrink=targetspace_shrink
+ targetparameters.x_height=targetx_height
+ targetparameters.quad=targetquad
+ targetparameters.extra_space=targetextra_space
+ local ascender=parameters.ascender
+ if ascender then
+ targetparameters.ascender=delta*ascender
+ end
+ local descender=parameters.descender
+ if descender then
+ targetparameters.descender=delta*descender
+ end
+ constructors.enhanceparameters(targetparameters)
+ local protrusionfactor=(targetquad~=0 and 1000/targetquad) or 0
+ local scaledwidth=defaultwidth*hdelta
+ local scaledheight=defaultheight*vdelta
+ local scaleddepth=defaultdepth*vdelta
+ local hasmath=(properties.hasmath or next(mathparameters)) and true
+ if hasmath then
+ constructors.assignmathparameters(target,tfmdata)
+ properties.hasmath=true
+ target.nomath=false
+ target.MathConstants=target.mathparameters
+ else
+ properties.hasmath=false
+ target.nomath=true
+ target.mathparameters=nil
+ end
+ local italickey="italic"
+ local useitalics=true
+ if hasmath then
+ autoitalicamount=false
+ elseif properties.textitalics then
+ italickey="italic_correction"
+ useitalics=false
+ if properties.delaytextitalics then
+ autoitalicamount=false
+ end
+ end
+ if trace_defining then
+ report_defining("defining tfm, name %a, fullname %a, filename %a, hscale %a, vscale %a, math %a, italics %a",
+ name,fullname,filename,hdelta,vdelta,
+ hasmath and "enabled" or "disabled",useitalics and "enabled" or "disabled")
+ end
+ constructors.beforecopyingcharacters(target,tfmdata)
+ local sharedkerns={}
+ for unicode,character in next,characters do
+ local chr,description,index,touni
+ if changed then
+ local c=changed[unicode]
+ if c then
+ description=descriptions[c] or descriptions[unicode] or character
+ character=characters[c] or character
+ index=description.index or c
+ if tounicode then
+ touni=tounicode[index]
+ if not touni then
+ local d=descriptions[unicode] or characters[unicode]
+ local i=d.index or unicode
+ touni=tounicode[i]
+ end
+ end
+ else
+ description=descriptions[unicode] or character
+ index=description.index or unicode
+ if tounicode then
+ touni=tounicode[index]
+ end
+ end
+ else
+ description=descriptions[unicode] or character
+ index=description.index or unicode
+ if tounicode then
+ touni=tounicode[index]
+ end
+ end
+ local width=description.width
+ local height=description.height
+ local depth=description.depth
+ if width then width=hdelta*width else width=scaledwidth end
+ if height then height=vdelta*height else height=scaledheight end
+ if depth and depth~=0 then
+ depth=delta*depth
+ if nonames then
+ chr={
+ index=index,
+ height=height,
+ depth=depth,
+ width=width,
+ }
+ else
+ chr={
+ name=description.name,
+ index=index,
+ height=height,
+ depth=depth,
+ width=width,
+ }
+ end
+ else
+ if nonames then
+ chr={
+ index=index,
+ height=height,
+ width=width,
+ }
+ else
+ chr={
+ name=description.name,
+ index=index,
+ height=height,
+ width=width,
+ }
+ end
+ end
+ if touni then
+ chr.tounicode=touni
+ end
+ if hasquality then
+ local ve=character.expansion_factor
+ if ve then
+ chr.expansion_factor=ve*1000
+ end
+ local vl=character.left_protruding
+ if vl then
+ chr.left_protruding=protrusionfactor*width*vl
+ end
+ local vr=character.right_protruding
+ if vr then
+ chr.right_protruding=protrusionfactor*width*vr
+ end
+ end
+ if autoitalicamount then
+ local vi=description.italic
+ if not vi then
+ local vi=description.boundingbox[3]-description.width+autoitalicamount
+ if vi>0 then
+ chr[italickey]=vi*hdelta
+ end
+ elseif vi~=0 then
+ chr[italickey]=vi*hdelta
+ end
+ elseif hasitalics then
+ local vi=description.italic
+ if vi and vi~=0 then
+ chr[italickey]=vi*hdelta
+ end
+ end
+ if hasmath then
+ local vn=character.next
+ if vn then
+ chr.next=vn
+ else
+ local vv=character.vert_variants
+ if vv then
+ local t={}
+ for i=1,#vv do
+ local vvi=vv[i]
+ t[i]={
+ ["start"]=(vvi["start"] or 0)*vdelta,
+ ["end"]=(vvi["end"] or 0)*vdelta,
+ ["advance"]=(vvi["advance"] or 0)*vdelta,
+ ["extender"]=vvi["extender"],
+ ["glyph"]=vvi["glyph"],
+ }
+ end
+ chr.vert_variants=t
+ else
+ local hv=character.horiz_variants
+ if hv then
+ local t={}
+ for i=1,#hv do
+ local hvi=hv[i]
+ t[i]={
+ ["start"]=(hvi["start"] or 0)*hdelta,
+ ["end"]=(hvi["end"] or 0)*hdelta,
+ ["advance"]=(hvi["advance"] or 0)*hdelta,
+ ["extender"]=hvi["extender"],
+ ["glyph"]=hvi["glyph"],
+ }
+ end
+ chr.horiz_variants=t
+ end
+ end
+ end
+ local va=character.top_accent
+ if va then
+ chr.top_accent=vdelta*va
+ end
+ if stackmath then
+ local mk=character.mathkerns
+ if mk then
+ local kerns={}
+ local v=mk.top_right if v then local k={} for i=1,#v do local vi=v[i]
+ k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern }
+ end kerns.top_right=k end
+ local v=mk.top_left if v then local k={} for i=1,#v do local vi=v[i]
+ k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern }
+ end kerns.top_left=k end
+ local v=mk.bottom_left if v then local k={} for i=1,#v do local vi=v[i]
+ k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern }
+ end kerns.bottom_left=k end
+ local v=mk.bottom_right if v then local k={} for i=1,#v do local vi=v[i]
+ k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern }
+ end kerns.bottom_right=k end
+ chr.mathkern=kerns
+ end
+ end
+ end
+ if not nodemode then
+ local vk=character.kerns
+ if vk then
+ local s=sharedkerns[vk]
+ if not s then
+ s={}
+ for k,v in next,vk do s[k]=v*hdelta end
+ sharedkerns[vk]=s
+ end
+ chr.kerns=s
+ end
+ local vl=character.ligatures
+ if vl then
+ if true then
+ chr.ligatures=vl
+ else
+ local tt={}
+ for i,l in next,vl do
+ tt[i]=l
+ end
+ chr.ligatures=tt
+ end
+ end
+ end
+ if isvirtual then
+ local vc=character.commands
+ if vc then
+ local ok=false
+ for i=1,#vc do
+ local key=vc[i][1]
+ if key=="right" or key=="down" then
+ ok=true
+ break
+ end
+ end
+ if ok then
+ local tt={}
+ for i=1,#vc do
+ local ivc=vc[i]
+ local key=ivc[1]
+ if key=="right" then
+ tt[i]={ key,ivc[2]*hdelta }
+ elseif key=="down" then
+ tt[i]={ key,ivc[2]*vdelta }
+ elseif key=="rule" then
+ tt[i]={ key,ivc[2]*vdelta,ivc[3]*hdelta }
+ else
+ tt[i]=ivc
+ end
+ end
+ chr.commands=tt
+ else
+ chr.commands=vc
+ end
+ chr.index=nil
+ end
+ end
+ targetcharacters[unicode]=chr
+ end
+ constructors.aftercopyingcharacters(target,tfmdata)
+ return target
+end
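+-- finalize: fill in missing parameters and properties on a tfmdata table (expansion, protrusion, size, units, descriptions, ...) and clear the fields that were moved into them; the finalized flag makes this a one-time operation.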
+function constructors.finalize(tfmdata)
+ if tfmdata.properties and tfmdata.properties.finalized then
+ return
+ end
+ if not tfmdata.characters then
+ return nil
+ end
+ if not tfmdata.goodies then
+ tfmdata.goodies={}
+ end
+ local parameters=tfmdata.parameters
+ if not parameters then
+ return nil
+ end
+ if not parameters.expansion then
+ parameters.expansion={
+ stretch=tfmdata.stretch or 0,
+ shrink=tfmdata.shrink or 0,
+ step=tfmdata.step or 0,
+ auto=tfmdata.auto_expand or false,
+ }
+ end
+ if not parameters.protrusion then
+ parameters.protrusion={
+ auto=auto_protrude
+ }
+ end
+ if not parameters.size then
+ parameters.size=tfmdata.size
+ end
+ if not parameters.extendfactor then
+ parameters.extendfactor=tfmdata.extend or 0
+ end
+ if not parameters.slantfactor then
+ parameters.slantfactor=tfmdata.slant or 0
+ end
+ if not parameters.designsize then
+ parameters.designsize=tfmdata.designsize or 655360
+ end
+ if not parameters.units then
+ parameters.units=tfmdata.units_per_em or 1000
+ end
+ if not tfmdata.descriptions then
+ local descriptions={}
+ setmetatableindex(descriptions,function(t,k) local v={} t[k]=v return v end)
+ tfmdata.descriptions=descriptions
+ end
+ local properties=tfmdata.properties
+ if not properties then
+ properties={}
+ tfmdata.properties=properties
+ end
+ if not properties.virtualized then
+ properties.virtualized=tfmdata.type=="virtual"
+ end
+ if not tfmdata.properties then
+ tfmdata.properties={
+ fontname=tfmdata.fontname,
+ filename=tfmdata.filename,
+ fullname=tfmdata.fullname,
+ name=tfmdata.name,
+ psname=tfmdata.psname,
+ encodingbytes=tfmdata.encodingbytes or 1,
+ embedding=tfmdata.embedding or "subset",
+ tounicode=tfmdata.tounicode or 1,
+ cidinfo=tfmdata.cidinfo or nil,
+ format=tfmdata.format or "type1",
+ direction=tfmdata.direction or 0,
+ }
+ end
+ if not tfmdata.resources then
+ tfmdata.resources={}
+ end
+ if not tfmdata.shared then
+ tfmdata.shared={}
+ end
+ if not properties.hasmath then
+ properties.hasmath=not tfmdata.nomath
+ end
+ tfmdata.MathConstants=nil
+ tfmdata.postprocessors=nil
+ tfmdata.fontname=nil
+ tfmdata.filename=nil
+ tfmdata.fullname=nil
+ tfmdata.name=nil
+ tfmdata.psname=nil
+ tfmdata.encodingbytes=nil
+ tfmdata.embedding=nil
+ tfmdata.tounicode=nil
+ tfmdata.cidinfo=nil
+ tfmdata.format=nil
+ tfmdata.direction=nil
+ tfmdata.type=nil
+ tfmdata.nomath=nil
+ tfmdata.designsize=nil
+ tfmdata.size=nil
+ tfmdata.stretch=nil
+ tfmdata.shrink=nil
+ tfmdata.step=nil
+ tfmdata.auto_expand=nil
+ tfmdata.auto_protrude=nil
+ tfmdata.extend=nil
+ tfmdata.slant=nil
+ tfmdata.units_per_em=nil
+ properties.finalized=true
+ return tfmdata
+end
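+-- feature hashing: hashfeatures builds a stable string from the specification's feature tables (one hasher per category) so that fonts requested with the same feature set share a cache entry.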
+local hashmethods={}
+constructors.hashmethods=hashmethods
+function constructors.hashfeatures(specification)
+ local features=specification.features
+ if features then
+ local t,tn={},0
+ for category,list in next,features do
+ if next(list) then
+ local hasher=hashmethods[category]
+ if hasher then
+ local hash=hasher(list)
+ if hash then
+ tn=tn+1
+ t[tn]=category..":"..hash
+ end
+ end
+ end
+ end
+ if tn>0 then
+ return concat(t," & ")
+ end
+ end
+ return "unknown"
+end
+hashmethods.normal=function(list)
+ local s={}
+ local n=0
+ for k,v in next,list do
+ if not k then
+ elseif k=="number" or k=="features" then
+ else
+ n=n+1
+ s[n]=k
+ end
+ end
+ if n>0 then
+ sort(s)
+ for i=1,n do
+ local k=s[i]
+ s[i]=k..'='..tostring(list[k])
+ end
+ return concat(s,"+")
+ end
+end
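+-- hashinstance: combine the feature hash with the requested size (resolved against design sizes when given as a scale factor below 1000) and optional fallbacks.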
+function constructors.hashinstance(specification,force)
+ local hash,size,fallbacks=specification.hash,specification.size,specification.fallbacks
+ if force or not hash then
+ hash=constructors.hashfeatures(specification)
+ specification.hash=hash
+ end
+ if size<1000 and designsizes[hash] then
+ size=math.round(constructors.scaled(size,designsizes[hash]))
+ specification.size=size
+ end
+ if fallbacks then
+ return hash..' @ '..tostring(size)..' @ '..fallbacks
+ else
+ return hash..' @ '..tostring(size)
+ end
+end
+function constructors.setname(tfmdata,specification)
+ if constructors.namemode=="specification" then
+ local specname=specification.specification
+ if specname then
+ tfmdata.properties.name=specname
+ if trace_defining then
+ report_otf("overloaded fontname %a",specname)
+ end
+ end
+ end
+end
+function constructors.checkedfilename(data)
+ local foundfilename=data.foundfilename
+ if not foundfilename then
+ local askedfilename=data.filename or ""
+ if askedfilename~="" then
+ askedfilename=resolvers.resolve(askedfilename)
+ foundfilename=resolvers.findbinfile(askedfilename,"") or ""
+ if foundfilename=="" then
+ report_defining("source file %a is not found",askedfilename)
+ foundfilename=resolvers.findbinfile(file.basename(askedfilename),"") or ""
+ if foundfilename~="" then
+ report_defining("using source file %a due to cache mismatch",foundfilename)
+ end
+ end
+ end
+ data.foundfilename=foundfilename
+ end
+ return foundfilename
+end
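+-- fonts.formats: table of known font formats; lookups of unknown keys fall back to the lowercased name or the file suffix.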
+local formats=allocate()
+fonts.formats=formats
+setmetatableindex(formats,function(t,k)
+ local l=lower(k)
+  if rawget(t,l) then
+ t[k]=l
+ return l
+ end
+ return rawget(t,file.suffix(l))
+end)
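+-- feature registration: register/set/setindeed hook a feature's initializers, processors and manipulators into the base and node mode tables of a handler (replacing an entry with the same name, or inserting at a given position).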
+local locations={}
+local function setindeed(mode,target,group,name,action,position)
+ local t=target[mode]
+ if not t then
+ report_defining("fatal error in setting feature %a, group %a, mode %a",name,group,mode)
+ os.exit()
+ elseif position then
+ insert(t,position,{ name=name,action=action })
+ else
+ for i=1,#t do
+ local ti=t[i]
+ if ti.name==name then
+ ti.action=action
+ return
+ end
+ end
+ insert(t,{ name=name,action=action })
+ end
+end
+local function set(group,name,target,source)
+ target=target[group]
+ if not target then
+ report_defining("fatal target error in setting feature %a, group %a",name,group)
+ os.exit()
+ end
+ local source=source[group]
+ if not source then
+ report_defining("fatal source error in setting feature %a, group %a",name,group)
+ os.exit()
+ end
+ local node=source.node
+ local base=source.base
+ local position=source.position
+ if node then
+ setindeed("node",target,group,name,node,position)
+ end
+ if base then
+ setindeed("base",target,group,name,base,position)
+ end
+end
+local function register(where,specification)
+ local name=specification.name
+ if name and name~="" then
+ local default=specification.default
+ local description=specification.description
+ local initializers=specification.initializers
+ local processors=specification.processors
+ local manipulators=specification.manipulators
+ local modechecker=specification.modechecker
+ if default then
+ where.defaults[name]=default
+ end
+ if description and description~="" then
+ where.descriptions[name]=description
+ end
+ if initializers then
+ set('initializers',name,where,specification)
+ end
+ if processors then
+ set('processors',name,where,specification)
+ end
+ if manipulators then
+ set('manipulators',name,where,specification)
+ end
+ if modechecker then
+ where.modechecker=modechecker
+ end
+ end
+end
+constructors.registerfeature=register
+function constructors.getfeatureaction(what,where,mode,name)
+ what=handlers[what].features
+ if what then
+ where=what[where]
+ if where then
+ mode=where[mode]
+ if mode then
+ for i=1,#mode do
+ local m=mode[i]
+ if m.name==name then
+ return m.action
+ end
+ end
+ end
+ end
+ end
+end
+function constructors.newhandler(what)
+ local handler=handlers[what]
+ if not handler then
+ handler={}
+ handlers[what]=handler
+ end
+ return handler
+end
+function constructors.newfeatures(what)
+ local handler=handlers[what]
+ local features=handler.features
+ if not features then
+ local tables=handler.tables
+ local statistics=handler.statistics
+ features=allocate {
+ defaults={},
+ descriptions=tables and tables.features or {},
+ used=statistics and statistics.usedfeatures or {},
+ initializers={ base={},node={} },
+ processors={ base={},node={} },
+ manipulators={ base={},node={} },
+ }
+ features.register=function(specification) return register(features,specification) end
+ handler.features=features
+ end
+ return features
+end
+function constructors.checkedfeatures(what,features)
+ local defaults=handlers[what].features.defaults
+ if features and next(features) then
+ features=fastcopy(features)
+ for key,value in next,defaults do
+ if features[key]==nil then
+ features[key]=value
+ end
+ end
+ return features
+ else
+ return fastcopy(defaults)
+ end
+end
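+-- initializefeatures: run the registered initializers for the resolved mode; when an initializer switches the mode (e.g. base to node) the loop restarts so the remaining features are set up for the new mode.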
+function constructors.initializefeatures(what,tfmdata,features,trace,report)
+ if features and next(features) then
+ local properties=tfmdata.properties or {}
+ local whathandler=handlers[what]
+ local whatfeatures=whathandler.features
+ local whatinitializers=whatfeatures.initializers
+ local whatmodechecker=whatfeatures.modechecker
+ local mode=properties.mode or (whatmodechecker and whatmodechecker(tfmdata,features,features.mode)) or features.mode or "base"
+ properties.mode=mode
+ features.mode=mode
+ local done={}
+ while true do
+ local redo=false
+ local initializers=whatfeatures.initializers[mode]
+ if initializers then
+ for i=1,#initializers do
+ local step=initializers[i]
+ local feature=step.name
+ local value=features[feature]
+ if not value then
+ elseif done[feature] then
+ else
+ local action=step.action
+ if trace then
+ report("initializing feature %a to %a for mode %a for font %a",feature,
+ value,mode,tfmdata.properties.fullname)
+ end
+ action(tfmdata,value,features)
+ if mode~=properties.mode or mode~=features.mode then
+ if whatmodechecker then
+ properties.mode=whatmodechecker(tfmdata,features,properties.mode)
+ features.mode=properties.mode
+ end
+ if mode~=properties.mode then
+ mode=properties.mode
+ redo=true
+ end
+ end
+ done[feature]=true
+ end
+ if redo then
+ break
+ end
+ end
+ if not redo then
+ break
+ end
+ else
+ break
+ end
+ end
+ properties.mode=mode
+ return true
+ else
+ return false
+ end
+end
+function constructors.collectprocessors(what,tfmdata,features,trace,report)
+ local processes,nofprocesses={},0
+ if features and next(features) then
+ local properties=tfmdata.properties
+ local whathandler=handlers[what]
+ local whatfeatures=whathandler.features
+ local whatprocessors=whatfeatures.processors
+    local mode=properties.mode
+    local processors=whatprocessors[mode]
+ if processors then
+ for i=1,#processors do
+ local step=processors[i]
+ local feature=step.name
+ if features[feature] then
+ local action=step.action
+ if trace then
+ report("installing feature processor %a for mode %a for font %a",feature,mode,tfmdata.properties.fullname)
+ end
+ if action then
+ nofprocesses=nofprocesses+1
+ processes[nofprocesses]=action
+ end
+ end
+ end
+ elseif trace then
+ report("no feature processors for mode %a for font %a",mode,tfmdata.properties.fullname)
+ end
+ end
+ return processes
+end
+function constructors.applymanipulators(what,tfmdata,features,trace,report)
+ if features and next(features) then
+ local properties=tfmdata.properties
+ local whathandler=handlers[what]
+ local whatfeatures=whathandler.features
+ local whatmanipulators=whatfeatures.manipulators
+    local mode=properties.mode
+    local manipulators=whatmanipulators[mode]
+ if manipulators then
+ for i=1,#manipulators do
+ local step=manipulators[i]
+ local feature=step.name
+ local value=features[feature]
+ if value then
+ local action=step.action
+ if trace then
+ report("applying feature manipulator %a for mode %a for font %a",feature,mode,tfmdata.properties.fullname)
+ end
+ if action then
+ action(tfmdata,feature,value)
+ end
+ end
+ end
+ end
+ end
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-font-enc']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+fonts.encodings={}
+fonts.encodings.agl={}
+setmetatable(fonts.encodings.agl,{ __index=function(t,k)
+ if k=="unicodes" then
+ texio.write(" <loading (extended) adobe glyph list>")
+ local unicodes=dofile(resolvers.findfile("font-age.lua"))
+ fonts.encodings.agl={ unicodes=unicodes }
+ return unicodes
+ else
+ return nil
+ end
+end })
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-cid']={
+ version=1.001,
+ comment="companion to font-otf.lua (cidmaps)",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local format,match,lower=string.format,string.match,string.lower
+local tonumber=tonumber
+local P,S,R,C,V,lpegmatch=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.V,lpeg.match
+local fonts,logs,trackers=fonts,logs,trackers
+local trace_loading=false trackers.register("otf.loading",function(v) trace_loading=v end)
+local report_otf=logs.reporter("fonts","otf loading")
+local cid={}
+fonts.cid=cid
+local cidmap={}
+local cidmax=10
+local number=C(R("09","af","AF")^1)
+local space=S(" \n\r\t")
+local spaces=space^0
+local period=P(".")
+local periods=period*period
+local name=P("/")*C((1-space)^1)
+local unicodes,names={},{}
+local function do_one(a,b)
+ unicodes[tonumber(a)]=tonumber(b,16)
+end
+local function do_range(a,b,c)
+ c=tonumber(c,16)
+ for i=tonumber(a),tonumber(b) do
+ unicodes[i]=c
+ c=c+1
+ end
+end
+local function do_name(a,b)
+ names[tonumber(a)]=b
+end
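+-- grammar for cidmap files: a header of two numbers followed by index/unicode pairs, ".." separated ranges and /name entries.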
+local grammar=P { "start",
+ start=number*spaces*number*V("series"),
+ series=(spaces*(V("one")+V("range")+V("named")))^1,
+ one=(number*spaces*number)/do_one,
+ range=(number*periods*number*spaces*number)/do_range,
+ named=(number*spaces*name)/do_name
+}
+local function loadcidfile(filename)
+ local data=io.loaddata(filename)
+ if data then
+ unicodes,names={},{}
+ lpegmatch(grammar,data)
+    local supplement,registry,ordering=match(filename,"^(.-)%-(.-)%-(.-)%.(.-)$")
+ return {
+ supplement=supplement,
+ registry=registry,
+ ordering=ordering,
+ filename=filename,
+ unicodes=unicodes,
+ names=names
+ }
+ end
+end
+cid.loadfile=loadcidfile
+local template="%s-%s-%s.cidmap"
+local function locate(registry,ordering,supplement)
+ local filename=format(template,registry,ordering,supplement)
+ local hashname=lower(filename)
+ local found=cidmap[hashname]
+ if not found then
+ if trace_loading then
+ report_otf("checking cidmap, registry %a, ordering %a, supplement %a, filename %a",registry,ordering,supplement,filename)
+ end
+ local fullname=resolvers.findfile(filename,'cid') or ""
+ if fullname~="" then
+ found=loadcidfile(fullname)
+ if found then
+ if trace_loading then
+ report_otf("using cidmap file %a",filename)
+ end
+ cidmap[hashname]=found
+ found.usedname=file.basename(filename)
+ end
+ end
+ end
+ return found
+end
+function cid.getmap(specification)
+ if not specification then
+ report_otf("invalid cidinfo specification, table expected")
+ return
+ end
+ local registry=specification.registry
+ local ordering=specification.ordering
+ local supplement=specification.supplement
+  local filename=format(template,registry,ordering,supplement)
+ local found=cidmap[lower(filename)]
+ if found then
+ return found
+ end
+ if trace_loading then
+ report_otf("cidmap needed, registry %a, ordering %a, supplement %a",registry,ordering,supplement)
+ end
+ found=locate(registry,ordering,supplement)
+ if not found then
+ local supnum=tonumber(supplement)
+ local cidnum=nil
+ if supnum<cidmax then
+ for s=supnum+1,cidmax do
+ local c=locate(registry,ordering,s)
+ if c then
+ found,cidnum=c,s
+ break
+ end
+ end
+ end
+ if not found and supnum>0 then
+ for s=supnum-1,0,-1 do
+ local c=locate(registry,ordering,s)
+ if c then
+ found,cidnum=c,s
+ break
+ end
+ end
+ end
+ registry=lower(registry)
+ ordering=lower(ordering)
+ if found and cidnum>0 then
+ for s=0,cidnum-1 do
+ local filename=format(template,registry,ordering,s)
+ if not cidmap[filename] then
+ cidmap[filename]=found
+ end
+ end
+ end
+ end
+ return found
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-map']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local tonumber=tonumber
+local match,format,find,concat,gsub,lower=string.match,string.format,string.find,table.concat,string.gsub,string.lower
+local P,R,S,C,Ct,Cc,lpegmatch=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cc,lpeg.match
+local utfbyte=utf.byte
+local floor=math.floor
+local trace_loading=false trackers.register("fonts.loading",function(v) trace_loading=v end)
+local trace_mapping=false trackers.register("fonts.mapping",function(v) trace_mapping=v end)
+local report_fonts=logs.reporter("fonts","loading")
+local fonts=fonts
+local mappings=fonts.mappings or {}
+fonts.mappings=mappings
+local function loadlumtable(filename)
+ local lumname=file.replacesuffix(file.basename(filename),"lum")
+ local lumfile=resolvers.findfile(lumname,"map") or ""
+ if lumfile~="" and lfs.isfile(lumfile) then
+ if trace_loading or trace_mapping then
+ report_fonts("loading map table %a",lumfile)
+ end
+    local lumunic=dofile(lumfile)
+ return lumunic,lumfile
+ end
+end
+local hex=R("AF","09")
+local hexfour=(hex*hex*hex*hex)/function(s) return tonumber(s,16) end
+local hexsix=(hex^1)/function(s) return tonumber(s,16) end
+local dec=(R("09")^1)/tonumber
+local period=P(".")
+local unicode=P("uni")*(hexfour*(period+P(-1))*Cc(false)+Ct(hexfour^1)*Cc(true))
+local ucode=P("u")*(hexsix*(period+P(-1))*Cc(false)+Ct(hexsix^1)*Cc(true))
+local index=P("index")*dec*Cc(false)
+local parser=unicode+ucode+index
+local parsers={}
+local function makenameparser(str)
+ if not str or str=="" then
+ return parser
+ else
+ local p=parsers[str]
+ if not p then
+ p=P(str)*period*dec*Cc(false)
+ parsers[str]=p
+ end
+ return p
+ end
+end
+local function tounicode16(unicode)
+ if unicode<0x10000 then
+ return format("%04X",unicode)
+ elseif unicode<0x1FFFFFFFFF then
+ return format("%04X%04X",floor(unicode/1024),unicode%1024+0xDC00)
+ else
+ report_fonts("can't convert %a into tounicode",unicode)
+ end
+end
+local function tounicode16sequence(unicodes)
+ local t={}
+ for l=1,#unicodes do
+ local unicode=unicodes[l]
+ if unicode<0x10000 then
+ t[l]=format("%04X",unicode)
+ elseif unicode<0x1FFFFFFFFF then
+ t[l]=format("%04X%04X",floor(unicode/1024),unicode%1024+0xDC00)
+ else
+ report_fonts ("can't convert %a into tounicode",unicode)
+ end
+ end
+ return concat(t)
+end
+local function fromunicode16(str)
+ if #str==4 then
+ return tonumber(str,16)
+ else
+ local l,r=match(str,"(....)(....)")
+ return (tonumber(l,16)- 0xD800)*0x400+tonumber(r,16)-0xDC00
+ end
+end
+mappings.loadlumtable=loadlumtable
+mappings.makenameparser=makenameparser
+mappings.tounicode16=tounicode16
+mappings.tounicode16sequence=tounicode16sequence
+mappings.fromunicode16=fromunicode16
+local separator=S("_.")
+local other=C((1-separator)^1)
+local ligsplitter=Ct(other*(separator*other)^0)
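+-- addtounicode: derive tounicode (UTF-16 hex) entries for glyphs sitting in private or PUA slots, using the adobe glyph list, the cid map, uniXXXX/uXXXXXX style glyph names and ligature-like names split on "_" and ".".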
+function mappings.addtounicode(data,filename)
+ local resources=data.resources
+ local properties=data.properties
+ local descriptions=data.descriptions
+ local unicodes=resources.unicodes
+ if not unicodes then
+ return
+ end
+ unicodes['space']=unicodes['space'] or 32
+ unicodes['hyphen']=unicodes['hyphen'] or 45
+ unicodes['zwj']=unicodes['zwj'] or 0x200D
+ unicodes['zwnj']=unicodes['zwnj'] or 0x200C
+ local private=fonts.constructors.privateoffset
+ local unknown=format("%04X",utfbyte("?"))
+ local unicodevector=fonts.encodings.agl.unicodes
+ local tounicode={}
+ local originals={}
+ resources.tounicode=tounicode
+ resources.originals=originals
+ local lumunic,uparser,oparser
+ local cidinfo,cidnames,cidcodes,usedmap
+ if false then
+ lumunic=loadlumtable(filename)
+ lumunic=lumunic and lumunic.tounicode
+ end
+ cidinfo=properties.cidinfo
+ usedmap=cidinfo and fonts.cid.getmap(cidinfo)
+ if usedmap then
+ oparser=usedmap and makenameparser(cidinfo.ordering)
+ cidnames=usedmap.names
+ cidcodes=usedmap.unicodes
+ end
+ uparser=makenameparser()
+ local ns,nl=0,0
+ for unic,glyph in next,descriptions do
+ local index=glyph.index
+ local name=glyph.name
+ if unic==-1 or unic>=private or (unic>=0xE000 and unic<=0xF8FF) or unic==0xFFFE or unic==0xFFFF then
+ local unicode=lumunic and lumunic[name] or unicodevector[name]
+ if unicode then
+ originals[index]=unicode
+ tounicode[index]=tounicode16(unicode)
+ ns=ns+1
+ end
+ if (not unicode) and usedmap then
+ local foundindex=lpegmatch(oparser,name)
+ if foundindex then
+ unicode=cidcodes[foundindex]
+ if unicode then
+ originals[index]=unicode
+ tounicode[index]=tounicode16(unicode)
+ ns=ns+1
+ else
+ local reference=cidnames[foundindex]
+ if reference then
+ local foundindex=lpegmatch(oparser,reference)
+ if foundindex then
+ unicode=cidcodes[foundindex]
+ if unicode then
+ originals[index]=unicode
+ tounicode[index]=tounicode16(unicode)
+ ns=ns+1
+ end
+ end
+ if not unicode then
+ local foundcodes,multiple=lpegmatch(uparser,reference)
+ if foundcodes then
+ originals[index]=foundcodes
+ if multiple then
+ tounicode[index]=tounicode16sequence(foundcodes)
+ nl=nl+1
+ unicode=true
+ else
+ tounicode[index]=tounicode16(foundcodes)
+ ns=ns+1
+ unicode=foundcodes
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ if not unicode then
+ local split=lpegmatch(ligsplitter,name)
+ local nplit=split and #split or 0
+ if nplit>=2 then
+ local t,n={},0
+ for l=1,nplit do
+ local base=split[l]
+ local u=unicodes[base] or unicodevector[base]
+ if not u then
+ break
+ elseif type(u)=="table" then
+ n=n+1
+ t[n]=u[1]
+ else
+ n=n+1
+ t[n]=u
+ end
+ end
+ if n==0 then
+ elseif n==1 then
+ originals[index]=t[1]
+ tounicode[index]=tounicode16(t[1])
+ else
+ originals[index]=t
+ tounicode[index]=tounicode16sequence(t)
+ end
+ nl=nl+1
+ unicode=true
+ else
+ end
+ end
+ if not unicode then
+ local foundcodes,multiple=lpegmatch(uparser,name)
+ if foundcodes then
+ if multiple then
+ originals[index]=foundcodes
+ tounicode[index]=tounicode16sequence(foundcodes)
+ nl=nl+1
+ unicode=true
+ else
+ originals[index]=foundcodes
+ tounicode[index]=tounicode16(foundcodes)
+ ns=ns+1
+ unicode=foundcodes
+ end
+ end
+ end
+ end
+ end
+ if trace_mapping then
+ for unic,glyph in table.sortedhash(descriptions) do
+ local name=glyph.name
+ local index=glyph.index
+ local toun=tounicode[index]
+ if toun then
+ report_fonts("internal slot %U, name %a, unicode %U, tounicode %a",index,name,unic,toun)
+ else
+ report_fonts("internal slot %U, name %a, unicode %U",index,name,unic)
+ end
+ end
+ end
+ if trace_loading and (ns>0 or nl>0) then
+ report_fonts("%s tounicode entries added, ligatures %s",nl+ns,ns)
+ end
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-fonts-syn']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+fonts.names=fonts.names or {}
+fonts.names.version=1.001
+fonts.names.basename="luatex-fonts-names.lua"
+fonts.names.new_to_old={}
+fonts.names.old_to_new={}
+local data,loaded=nil,false
+local fileformats={ "lua","tex","other text files" }
+function fonts.names.resolve(name,sub)
+ if not loaded then
+ local basename=fonts.names.basename
+ if basename and basename~="" then
+ for i=1,#fileformats do
+ local format=fileformats[i]
+ local foundname=resolvers.findfile(basename,format) or ""
+ if foundname~="" then
+ data=dofile(foundname)
+ texio.write("<font database loaded: ",foundname,">")
+ break
+ end
+ end
+ end
+ loaded=true
+ end
+ if type(data)=="table" and data.version==fonts.names.version then
+ local condensed=string.gsub(string.lower(name),"[^%a%d]","")
+ local found=data.mappings and data.mappings[condensed]
+ if found then
+ local fontname,filename,subfont=found[1],found[2],found[3]
+ if subfont then
+ return filename,fontname
+ else
+ return filename,false
+ end
+ else
+ return name,false
+ end
+ end
+end
+fonts.names.resolvespec=fonts.names.resolve
+function fonts.names.getfilename(askedname,suffix)
+ return ""
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-fonts-tfm']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+local tfm={}
+fonts.handlers.tfm=tfm
+fonts.formats.tfm="type1"
+function fonts.readers.tfm(specification)
+ local fullname=specification.filename or ""
+ if fullname=="" then
+ local forced=specification.forced or ""
+ if forced~="" then
+ fullname=specification.name.."."..forced
+ else
+ fullname=specification.name
+ end
+ end
+ local foundname=resolvers.findbinfile(fullname,'tfm') or ""
+ if foundname=="" then
+ foundname=resolvers.findbinfile(fullname,'ofm') or ""
+ end
+ if foundname~="" then
+ specification.filename=foundname
+ specification.format="ofm"
+ return font.read_tfm(specification.filename,specification.size)
+ end
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-oti']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local lower=string.lower
+local fonts=fonts
+local constructors=fonts.constructors
+local otf=constructors.newhandler("otf")
+local otffeatures=constructors.newfeatures("otf")
+local otftables=otf.tables
+local registerotffeature=otffeatures.register
+local allocate=utilities.storage.allocate
+registerotffeature {
+ name="features",
+ description="initialization of feature handler",
+ default=true,
+}
+local function setmode(tfmdata,value)
+ if value then
+ tfmdata.properties.mode=lower(value)
+ end
+end
+local function setlanguage(tfmdata,value)
+ if value then
+ local cleanvalue=lower(value)
+ local languages=otftables and otftables.languages
+ local properties=tfmdata.properties
+ if not languages then
+ properties.language=cleanvalue
+ elseif languages[value] then
+ properties.language=cleanvalue
+ else
+ properties.language="dflt"
+ end
+ end
+end
+local function setscript(tfmdata,value)
+ if value then
+ local cleanvalue=lower(value)
+ local scripts=otftables and otftables.scripts
+ local properties=tfmdata.properties
+ if not scripts then
+ properties.script=cleanvalue
+ elseif scripts[value] then
+ properties.script=cleanvalue
+ else
+ properties.script="dflt"
+ end
+ end
+end
+registerotffeature {
+ name="mode",
+ description="mode",
+ initializers={
+ base=setmode,
+ node=setmode,
+ }
+}
+registerotffeature {
+ name="language",
+ description="language",
+ initializers={
+ base=setlanguage,
+ node=setlanguage,
+ }
+}
+registerotffeature {
+ name="script",
+ description="script",
+ initializers={
+ base=setscript,
+ node=setscript,
+ }
+}
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-otf']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local utfbyte=utf.byte
+local format,gmatch,gsub,find,match,lower,strip=string.format,string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip
+local type,next,tonumber,tostring=type,next,tonumber,tostring
+local abs=math.abs
+local getn=table.getn
+local lpegmatch=lpeg.match
+local reversed,concat,remove=table.reversed,table.concat,table.remove
+local ioflush=io.flush
+local fastcopy,tohash,derivetable=table.fastcopy,table.tohash,table.derive
+local formatters=string.formatters
+local allocate=utilities.storage.allocate
+local registertracker=trackers.register
+local registerdirective=directives.register
+local starttiming=statistics.starttiming
+local stoptiming=statistics.stoptiming
+local elapsedtime=statistics.elapsedtime
+local findbinfile=resolvers.findbinfile
+local trace_private=false registertracker("otf.private",function(v) trace_private=v end)
+local trace_loading=false registertracker("otf.loading",function(v) trace_loading=v end)
+local trace_features=false registertracker("otf.features",function(v) trace_features=v end)
+local trace_dynamics=false registertracker("otf.dynamics",function(v) trace_dynamics=v end)
+local trace_sequences=false registertracker("otf.sequences",function(v) trace_sequences=v end)
+local trace_markwidth=false registertracker("otf.markwidth",function(v) trace_markwidth=v end)
+local trace_defining=false registertracker("fonts.defining",function(v) trace_defining=v end)
+local report_otf=logs.reporter("fonts","otf loading")
+local fonts=fonts
+local otf=fonts.handlers.otf
+otf.glists={ "gsub","gpos" }
+otf.version=2.742
+otf.cache=containers.define("fonts","otf",otf.version,true)
+local fontdata=fonts.hashes.identifiers
+local chardata=characters and characters.data
+local otffeatures=fonts.constructors.newfeatures("otf")
+local registerotffeature=otffeatures.register
+local enhancers=allocate()
+otf.enhancers=enhancers
+local patches={}
+enhancers.patches=patches
+local definers=fonts.definers
+local readers=fonts.readers
+local constructors=fonts.constructors
+local forceload=false
+local cleanup=0
+local usemetatables=false
+local packdata=true
+local syncspace=true
+local forcenotdef=false
+local wildcard="*"
+local default="dflt"
+local fontloaderfields=fontloader.fields
+local mainfields=nil
+local glyphfields=nil
+registerdirective("fonts.otf.loader.cleanup",function(v) cleanup=tonumber(v) or (v and 1) or 0 end)
+registerdirective("fonts.otf.loader.force",function(v) forceload=v end)
+registerdirective("fonts.otf.loader.usemetatables",function(v) usemetatables=v end)
+registerdirective("fonts.otf.loader.pack",function(v) packdata=v end)
+registerdirective("fonts.otf.loader.syncspace",function(v) syncspace=v end)
+registerdirective("fonts.otf.loader.forcenotdef",function(v) forcenotdef=v end)
+local function load_featurefile(raw,featurefile)
+ if featurefile and featurefile~="" then
+ if trace_loading then
+ report_otf("using featurefile %a",featurefile)
+ end
+ fontloader.apply_featurefile(raw,featurefile)
+ end
+end
+local function showfeatureorder(rawdata,filename)
+ local sequences=rawdata.resources.sequences
+ if sequences and #sequences>0 then
+ if trace_loading then
+ report_otf("font %a has %s sequences",filename,#sequences)
+ report_otf(" ")
+ end
+ for nos=1,#sequences do
+ local sequence=sequences[nos]
+ local typ=sequence.type or "no-type"
+ local name=sequence.name or "no-name"
+ local subtables=sequence.subtables or { "no-subtables" }
+ local features=sequence.features
+ if trace_loading then
+ report_otf("%3i %-15s %-20s [% t]",nos,name,typ,subtables)
+ end
+ if features then
+ for feature,scripts in next,features do
+ local tt={}
+ if type(scripts)=="table" then
+ for script,languages in next,scripts do
+ local ttt={}
+ for language,_ in next,languages do
+ ttt[#ttt+1]=language
+ end
+ tt[#tt+1]=formatters["[%s: % t]"](script,ttt)
+ end
+ if trace_loading then
+ report_otf(" %s: % t",feature,tt)
+ end
+ else
+ if trace_loading then
+ report_otf(" %s: %S",feature,scripts)
+ end
+ end
+ end
+ end
+ end
+ if trace_loading then
+ report_otf("\n")
+ end
+ elseif trace_loading then
+ report_otf("font %a has no sequences",filename)
+ end
+end
+local valid_fields=table.tohash {
+ "ascent",
+ "cidinfo",
+ "copyright",
+ "descent",
+ "design_range_bottom",
+ "design_range_top",
+ "design_size",
+ "encodingchanged",
+ "extrema_bound",
+ "familyname",
+ "fontname",
+ "fontname",
+ "fontstyle_id",
+ "fontstyle_name",
+ "fullname",
+ "hasvmetrics",
+ "horiz_base",
+ "issans",
+ "isserif",
+ "italicangle",
+ "macstyle",
+ "onlybitmaps",
+ "origname",
+ "os2_version",
+ "pfminfo",
+ "serifcheck",
+ "sfd_version",
+ "strokedfont",
+ "strokewidth",
+ "table_version",
+ "ttf_tables",
+ "uni_interp",
+ "uniqueid",
+ "units_per_em",
+ "upos",
+ "use_typo_metrics",
+ "uwidth",
+ "version",
+ "vert_base",
+ "weight",
+ "weight_width_slope_only",
+}
+local ordered_enhancers={
+ "prepare tables",
+ "prepare glyphs",
+ "prepare lookups",
+ "analyze glyphs",
+ "analyze math",
+ "prepare tounicode",
+ "reorganize lookups",
+ "reorganize mark classes",
+ "reorganize anchor classes",
+ "reorganize glyph kerns",
+ "reorganize glyph lookups",
+ "reorganize glyph anchors",
+ "merge kern classes",
+ "reorganize features",
+ "reorganize subtables",
+ "check glyphs",
+ "check metadata",
+ "check extra features",
+ "add duplicates",
+ "check encoding",
+ "cleanup tables",
+}
+local actions=allocate()
+local before=allocate()
+local after=allocate()
+patches.before=before
+patches.after=after
+local function enhance(name,data,filename,raw)
+ local enhancer=actions[name]
+ if enhancer then
+ if trace_loading then
+ report_otf("apply enhancement %a to file %a",name,filename)
+ ioflush()
+ end
+ enhancer(data,filename,raw)
+ else
+ end
+end
+function enhancers.apply(data,filename,raw)
+ local basename=file.basename(lower(filename))
+ if trace_loading then
+ report_otf("%s enhancing file %a","start",filename)
+ end
+ ioflush()
+ for e=1,#ordered_enhancers do
+ local enhancer=ordered_enhancers[e]
+ local b=before[enhancer]
+ if b then
+ for pattern,action in next,b do
+ if find(basename,pattern) then
+ action(data,filename,raw)
+ end
+ end
+ end
+ enhance(enhancer,data,filename,raw)
+ local a=after[enhancer]
+ if a then
+ for pattern,action in next,a do
+ if find(basename,pattern) then
+ action(data,filename,raw)
+ end
+ end
+ end
+ ioflush()
+ end
+ if trace_loading then
+ report_otf("%s enhancing file %a","stop",filename)
+ end
+ ioflush()
+end
+function patches.register(what,where,pattern,action)
+ local pw=patches[what]
+ if pw then
+ local ww=pw[where]
+ if ww then
+ ww[pattern]=action
+ else
+ pw[where]={ [pattern]=action}
+ end
+ end
+end
+function patches.report(fmt,...)
+ if trace_loading then
+ report_otf("patching: %s",formatters[fmt](...))
+ end
+end
+function enhancers.register(what,action)
+ actions[what]=action
+end
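+-- otf.load: open the font with fontloader, run the enhancer pipeline and write the result to the cache; a cached entry is reused unless the font's size or timestamp, or an attached feature file, has changed.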
+function otf.load(filename,format,sub,featurefile)
+ local base=file.basename(file.removesuffix(filename))
+ local name=file.removesuffix(base)
+ local attr=lfs.attributes(filename)
+ local size=attr and attr.size or 0
+ local time=attr and attr.modification or 0
+ if featurefile then
+ name=name.."@"..file.removesuffix(file.basename(featurefile))
+ end
+ if sub=="" then
+ sub=false
+ end
+ local hash=name
+ if sub then
+ hash=hash.."-"..sub
+ end
+ hash=containers.cleanname(hash)
+ local featurefiles
+ if featurefile then
+ featurefiles={}
+ for s in gmatch(featurefile,"[^,]+") do
+ local name=resolvers.findfile(file.addsuffix(s,'fea'),'fea') or ""
+ if name=="" then
+ report_otf("loading error, no featurefile %a",s)
+ else
+ local attr=lfs.attributes(name)
+ featurefiles[#featurefiles+1]={
+ name=name,
+ size=attr and attr.size or 0,
+ time=attr and attr.modification or 0,
+ }
+ end
+ end
+ if #featurefiles==0 then
+ featurefiles=nil
+ end
+ end
+ local data=containers.read(otf.cache,hash)
+ local reload=not data or data.size~=size or data.time~=time
+ if forceload then
+ report_otf("forced reload of %a due to hard coded flag",filename)
+ reload=true
+ end
+ if not reload then
+ local featuredata=data.featuredata
+ if featurefiles then
+ if not featuredata or #featuredata~=#featurefiles then
+ reload=true
+ else
+ for i=1,#featurefiles do
+ local fi,fd=featurefiles[i],featuredata[i]
+ if fi.name~=fd.name or fi.size~=fd.size or fi.time~=fd.time then
+ reload=true
+ break
+ end
+ end
+ end
+ elseif featuredata then
+ reload=true
+ end
+ if reload then
+ report_otf("loading: forced reload due to changed featurefile specification %a",featurefile)
+ end
+ end
+ if reload then
+ report_otf("loading %a, hash %a",filename,hash)
+ local fontdata,messages
+ if sub then
+ fontdata,messages=fontloader.open(filename,sub)
+ else
+ fontdata,messages=fontloader.open(filename)
+ end
+ if fontdata then
+ mainfields=mainfields or (fontloaderfields and fontloaderfields(fontdata))
+ end
+ if trace_loading and messages and #messages>0 then
+ if type(messages)=="string" then
+ report_otf("warning: %s",messages)
+ else
+ for m=1,#messages do
+ report_otf("warning: %S",messages[m])
+ end
+ end
+ else
+ report_otf("loading done")
+ end
+ if fontdata then
+ if featurefiles then
+ for i=1,#featurefiles do
+ load_featurefile(fontdata,featurefiles[i].name)
+ end
+ end
+ local unicodes={
+ }
+ local splitter=lpeg.splitter(" ",unicodes)
+ data={
+ size=size,
+ time=time,
+ format=format,
+ featuredata=featurefiles,
+ resources={
+ filename=resolvers.unresolve(filename),
+ version=otf.version,
+ creator="context mkiv",
+ unicodes=unicodes,
+ indices={
+ },
+ duplicates={
+ },
+ variants={
+ },
+ lookuptypes={},
+ },
+ metadata={
+ },
+ properties={
+ },
+ descriptions={},
+ goodies={},
+ helpers={
+ tounicodelist=splitter,
+ tounicodetable=lpeg.Ct(splitter),
+ },
+ }
+ starttiming(data)
+ report_otf("file size: %s",size)
+ enhancers.apply(data,filename,fontdata)
+ local packtime={}
+ if packdata then
+ if cleanup>0 then
+ collectgarbage("collect")
+ end
+ starttiming(packtime)
+ enhance("pack",data,filename,nil)
+ stoptiming(packtime)
+ end
+ report_otf("saving %a in cache",filename)
+ data=containers.write(otf.cache,hash,data)
+ if cleanup>1 then
+ collectgarbage("collect")
+ end
+ stoptiming(data)
+ if elapsedtime then
+ report_otf("preprocessing and caching time %s, packtime %s",
+ elapsedtime(data),packdata and elapsedtime(packtime) or 0)
+ end
+ fontloader.close(fontdata)
+ if cleanup>3 then
+ collectgarbage("collect")
+ end
+ data=containers.read(otf.cache,hash)
+ if cleanup>2 then
+ collectgarbage("collect")
+ end
+ else
+ data=nil
+ report_otf("loading failed due to read error")
+ end
+ end
+ if data then
+ if trace_defining then
+ report_otf("loading from cache using hash %a",hash)
+ end
+ enhance("unpack",data,filename,nil,false)
+ enhance("add dimensions",data,filename,nil,false)
+ if trace_sequences then
+ showfeatureorder(data,filename)
+ end
+ end
+ return data
+end
+local mt={
+ __index=function(t,k)
+ if k=="height" then
+ local ht=t.boundingbox[4]
+ return ht<0 and 0 or ht
+ elseif k=="depth" then
+ local dp=-t.boundingbox[2]
+ return dp<0 and 0 or dp
+ elseif k=="width" then
+ return 0
+ elseif k=="name" then
+ return forcenotdef and ".notdef"
+ end
+ end
+}
+actions["prepare tables"]=function(data,filename,raw)
+ data.properties.hasitalics=false
+end
+actions["add dimensions"]=function(data,filename)
+ if data then
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local defaultwidth=resources.defaultwidth or 0
+ local defaultheight=resources.defaultheight or 0
+ local defaultdepth=resources.defaultdepth or 0
+ local basename=trace_markwidth and file.basename(filename)
+ if usemetatables then
+ for _,d in next,descriptions do
+ local wd=d.width
+ if not wd then
+ d.width=defaultwidth
+ elseif trace_markwidth and wd~=0 and d.class=="mark" then
+ report_otf("mark %a with width %b found in %a",d.name or "<noname>",wd,basename)
+ end
+ setmetatable(d,mt)
+ end
+ else
+ for _,d in next,descriptions do
+ local bb,wd=d.boundingbox,d.width
+ if not wd then
+ d.width=defaultwidth
+ elseif trace_markwidth and wd~=0 and d.class=="mark" then
+ report_otf("mark %a with width %b found in %a",d.name or "<noname>",wd,basename)
+ end
+ if bb then
+ local ht,dp=bb[4],-bb[2]
+ if ht==0 or ht<0 then
+ else
+ d.height=ht
+ end
+ if dp==0 or dp<0 then
+ else
+ d.depth=dp
+ end
+ end
+ end
+ end
+ end
+end
+local function somecopy(old)
+ if old then
+ local new={}
+ if type(old)=="table" then
+ for k,v in next,old do
+ if k=="glyphs" then
+ elseif type(v)=="table" then
+ new[k]=somecopy(v)
+ else
+ new[k]=v
+ end
+ end
+ else
+ for i=1,#mainfields do
+ local k=mainfields[i]
+ local v=old[k]
+ if k=="glyphs" then
+ elseif type(v)=="table" then
+ new[k]=somecopy(v)
+ else
+ new[k]=v
+ end
+ end
+ end
+ return new
+ else
+ return {}
+ end
+end
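+-- prepare glyphs: build descriptions[] keyed by unicode (per subfont for cid fonts), assign private slots to glyphs without a usable unicode, and record indices, variants and duplicates.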
+actions["prepare glyphs"]=function(data,filename,raw)
+ local rawglyphs=raw.glyphs
+ local rawsubfonts=raw.subfonts
+ local rawcidinfo=raw.cidinfo
+ local criterium=constructors.privateoffset
+ local private=criterium
+ local resources=data.resources
+ local metadata=data.metadata
+ local properties=data.properties
+ local descriptions=data.descriptions
+ local unicodes=resources.unicodes
+ local indices=resources.indices
+ local duplicates=resources.duplicates
+ local variants=resources.variants
+ if rawsubfonts then
+ metadata.subfonts={}
+ properties.cidinfo=rawcidinfo
+ if rawcidinfo.registry then
+ local cidmap=fonts.cid.getmap(rawcidinfo)
+ if cidmap then
+ rawcidinfo.usedname=cidmap.usedname
+ local nofnames,nofunicodes=0,0
+ local cidunicodes,cidnames=cidmap.unicodes,cidmap.names
+ for cidindex=1,#rawsubfonts do
+ local subfont=rawsubfonts[cidindex]
+ local cidglyphs=subfont.glyphs
+ metadata.subfonts[cidindex]=somecopy(subfont)
+ for index=0,subfont.glyphcnt-1 do
+ local glyph=cidglyphs[index]
+ if glyph then
+ local unicode=glyph.unicode
+ local name=glyph.name or cidnames[index]
+ if not unicode or unicode==-1 or unicode>=criterium then
+ unicode=cidunicodes[index]
+ end
+ if not unicode or unicode==-1 or unicode>=criterium then
+ if not name then
+ name=format("u%06X",private)
+ end
+ unicode=private
+ unicodes[name]=private
+ if trace_private then
+ report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private)
+ end
+ private=private+1
+ nofnames=nofnames+1
+ else
+ if not name then
+ name=format("u%06X",unicode)
+ end
+ unicodes[name]=unicode
+ nofunicodes=nofunicodes+1
+ end
+ indices[index]=unicode
+ local description={
+ boundingbox=glyph.boundingbox,
+ name=glyph.name or name or "unknown",
+ cidindex=cidindex,
+ index=index,
+ glyph=glyph,
+ }
+ descriptions[unicode]=description
+ else
+ end
+ end
+ end
+ if trace_loading then
+ report_otf("cid font remapped, %s unicode points, %s symbolic names, %s glyphs",nofunicodes,nofnames,nofunicodes+nofnames)
+ end
+ elseif trace_loading then
+ report_otf("unable to remap cid font, missing cid file for %a",filename)
+ end
+ elseif trace_loading then
+ report_otf("font %a has no glyphs",filename)
+ end
+ else
+ for index=0,raw.glyphcnt-1 do
+ local glyph=rawglyphs[index]
+ if glyph then
+ local unicode=glyph.unicode
+ local name=glyph.name
+ if not unicode or unicode==-1 or unicode>=criterium then
+ unicode=private
+ unicodes[name]=private
+ if trace_private then
+ report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private)
+ end
+ private=private+1
+ else
+ unicodes[name]=unicode
+ end
+ indices[index]=unicode
+ if not name then
+ name=format("u%06X",unicode)
+ end
+ descriptions[unicode]={
+ boundingbox=glyph.boundingbox,
+ name=name,
+ index=index,
+ glyph=glyph,
+ }
+ local altuni=glyph.altuni
+ if altuni then
+ local d
+ for i=1,#altuni do
+ local a=altuni[i]
+ local u=a.unicode
+ local v=a.variant
+ if v then
+ local vv=variants[v]
+ if vv then
+ vv[u]=unicode
+ else
+ vv={ [u]=unicode }
+ variants[v]=vv
+ end
+ elseif d then
+ d[#d+1]=u
+ else
+ d={ u }
+ end
+ end
+ if d then
+ duplicates[unicode]=d
+ end
+ end
+ else
+ report_otf("potential problem: glyph %U is used but empty",index)
+ end
+ end
+ end
+ resources.private=private
+end
+actions["check encoding"]=function(data,filename,raw)
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local properties=data.properties
+ local unicodes=resources.unicodes
+ local indices=resources.indices
+ local mapdata=raw.map or {}
+ local unicodetoindex=mapdata and mapdata.map or {}
+ local encname=lower(data.enc_name or mapdata.enc_name or "")
+ local criterium=0xFFFF
+ if find(encname,"unicode") then
+ if trace_loading then
+ report_otf("checking embedded unicode map %a",encname)
+ end
+ for unicode,index in next,unicodetoindex do
+ if unicode<=criterium and not descriptions[unicode] then
+ local parent=indices[index]
+ if parent then
+ report_otf("weird, unicode %U points to %U with index %H",unicode,parent,index)
+ else
+ report_otf("weird, unicode %U points to nowhere with index %H",unicode,index)
+ end
+ end
+ end
+ elseif properties.cidinfo then
+ report_otf("warning: no unicode map, used cidmap %a",properties.cidinfo.usedname)
+ else
+ report_otf("warning: non unicode map %a, only using glyph unicode data",encname or "whatever")
+ end
+ if mapdata then
+ mapdata.map={}
+ end
+end
+actions["add duplicates"]=function(data,filename,raw)
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local properties=data.properties
+ local unicodes=resources.unicodes
+ local indices=resources.indices
+ local duplicates=resources.duplicates
+ for unicode,d in next,duplicates do
+ for i=1,#d do
+ local u=d[i]
+ if not descriptions[u] then
+ local description=descriptions[unicode]
+ local duplicate=table.copy(description)
+ duplicate.comment=format("copy of U+%05X",unicode)
+ descriptions[u]=duplicate
+ local n=0
+ for _,description in next,descriptions do
+      local kerns=description.kerns
+      if kerns then
+       for _,k in next,kerns do
+ local ku=k[unicode]
+ if ku then
+ k[u]=ku
+ n=n+1
+ end
+ end
+ end
+ end
+ if trace_loading then
+ report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n)
+ end
+ end
+ end
+ end
+end
+actions["analyze glyphs"]=function(data,filename,raw)
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local metadata=data.metadata
+ local properties=data.properties
+ local hasitalics=false
+ local widths={}
+ local marks={}
+ for unicode,description in next,descriptions do
+ local glyph=description.glyph
+ local italic=glyph.italic_correction
+ if not italic then
+ elseif italic==0 then
+ else
+ description.italic=italic
+ hasitalics=true
+ end
+ local width=glyph.width
+ widths[width]=(widths[width] or 0)+1
+ local class=glyph.class
+ if class then
+ if class=="mark" then
+ marks[unicode]=true
+ end
+ description.class=class
+ end
+ end
+ properties.hasitalics=hasitalics
+ resources.marks=marks
+ local wd,most=0,1
+ for k,v in next,widths do
+ if v>most then
+ wd,most=k,v
+ end
+ end
+ if most>1000 then
+ if trace_loading then
+ report_otf("most common width: %s (%s times), sharing (cjk font)",wd,most)
+ end
+ for unicode,description in next,descriptions do
+ if description.width==wd then
+ else
+ description.width=description.glyph.width
+ end
+ end
+ resources.defaultwidth=wd
+ else
+ for unicode,description in next,descriptions do
+ description.width=description.glyph.width
+ end
+ end
+end
+actions["reorganize mark classes"]=function(data,filename,raw)
+ local mark_classes=raw.mark_classes
+ if mark_classes then
+ local resources=data.resources
+ local unicodes=resources.unicodes
+ local markclasses={}
+ resources.markclasses=markclasses
+ for name,class in next,mark_classes do
+ local t={}
+ for s in gmatch(class,"[^ ]+") do
+ t[unicodes[s]]=true
+ end
+ markclasses[name]=t
+ end
+ end
+end
+actions["reorganize features"]=function(data,filename,raw)
+ local features={}
+ data.resources.features=features
+ for k,what in next,otf.glists do
+ local dw=raw[what]
+ if dw then
+ local f={}
+ features[what]=f
+ for i=1,#dw do
+ local d=dw[i]
+ local dfeatures=d.features
+ if dfeatures then
+ for i=1,#dfeatures do
+ local df=dfeatures[i]
+ local tag=strip(lower(df.tag))
+ local ft=f[tag]
+ if not ft then
+ ft={}
+ f[tag]=ft
+ end
+ local dscripts=df.scripts
+ for i=1,#dscripts do
+ local d=dscripts[i]
+ local languages=d.langs
+ local script=strip(lower(d.script))
+ local fts=ft[script] if not fts then fts={} ft[script]=fts end
+ for i=1,#languages do
+ fts[strip(lower(languages[i]))]=true
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
+actions["reorganize anchor classes"]=function(data,filename,raw)
+ local resources=data.resources
+ local anchor_to_lookup={}
+ local lookup_to_anchor={}
+ resources.anchor_to_lookup=anchor_to_lookup
+ resources.lookup_to_anchor=lookup_to_anchor
+ local classes=raw.anchor_classes
+ if classes then
+ for c=1,#classes do
+ local class=classes[c]
+ local anchor=class.name
+ local lookups=class.lookup
+ if type(lookups)~="table" then
+ lookups={ lookups }
+ end
+ local a=anchor_to_lookup[anchor]
+ if not a then
+ a={}
+ anchor_to_lookup[anchor]=a
+ end
+ for l=1,#lookups do
+ local lookup=lookups[l]
+ local l=lookup_to_anchor[lookup]
+ if l then
+ l[anchor]=true
+ else
+ l={ [anchor]=true }
+ lookup_to_anchor[lookup]=l
+ end
+ a[lookup]=true
+ end
+ end
+ end
+end
+actions["prepare tounicode"]=function(data,filename,raw)
+ fonts.mappings.addtounicode(data,filename)
+end
+local g_directions={
+ gsub_contextchain=1,
+ gpos_contextchain=1,
+ gsub_reversecontextchain=-1,
+ gpos_reversecontextchain=-1,
+}
+local function supported(features)
+ for i=1,#features do
+ if features[i].ismac then
+ return false
+ end
+ end
+ return true
+end
+actions["reorganize subtables"]=function(data,filename,raw)
+ local resources=data.resources
+ local sequences={}
+ local lookups={}
+ local chainedfeatures={}
+ resources.sequences=sequences
+ resources.lookups=lookups
+ for _,what in next,otf.glists do
+ local dw=raw[what]
+ if dw then
+ for k=1,#dw do
+ local gk=dw[k]
+ local features=gk.features
+ if not features or supported(features) then
+ local typ=gk.type
+ local chain=g_directions[typ] or 0
+ local subtables=gk.subtables
+ if subtables then
+ local t={}
+ for s=1,#subtables do
+ t[s]=subtables[s].name
+ end
+ subtables=t
+ end
+ local flags,markclass=gk.flags,nil
+ if flags then
+ local t={
+ (flags.ignorecombiningmarks and "mark") or false,
+ (flags.ignoreligatures and "ligature") or false,
+ (flags.ignorebaseglyphs and "base") or false,
+ flags.r2l or false,
+ }
+ markclass=flags.mark_class
+ if markclass then
+ markclass=resources.markclasses[markclass]
+ end
+ flags=t
+ end
+ local name=gk.name
+ if not name then
+ report_otf("skipping weird lookup number %s",k)
+ elseif features then
+ local f={}
+ for i=1,#features do
+ local df=features[i]
+ local tag=strip(lower(df.tag))
+ local ft=f[tag] if not ft then ft={} f[tag]=ft end
+ local dscripts=df.scripts
+ for i=1,#dscripts do
+ local d=dscripts[i]
+ local languages=d.langs
+ local script=strip(lower(d.script))
+ local fts=ft[script] if not fts then fts={} ft[script]=fts end
+ for i=1,#languages do
+ fts[strip(lower(languages[i]))]=true
+ end
+ end
+ end
+ sequences[#sequences+1]={
+ type=typ,
+ chain=chain,
+ flags=flags,
+ name=name,
+ subtables=subtables,
+ markclass=markclass,
+ features=f,
+ }
+ else
+ lookups[name]={
+ type=typ,
+ chain=chain,
+ flags=flags,
+ subtables=subtables,
+ markclass=markclass,
+ }
+ end
+ end
+ end
+ end
+ end
+end
+actions["prepare lookups"]=function(data,filename,raw)
+ local lookups=raw.lookups
+ if lookups then
+ data.lookups=lookups
+ end
+end
+local function t_uncover(splitter,cache,covers)
+ local result={}
+ for n=1,#covers do
+ local cover=covers[n]
+ local uncovered=cache[cover]
+ if not uncovered then
+ uncovered=lpegmatch(splitter,cover)
+ cache[cover]=uncovered
+ end
+ result[n]=uncovered
+ end
+ return result
+end
+local function s_uncover(splitter,cache,cover)
+ if cover=="" then
+ return nil
+ else
+ local uncovered=cache[cover]
+ if not uncovered then
+ uncovered=lpegmatch(splitter,cover)
+ cache[cover]=uncovered
+ end
+ return { uncovered }
+ end
+end
+local function t_hashed(t,cache)
+ if t then
+ local ht={}
+ for i=1,#t do
+ local ti=t[i]
+ local tih=cache[ti]
+ if not tih then
+ tih={}
+ for i=1,#ti do
+ tih[ti[i]]=true
+ end
+ cache[ti]=tih
+ end
+ ht[i]=tih
+ end
+ return ht
+ else
+ return nil
+ end
+end
+local function s_hashed(t,cache)
+ if t then
+ local ht={}
+ local tf=t[1]
+ for i=1,#tf do
+ ht[i]={ [tf[i]]=true }
+ end
+ return ht
+ else
+ return nil
+ end
+end
+local function r_uncover(splitter,cache,cover,replacements)
+ if cover=="" then
+ return nil
+ else
+ local uncovered=cover[1]
+ local replaced=cache[replacements]
+ if not replaced then
+ replaced=lpegmatch(splitter,replacements)
+ cache[replacements]=replaced
+ end
+ local nu,nr=#uncovered,#replaced
+ local r={}
+ if nu==nr then
+ for i=1,nu do
+ r[uncovered[i]]=replaced[i]
+ end
+ end
+ return r
+ end
+end
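+-- reorganize lookups: rewrite class, coverage, reversecoverage and glyphs based rules into hashed before/current/after (and replacement) sets keyed by unicode.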
+actions["reorganize lookups"]=function(data,filename,raw)
+ if data.lookups then
+ local splitter=data.helpers.tounicodetable
+ local t_u_cache={}
+ local s_u_cache=t_u_cache
+ local t_h_cache={}
+ local s_h_cache=t_h_cache
+ local r_u_cache={}
+ for _,lookup in next,data.lookups do
+ local rules=lookup.rules
+ if rules then
+ local format=lookup.format
+ if format=="class" then
+ local before_class=lookup.before_class
+ if before_class then
+ before_class=t_uncover(splitter,t_u_cache,reversed(before_class))
+ end
+ local current_class=lookup.current_class
+ if current_class then
+ current_class=t_uncover(splitter,t_u_cache,current_class)
+ end
+ local after_class=lookup.after_class
+ if after_class then
+ after_class=t_uncover(splitter,t_u_cache,after_class)
+ end
+ for i=1,#rules do
+ local rule=rules[i]
+ local class=rule.class
+ local before=class.before
+ if before then
+ for i=1,#before do
+ before[i]=before_class[before[i]] or {}
+ end
+ rule.before=t_hashed(before,t_h_cache)
+ end
+ local current=class.current
+ local lookups=rule.lookups
+ if current then
+ for i=1,#current do
+ current[i]=current_class[current[i]] or {}
+ if lookups and not lookups[i] then
+ lookups[i]=""
+ end
+ end
+ rule.current=t_hashed(current,t_h_cache)
+ end
+ local after=class.after
+ if after then
+ for i=1,#after do
+ after[i]=after_class[after[i]] or {}
+ end
+ rule.after=t_hashed(after,t_h_cache)
+ end
+ rule.class=nil
+ end
+ lookup.before_class=nil
+ lookup.current_class=nil
+ lookup.after_class=nil
+ lookup.format="coverage"
+ elseif format=="coverage" then
+ for i=1,#rules do
+ local rule=rules[i]
+ local coverage=rule.coverage
+ if coverage then
+ local before=coverage.before
+ if before then
+ before=t_uncover(splitter,t_u_cache,reversed(before))
+ rule.before=t_hashed(before,t_h_cache)
+ end
+ local current=coverage.current
+ if current then
+ current=t_uncover(splitter,t_u_cache,current)
+ rule.current=t_hashed(current,t_h_cache)
+ end
+ local after=coverage.after
+ if after then
+ after=t_uncover(splitter,t_u_cache,after)
+ rule.after=t_hashed(after,t_h_cache)
+ end
+ rule.coverage=nil
+ end
+ end
+ elseif format=="reversecoverage" then
+ for i=1,#rules do
+ local rule=rules[i]
+ local reversecoverage=rule.reversecoverage
+ if reversecoverage then
+ local before=reversecoverage.before
+ if before then
+ before=t_uncover(splitter,t_u_cache,reversed(before))
+ rule.before=t_hashed(before,t_h_cache)
+ end
+ local current=reversecoverage.current
+ if current then
+ current=t_uncover(splitter,t_u_cache,current)
+ rule.current=t_hashed(current,t_h_cache)
+ end
+ local after=reversecoverage.after
+ if after then
+ after=t_uncover(splitter,t_u_cache,after)
+ rule.after=t_hashed(after,t_h_cache)
+ end
+ local replacements=reversecoverage.replacements
+ if replacements then
+ rule.replacements=r_uncover(splitter,r_u_cache,current,replacements)
+ end
+ rule.reversecoverage=nil
+ end
+ end
+ elseif format=="glyphs" then
+ for i=1,#rules do
+ local rule=rules[i]
+ local glyphs=rule.glyphs
+ if glyphs then
+ local fore=glyphs.fore
+ if fore and fore~="" then
+ fore=s_uncover(splitter,s_u_cache,fore)
+ rule.before=s_hashed(fore,s_h_cache)
+ end
+ local back=glyphs.back
+ if back then
+ back=s_uncover(splitter,s_u_cache,back)
+ rule.after=s_hashed(back,s_h_cache)
+ end
+ local names=glyphs.names
+ if names then
+ names=s_uncover(splitter,s_u_cache,names)
+ rule.current=s_hashed(names,s_h_cache)
+ end
+ rule.glyphs=nil
+ end
+ end
+ end
+ end
+ end
+ end
+end
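+-- (editorial note) The action above normalizes contextual lookups: "class" rules are
+-- expanded through their before/current/after class tables and relabeled as "coverage",
+-- "coverage" and "reversecoverage" rules get hashed before/current/after sets, and
+-- "glyphs" rules map fore/back/names onto before/after/current. The shared caches
+-- (t_u_cache, t_h_cache, r_u_cache) avoid re-splitting identical coverage strings.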
+local function check_variants(unicode,the_variants,splitter,unicodes)
+ local variants=the_variants.variants
+ if variants then
+ local glyphs=lpegmatch(splitter,variants)
+ local done={ [unicode]=true }
+ local n=0
+ for i=1,#glyphs do
+ local g=glyphs[i]
+ if done[g] then
+ report_otf("skipping cyclic reference %U in math variant %U",g,unicode)
+ else
+ if n==0 then
+ n=1
+ variants={ g }
+ else
+ n=n+1
+ variants[n]=g
+ end
+ done[g]=true
+ end
+ end
+ if n==0 then
+ variants=nil
+ end
+ end
+ local parts=the_variants.parts
+ if parts then
+ local p=#parts
+ if p>0 then
+ for i=1,p do
+ local pi=parts[i]
+ pi.glyph=unicodes[pi.component] or 0
+ pi.component=nil
+ end
+ else
+ parts=nil
+ end
+ end
+ local italic_correction=the_variants.italic_correction
+ if italic_correction and italic_correction==0 then
+ italic_correction=nil
+ end
+ return variants,parts,italic_correction
+end
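+-- (editorial note) check_variants splits the math variant string, drops cyclic
+-- references back to the base glyph (reported via report_otf), resolves part components
+-- to unicodes, and suppresses a zero italic correction; it feeds the "analyze math"
+-- action below, which stores the result per glyph in description.math.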
+actions["analyze math"]=function(data,filename,raw)
+ if raw.math then
+ data.metadata.math=raw.math
+ local unicodes=data.resources.unicodes
+ local splitter=data.helpers.tounicodetable
+ for unicode,description in next,data.descriptions do
+ local glyph=description.glyph
+ local mathkerns=glyph.mathkern
+ local horiz_variants=glyph.horiz_variants
+ local vert_variants=glyph.vert_variants
+ local top_accent=glyph.top_accent
+ if mathkerns or horiz_variants or vert_variants or top_accent then
+ local math={}
+ if top_accent then
+ math.top_accent=top_accent
+ end
+ if mathkerns then
+ for k,v in next,mathkerns do
+ if not next(v) then
+ mathkerns[k]=nil
+ else
+ for k,v in next,v do
+ if v==0 then
+ k[v]=nil
+ end
+ end
+ end
+ end
+ math.kerns=mathkerns
+ end
+ if horiz_variants then
+ math.horiz_variants,math.horiz_parts,math.horiz_italic_correction=check_variants(unicode,horiz_variants,splitter,unicodes)
+ end
+ if vert_variants then
+ math.vert_variants,math.vert_parts,math.vert_italic_correction=check_variants(unicode,vert_variants,splitter,unicodes)
+ end
+ local italic_correction=description.italic
+ if italic_correction and italic_correction~=0 then
+ math.italic_correction=italic_correction
+ end
+ description.math=math
+ end
+ end
+ end
+end
+actions["reorganize glyph kerns"]=function(data,filename,raw)
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local unicodes=resources.unicodes
+ for unicode,description in next,descriptions do
+ local kerns=description.glyph.kerns
+ if kerns then
+ local newkerns={}
+ for k,kern in next,kerns do
+ local name=kern.char
+ local offset=kern.off
+ local lookup=kern.lookup
+ if name and offset and lookup then
+ local unicode=unicodes[name]
+ if unicode then
+ if type(lookup)=="table" then
+ for l=1,#lookup do
+ local lookup=lookup[l]
+ local lookupkerns=newkerns[lookup]
+ if lookupkerns then
+ lookupkerns[unicode]=offset
+ else
+ newkerns[lookup]={ [unicode]=offset }
+ end
+ end
+ else
+ local lookupkerns=newkerns[lookup]
+ if lookupkerns then
+ lookupkerns[unicode]=offset
+ else
+ newkerns[lookup]={ [unicode]=offset }
+ end
+ end
+ elseif trace_loading then
+ report_otf("problems with unicode %a of kern %a of glyph %U",name,k,unicode)
+ end
+ end
+ end
+ description.kerns=newkerns
+ end
+ end
+end
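+-- (editorial sketch) "reorganize glyph kerns" flattens the fontloader's per-glyph kern
+-- records { char=<name>, off=<value>, lookup=<name(s)> } into
+-- description.kerns[lookup][unicode]=offset; glyph names are resolved through
+-- resources.unicodes, and unresolvable names are reported when trace_loading is set.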
+actions["merge kern classes"]=function(data,filename,raw)
+ local gposlist=raw.gpos
+ if gposlist then
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local unicodes=resources.unicodes
+ local splitter=data.helpers.tounicodetable
+ for gp=1,#gposlist do
+ local gpos=gposlist[gp]
+ local subtables=gpos.subtables
+ if subtables then
+ for s=1,#subtables do
+ local subtable=subtables[s]
+ local kernclass=subtable.kernclass
+ if kernclass then
+ local split={}
+ for k=1,#kernclass do
+ local kcl=kernclass[k]
+ local firsts=kcl.firsts
+ local seconds=kcl.seconds
+ local offsets=kcl.offsets
+ local lookups=kcl.lookup
+ if type(lookups)~="table" then
+ lookups={ lookups }
+ end
+ for n,s in next,firsts do
+ split[s]=split[s] or lpegmatch(splitter,s)
+ end
+ local maxseconds=0
+ for n,s in next,seconds do
+ if n>maxseconds then
+ maxseconds=n
+ end
+ split[s]=split[s] or lpegmatch(splitter,s)
+ end
+ for l=1,#lookups do
+ local lookup=lookups[l]
+ for fk=1,#firsts do
+ local fv=firsts[fk]
+ local splt=split[fv]
+ if splt then
+ local extrakerns={}
+ local baseoffset=(fk-1)*maxseconds
+ for sk=2,maxseconds do
+ local sv=seconds[sk]
+ local splt=split[sv]
+ if splt then
+ local offset=offsets[baseoffset+sk]
+ if offset then
+ for i=1,#splt do
+ extrakerns[splt[i]]=offset
+ end
+ end
+ end
+ end
+ for i=1,#splt do
+ local first_unicode=splt[i]
+ local description=descriptions[first_unicode]
+ if description then
+ local kerns=description.kerns
+ if not kerns then
+ kerns={}
+ description.kerns=kerns
+ end
+ local lookupkerns=kerns[lookup]
+ if not lookupkerns then
+ lookupkerns={}
+ kerns[lookup]=lookupkerns
+ end
+ for second_unicode,kern in next,extrakerns do
+ lookupkerns[second_unicode]=kern
+ end
+ elseif trace_loading then
+ report_otf("no glyph data for %U",first_unicode)
+ end
+ end
+ end
+ end
+ end
+ end
+ subtable.kernclass={}
+ end
+ end
+ end
+ end
+ end
+end
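+-- (editorial note) "merge kern classes" turns class based gpos kerning into the same
+-- per-glyph description.kerns[lookup] shape used above: each first class is split into
+-- glyphs, the kern value for a pair of classes is read from
+-- offsets[(fk-1)*maxseconds+sk], and that value is copied to every glyph of the first
+-- class for every glyph of the second class. The processed kernclass table is emptied.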
+actions["check glyphs"]=function(data,filename,raw)
+ for unicode,description in next,data.descriptions do
+ description.glyph=nil
+ end
+end
+actions["check metadata"]=function(data,filename,raw)
+ local metadata=data.metadata
+ for _,k in next,mainfields do
+ if valid_fields[k] then
+ local v=raw[k]
+ if not metadata[k] then
+ metadata[k]=v
+ end
+ end
+ end
+ local ttftables=metadata.ttf_tables
+ if ttftables then
+ for i=1,#ttftables do
+ ttftables[i].data="deleted"
+ end
+ end
+end
+actions["cleanup tables"]=function(data,filename,raw)
+ data.resources.indices=nil
+ data.helpers=nil
+end
+actions["reorganize glyph lookups"]=function(data,filename,raw)
+ local resources=data.resources
+ local unicodes=resources.unicodes
+ local descriptions=data.descriptions
+ local splitter=data.helpers.tounicodelist
+ local lookuptypes=resources.lookuptypes
+ for unicode,description in next,descriptions do
+ local lookups=description.glyph.lookups
+ if lookups then
+ for tag,lookuplist in next,lookups do
+ for l=1,#lookuplist do
+ local lookup=lookuplist[l]
+ local specification=lookup.specification
+ local lookuptype=lookup.type
+ local lt=lookuptypes[tag]
+ if not lt then
+ lookuptypes[tag]=lookuptype
+ elseif lt~=lookuptype then
+ report_otf("conflicting lookuptypes, %a points to %a and %a",tag,lt,lookuptype)
+ end
+ if lookuptype=="ligature" then
+ lookuplist[l]={ lpegmatch(splitter,specification.components) }
+ elseif lookuptype=="alternate" then
+ lookuplist[l]={ lpegmatch(splitter,specification.components) }
+ elseif lookuptype=="substitution" then
+ lookuplist[l]=unicodes[specification.variant]
+ elseif lookuptype=="multiple" then
+ lookuplist[l]={ lpegmatch(splitter,specification.components) }
+ elseif lookuptype=="position" then
+ lookuplist[l]={
+ specification.x or 0,
+ specification.y or 0,
+ specification.h or 0,
+ specification.v or 0
+ }
+ elseif lookuptype=="pair" then
+ local one=specification.offsets[1]
+ local two=specification.offsets[2]
+ local paired=unicodes[specification.paired]
+ if one then
+ if two then
+ lookuplist[l]={ paired,{ one.x or 0,one.y or 0,one.h or 0,one.v or 0 },{ two.x or 0,two.y or 0,two.h or 0,two.v or 0 } }
+ else
+ lookuplist[l]={ paired,{ one.x or 0,one.y or 0,one.h or 0,one.v or 0 } }
+ end
+ else
+ if two then
+ lookuplist[l]={ paired,{},{ two.x or 0,two.y or 0,two.h or 0,two.v or 0} }
+ else
+ lookuplist[l]={ paired }
+ end
+ end
+ end
+ end
+ end
+ local slookups,mlookups
+ for tag,lookuplist in next,lookups do
+ if #lookuplist==1 then
+ if slookups then
+ slookups[tag]=lookuplist[1]
+ else
+ slookups={ [tag]=lookuplist[1] }
+ end
+ else
+ if mlookups then
+ mlookups[tag]=lookuplist
+ else
+ mlookups={ [tag]=lookuplist }
+ end
+ end
+ end
+ if slookups then
+ description.slookups=slookups
+ end
+ if mlookups then
+ description.mlookups=mlookups
+ end
+ end
+ end
+end
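+-- (editorial sketch) After "reorganize glyph lookups" each glyph description carries
+-- slookups (one entry per tag) and/or mlookups (a list per tag) instead of raw
+-- glyph.lookups: ligature/alternate/multiple specifications become component lists,
+-- substitutions become a single unicode, positionings become { x, y, h, v } and pairs
+-- become { paired, first_offsets, second_offsets }.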
+actions["reorganize glyph anchors"]=function(data,filename,raw)
+ local descriptions=data.descriptions
+ for unicode,description in next,descriptions do
+ local anchors=description.glyph.anchors
+ if anchors then
+ for class,data in next,anchors do
+ if class=="baselig" then
+ for tag,specification in next,data do
+ for i=1,#specification do
+ local si=specification[i]
+ specification[i]={ si.x or 0,si.y or 0 }
+ end
+ end
+ else
+ for tag,specification in next,data do
+ data[tag]={ specification.x or 0,specification.y or 0 }
+ end
+ end
+ end
+ description.anchors=anchors
+ end
+ end
+end
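+-- (editorial note) Anchor records are reduced to plain { x, y } pairs; only the
+-- "baselig" class keeps an array of such pairs, one per ligature component.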
+function otf.setfeatures(tfmdata,features)
+ local okay=constructors.initializefeatures("otf",tfmdata,features,trace_features,report_otf)
+ if okay then
+ return constructors.collectprocessors("otf",tfmdata,features,trace_features,report_otf)
+ else
+ return {}
+ end
+end
+local function copytotfm(data,cache_id)
+ if data then
+ local metadata=data.metadata
+ local resources=data.resources
+ local properties=derivetable(data.properties)
+ local descriptions=derivetable(data.descriptions)
+ local goodies=derivetable(data.goodies)
+ local characters={}
+ local parameters={}
+ local mathparameters={}
+ local pfminfo=metadata.pfminfo or {}
+ local resources=data.resources
+ local unicodes=resources.unicodes
+ local spaceunits=500
+ local spacer="space"
+ local designsize=metadata.designsize or metadata.design_size or 100
+ local mathspecs=metadata.math
+ if designsize==0 then
+ designsize=100
+ end
+ if mathspecs then
+ for name,value in next,mathspecs do
+ mathparameters[name]=value
+ end
+ end
+ for unicode,_ in next,data.descriptions do
+ characters[unicode]={}
+ end
+ if mathspecs then
+ for unicode,character in next,characters do
+ local d=descriptions[unicode]
+ local m=d.math
+ if m then
+ local variants=m.horiz_variants
+ local parts=m.horiz_parts
+ if variants then
+ local c=character
+ for i=1,#variants do
+ local un=variants[i]
+ c.next=un
+ c=characters[un]
+ end
+ c.horiz_variants=parts
+ elseif parts then
+ character.horiz_variants=parts
+ end
+ local variants=m.vert_variants
+ local parts=m.vert_parts
+ if variants then
+ local c=character
+ for i=1,#variants do
+ local un=variants[i]
+ c.next=un
+ c=characters[un]
+ end
+ c.vert_variants=parts
+ elseif parts then
+ character.vert_variants=parts
+ end
+ local italic_correction=m.vert_italic_correction
+ if italic_correction then
+ character.vert_italic_correction=italic_correction
+ end
+ local top_accent=m.top_accent
+ if top_accent then
+ character.top_accent=top_accent
+ end
+ local kerns=m.kerns
+ if kerns then
+ character.mathkerns=kerns
+ end
+ end
+ end
+ end
+ local monospaced=metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose.proportion=="Monospaced")
+ local charwidth=pfminfo.avgwidth
+ local italicangle=metadata.italicangle
+ local charxheight=pfminfo.os2_xheight and pfminfo.os2_xheight>0 and pfminfo.os2_xheight
+ properties.monospaced=monospaced
+ parameters.italicangle=italicangle
+ parameters.charwidth=charwidth
+ parameters.charxheight=charxheight
+ local space=0x0020
+ local emdash=0x2014
+ if monospaced then
+ if descriptions[space] then
+ spaceunits,spacer=descriptions[space].width,"space"
+ end
+ if not spaceunits and descriptions[emdash] then
+ spaceunits,spacer=descriptions[emdash].width,"emdash"
+ end
+ if not spaceunits and charwidth then
+ spaceunits,spacer=charwidth,"charwidth"
+ end
+ else
+ if descriptions[space] then
+ spaceunits,spacer=descriptions[space].width,"space"
+ end
+ if not spaceunits and descriptions[emdash] then
+ spaceunits,spacer=descriptions[emdash].width/2,"emdash/2"
+ end
+ if not spaceunits and charwidth then
+ spaceunits,spacer=charwidth,"charwidth"
+ end
+ end
+ spaceunits=tonumber(spaceunits) or 500
+ local filename=constructors.checkedfilename(resources)
+ local fontname=metadata.fontname
+ local fullname=metadata.fullname or fontname
+ local units=metadata.units_per_em or 1000
+ if units==0 then
+ units=1000
+ metadata.units_per_em=1000
+ end
+ parameters.slant=0
+ parameters.space=spaceunits
+ parameters.space_stretch=units/2
+ parameters.space_shrink=1*units/3
+ parameters.x_height=2*units/5
+ parameters.quad=units
+ if spaceunits<2*units/5 then
+ end
+ if italicangle then
+ parameters.italicangle=italicangle
+ parameters.italicfactor=math.cos(math.rad(90+italicangle))
+ parameters.slant=- math.round(math.tan(italicangle*math.pi/180))
+ end
+ if monospaced then
+ parameters.space_stretch=0
+ parameters.space_shrink=0
+ elseif syncspace then
+ parameters.space_stretch=spaceunits/2
+ parameters.space_shrink=spaceunits/3
+ end
+ parameters.extra_space=parameters.space_shrink
+ if charxheight then
+ parameters.x_height=charxheight
+ else
+ local x=0x78
+ if x then
+ local x=descriptions[x]
+ if x then
+ parameters.x_height=x.height
+ end
+ end
+ end
+ parameters.designsize=(designsize/10)*65536
+ parameters.ascender=abs(metadata.ascent or 0)
+ parameters.descender=abs(metadata.descent or 0)
+ parameters.units=units
+ properties.space=spacer
+ properties.encodingbytes=2
+ properties.format=data.format or fonts.formats[filename] or "opentype"
+ properties.noglyphnames=true
+ properties.filename=filename
+ properties.fontname=fontname
+ properties.fullname=fullname
+ properties.psname=fontname or fullname
+ properties.name=filename or fullname
+ return {
+ characters=characters,
+ descriptions=descriptions,
+ parameters=parameters,
+ mathparameters=mathparameters,
+ resources=resources,
+ properties=properties,
+ goodies=goodies,
+ }
+ end
+end
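+-- (editorial sketch) copytotfm derives the tfm-style table that LuaTeX expects from the
+-- cached otf data: characters/descriptions, parameters (space, stretch/shrink,
+-- x_height, quad, designsize, ascender/descender), math parameters and variant chains
+-- (via the "next" fields), plus properties such as filename, fontname and format. The
+-- space width falls back from the space glyph to the emdash (emdash/2 for proportional
+-- fonts) and finally to pfminfo.avgwidth.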
+local function otftotfm(specification)
+ local cache_id=specification.hash
+ local tfmdata=containers.read(constructors.cache,cache_id)
+ if not tfmdata then
+ local name=specification.name
+ local sub=specification.sub
+ local filename=specification.filename
+ local format=specification.format
+ local features=specification.features.normal
+ local rawdata=otf.load(filename,format,sub,features and features.featurefile)
+ if rawdata and next(rawdata) then
+ rawdata.lookuphash={}
+ tfmdata=copytotfm(rawdata,cache_id)
+ if tfmdata and next(tfmdata) then
+ local features=constructors.checkedfeatures("otf",features)
+ local shared=tfmdata.shared
+ if not shared then
+ shared={}
+ tfmdata.shared=shared
+ end
+ shared.rawdata=rawdata
+ shared.dynamics={}
+ tfmdata.changed={}
+ shared.features=features
+ shared.processes=otf.setfeatures(tfmdata,features)
+ end
+ end
+ containers.write(constructors.cache,cache_id,tfmdata)
+ end
+ return tfmdata
+end
+local function read_from_otf(specification)
+ local tfmdata=otftotfm(specification)
+ if tfmdata then
+ tfmdata.properties.name=specification.name
+ tfmdata.properties.sub=specification.sub
+ tfmdata=constructors.scale(tfmdata,specification)
+ local allfeatures=tfmdata.shared.features or specification.features.normal
+ constructors.applymanipulators("otf",tfmdata,allfeatures,trace_features,report_otf)
+ constructors.setname(tfmdata,specification)
+ fonts.loggers.register(tfmdata,file.suffix(specification.filename),specification)
+ end
+ return tfmdata
+end
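+-- (editorial note) The reader pipeline is: otf.load (cached raw data) -> copytotfm ->
+-- constructors.scale, after which feature manipulators are applied and the font is
+-- registered with the logger. otftotfm itself is cached under specification.hash, so a
+-- given font/feature combination is only prepared once per run.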
+local function checkmathsize(tfmdata,mathsize)
+ local mathdata=tfmdata.shared.rawdata.metadata.math
+ local mathsize=tonumber(mathsize)
+ if mathdata then
+ local parameters=tfmdata.parameters
+ parameters.scriptpercentage=mathdata.ScriptPercentScaleDown
+ parameters.scriptscriptpercentage=mathdata.ScriptScriptPercentScaleDown
+ parameters.mathsize=mathsize
+ end
+end
+registerotffeature {
+ name="mathsize",
+ description="apply mathsize as specified in the font",
+ initializers={
+ base=checkmathsize,
+ node=checkmathsize,
+ }
+}
+function otf.collectlookups(rawdata,kind,script,language)
+ local sequences=rawdata.resources.sequences
+ if sequences then
+ local featuremap,featurelist={},{}
+ for s=1,#sequences do
+ local sequence=sequences[s]
+ local features=sequence.features
+ features=features and features[kind]
+ features=features and (features[script] or features[default] or features[wildcard])
+ features=features and (features[language] or features[default] or features[wildcard])
+ if features then
+ local subtables=sequence.subtables
+ if subtables then
+ for s=1,#subtables do
+ local ss=subtables[s]
+ if not featuremap[ss] then
+ featuremap[ss]=true
+ featurelist[#featurelist+1]=ss
+ end
+ end
+ end
+ end
+ end
+ if #featurelist>0 then
+ return featuremap,featurelist
+ end
+ end
+ return nil,nil
+end
+local function check_otf(forced,specification,suffix,what)
+ local name=specification.name
+ if forced then
+ name=file.addsuffix(name,suffix,true)
+ end
+ local fullname=findbinfile(name,suffix) or ""
+ if fullname=="" then
+ fullname=fonts.names.getfilename(name,suffix) or ""
+ end
+ if fullname~="" then
+ specification.filename=fullname
+ specification.format=what
+ return read_from_otf(specification)
+ end
+end
+local function opentypereader(specification,suffix,what)
+ local forced=specification.forced or ""
+ if forced=="otf" then
+ return check_otf(true,specification,forced,"opentype")
+ elseif forced=="ttf" or forced=="ttc" or forced=="dfont" then
+ return check_otf(true,specification,forced,"truetype")
+ else
+ return check_otf(false,specification,suffix,what)
+ end
+end
+readers.opentype=opentypereader
+local formats=fonts.formats
+formats.otf="opentype"
+formats.ttf="truetype"
+formats.ttc="truetype"
+formats.dfont="truetype"
+function readers.otf (specification) return opentypereader(specification,"otf",formats.otf ) end
+function readers.ttf (specification) return opentypereader(specification,"ttf",formats.ttf ) end
+function readers.ttc (specification) return opentypereader(specification,"ttf",formats.ttc ) end
+function readers.dfont(specification) return opentypereader(specification,"ttf",formats.dfont) end
+function otf.scriptandlanguage(tfmdata,attr)
+ local properties=tfmdata.properties
+ return properties.script or "dflt",properties.language or "dflt"
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-otb']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local concat=table.concat
+local format,gmatch,gsub,find,match,lower,strip=string.format,string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip
+local type,next,tonumber,tostring=type,next,tonumber,tostring
+local lpegmatch=lpeg.match
+local utfchar=utf.char
+local trace_baseinit=false trackers.register("otf.baseinit",function(v) trace_baseinit=v end)
+local trace_singles=false trackers.register("otf.singles",function(v) trace_singles=v end)
+local trace_multiples=false trackers.register("otf.multiples",function(v) trace_multiples=v end)
+local trace_alternatives=false trackers.register("otf.alternatives",function(v) trace_alternatives=v end)
+local trace_ligatures=false trackers.register("otf.ligatures",function(v) trace_ligatures=v end)
+local trace_ligatures_detail=false trackers.register("otf.ligatures.detail",function(v) trace_ligatures_detail=v end)
+local trace_kerns=false trackers.register("otf.kerns",function(v) trace_kerns=v end)
+local trace_preparing=false trackers.register("otf.preparing",function(v) trace_preparing=v end)
+local report_prepare=logs.reporter("fonts","otf prepare")
+local fonts=fonts
+local otf=fonts.handlers.otf
+local otffeatures=otf.features
+local registerotffeature=otffeatures.register
+otf.defaultbasealternate="none"
+local wildcard="*"
+local default="dflt"
+local formatters=string.formatters
+local f_unicode=formatters["%U"]
+local f_uniname=formatters["%U (%s)"]
+local f_unilist=formatters["% t (% t)"]
+local function gref(descriptions,n)
+ if type(n)=="number" then
+ local name=descriptions[n].name
+ if name then
+ return f_uniname(n,name)
+ else
+ return f_unicode(n)
+ end
+ elseif n then
+ local num,nam={},{}
+ for i=2,#n do
+ local ni=n[i]
+ if tonumber(ni) then
+ local di=descriptions[ni]
+ num[i]=f_unicode(ni)
+ nam[i]=di and di.name or "-"
+ end
+ end
+ return f_unilist(num,nam)
+ else
+ return "<error in base mode tracing>"
+ end
+end
+local function cref(feature,lookupname)
+ if lookupname then
+ return formatters["feature %a, lookup %a"](feature,lookupname)
+ else
+ return formatters["feature %a"](feature)
+ end
+end
+local function report_alternate(feature,lookupname,descriptions,unicode,replacement,value,comment)
+ report_prepare("%s: base alternate %s => %s (%S => %S)",
+ cref(feature,lookupname),
+ gref(descriptions,unicode),
+ replacement and gref(descriptions,replacement),
+ value,
+ comment)
+end
+local function report_substitution(feature,lookupname,descriptions,unicode,substitution)
+ report_prepare("%s: base substitution %s => %S",
+ cref(feature,lookupname),
+ gref(descriptions,unicode),
+ gref(descriptions,substitution))
+end
+local function report_ligature(feature,lookupname,descriptions,unicode,ligature)
+ report_prepare("%s: base ligature %s => %S",
+ cref(feature,lookupname),
+ gref(descriptions,ligature),
+ gref(descriptions,unicode))
+end
+local function report_kern(feature,lookupname,descriptions,unicode,otherunicode,value)
+ report_prepare("%s: base kern %s + %s => %S",
+ cref(feature,lookupname),
+ gref(descriptions,unicode),
+ gref(descriptions,otherunicode),
+ value)
+end
+local basemethods={}
+local basemethod="<unset>"
+local function applybasemethod(what,...)
+ local m=basemethods[basemethod][what]
+ if m then
+ return m(...)
+ end
+end
+local basehash,basehashes,applied={},1,{}
+local function registerbasehash(tfmdata)
+ local properties=tfmdata.properties
+ local hash=concat(applied," ")
+ local base=basehash[hash]
+ if not base then
+ basehashes=basehashes+1
+ base=basehashes
+ basehash[hash]=base
+ end
+ properties.basehash=base
+ properties.fullname=properties.fullname.."-"..base
+ applied={}
+end
+local function registerbasefeature(feature,value)
+ applied[#applied+1]=feature.."="..tostring(value)
+end
+local trace=false
+local function finalize_ligatures(tfmdata,ligatures)
+ local nofligatures=#ligatures
+ if nofligatures>0 then
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ local resources=tfmdata.resources
+ local unicodes=resources.unicodes
+ local private=resources.private
+ local alldone=false
+ while not alldone do
+ local done=0
+ for i=1,nofligatures do
+ local ligature=ligatures[i]
+ if ligature then
+ local unicode,lookupdata=ligature[1],ligature[2]
+ if trace then
+ trace_ligatures_detail("building % a into %a",lookupdata,unicode)
+ end
+ local size=#lookupdata
+ local firstcode=lookupdata[1]
+ local firstdata=characters[firstcode]
+ local okay=false
+ if firstdata then
+ local firstname="ctx_"..firstcode
+ for i=1,size-1 do
+ local firstdata=characters[firstcode]
+ if not firstdata then
+ firstcode=private
+ if trace then
+ trace_ligatures_detail("defining %a as %a",firstname,firstcode)
+ end
+ unicodes[firstname]=firstcode
+ firstdata={ intermediate=true,ligatures={} }
+ characters[firstcode]=firstdata
+ descriptions[firstcode]={ name=firstname }
+ private=private+1
+ end
+ local target
+ local secondcode=lookupdata[i+1]
+ local secondname=firstname.."_"..secondcode
+ if i==size-1 then
+ target=unicode
+ if not unicodes[secondname] then
+ unicodes[secondname]=unicode
+ end
+ okay=true
+ else
+ target=unicodes[secondname]
+ if not target then
+ break
+ end
+ end
+ if trace then
+ trace_ligatures_detail("codes (%a,%a) + (%a,%a) -> %a",firstname,firstcode,secondname,secondcode,target)
+ end
+ local firstligs=firstdata.ligatures
+ if firstligs then
+ firstligs[secondcode]={ char=target }
+ else
+ firstdata.ligatures={ [secondcode]={ char=target } }
+ end
+ firstcode=target
+ firstname=secondname
+ end
+ end
+ if okay then
+ ligatures[i]=false
+ done=done+1
+ end
+ end
+ end
+ alldone=done==0
+ end
+ if trace then
+ for k,v in next,characters do
+ if v.ligatures then table.print(v,k) end
+ end
+ end
+ tfmdata.resources.private=private
+ end
+end
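+-- (editorial note) finalize_ligatures builds ligature trees for base mode: multi-glyph
+-- ligatures are chained pairwise through intermediate characters allocated in the
+-- private area (resources.private) under synthetic "ctx_..." names, so that the
+-- pairwise ligature mechanism of base mode can reach the final ligature glyph.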
+local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist)
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ local resources=tfmdata.resources
+ local changed=tfmdata.changed
+ local unicodes=resources.unicodes
+ local lookuphash=resources.lookuphash
+ local lookuptypes=resources.lookuptypes
+ local ligatures={}
+ local alternate=tonumber(value)
+ local defaultalt=otf.defaultbasealternate
+ local trace_singles=trace_baseinit and trace_singles
+ local trace_alternatives=trace_baseinit and trace_alternatives
+ local trace_ligatures=trace_baseinit and trace_ligatures
+ local actions={
+ substitution=function(lookupdata,lookupname,description,unicode)
+ if trace_singles then
+ report_substitution(feature,lookupname,descriptions,unicode,lookupdata)
+ end
+ changed[unicode]=lookupdata
+ end,
+ alternate=function(lookupdata,lookupname,description,unicode)
+ local replacement=lookupdata[alternate]
+ if replacement then
+ changed[unicode]=replacement
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal")
+ end
+ elseif defaultalt=="first" then
+ replacement=lookupdata[1]
+ changed[unicode]=replacement
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ end
+ elseif defaultalt=="last" then
+ replacement=lookupdata[#lookupdata]
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ end
+ else
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown")
+ end
+ end
+ end,
+ ligature=function(lookupdata,lookupname,description,unicode)
+ if trace_ligatures then
+ report_ligature(feature,lookupname,descriptions,unicode,lookupdata)
+ end
+ ligatures[#ligatures+1]={ unicode,lookupdata }
+ end,
+ }
+ for unicode,character in next,characters do
+ local description=descriptions[unicode]
+ local lookups=description.slookups
+ if lookups then
+ for l=1,#lookuplist do
+ local lookupname=lookuplist[l]
+ local lookupdata=lookups[lookupname]
+ if lookupdata then
+ local lookuptype=lookuptypes[lookupname]
+ local action=actions[lookuptype]
+ if action then
+ action(lookupdata,lookupname,description,unicode)
+ end
+ end
+ end
+ end
+ local lookups=description.mlookups
+ if lookups then
+ for l=1,#lookuplist do
+ local lookupname=lookuplist[l]
+ local lookuplist=lookups[lookupname]
+ if lookuplist then
+ local lookuptype=lookuptypes[lookupname]
+ local action=actions[lookuptype]
+ if action then
+ for i=1,#lookuplist do
+ action(lookuplist[i],lookupname,description,unicode)
+ end
+ end
+ end
+ end
+ end
+ end
+ finalize_ligatures(tfmdata,ligatures)
+end
+local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist)
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ local resources=tfmdata.resources
+ local unicodes=resources.unicodes
+ local sharedkerns={}
+ local traceindeed=trace_baseinit and trace_kerns
+ for unicode,character in next,characters do
+ local description=descriptions[unicode]
+ local rawkerns=description.kerns
+ if rawkerns then
+ local s=sharedkerns[rawkerns]
+ if s==false then
+ elseif s then
+ character.kerns=s
+ else
+ local newkerns=character.kerns
+ local done=false
+ for l=1,#lookuplist do
+ local lookup=lookuplist[l]
+ local kerns=rawkerns[lookup]
+ if kerns then
+ for otherunicode,value in next,kerns do
+ if value==0 then
+ elseif not newkerns then
+ newkerns={ [otherunicode]=value }
+ done=true
+ if traceindeed then
+ report_kern(feature,lookup,descriptions,unicode,otherunicode,value)
+ end
+ elseif not newkerns[otherunicode] then
+ newkerns[otherunicode]=value
+ done=true
+ if traceindeed then
+ report_kern(feature,lookup,descriptions,unicode,otherunicode,value)
+ end
+ end
+ end
+ end
+ end
+ if done then
+ sharedkerns[rawkerns]=newkerns
+ character.kerns=newkerns
+ else
+ sharedkerns[rawkerns]=false
+ end
+ end
+ end
+ end
+end
+basemethods.independent={
+ preparesubstitutions=preparesubstitutions,
+ preparepositionings=preparepositionings,
+}
+local function makefake(tfmdata,name,present)
+ local resources=tfmdata.resources
+ local private=resources.private
+ local character={ intermediate=true,ligatures={} }
+ resources.unicodes[name]=private
+ tfmdata.characters[private]=character
+ tfmdata.descriptions[private]={ name=name }
+ resources.private=private+1
+ present[name]=private
+ return character
+end
+local function make_1(present,tree,name)
+ for k,v in next,tree do
+ if k=="ligature" then
+ present[name]=v
+ else
+ make_1(present,v,name.."_"..k)
+ end
+ end
+end
+local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,done,lookupname)
+ for k,v in next,tree do
+ if k=="ligature" then
+ local character=characters[preceding]
+ if not character then
+ if trace_baseinit then
+ report_prepare("weird ligature in lookup %a, current %C, preceding %C",lookupname,v,preceding)
+ end
+ character=makefake(tfmdata,name,present)
+ end
+ local ligatures=character.ligatures
+ if ligatures then
+ ligatures[unicode]={ char=v }
+ else
+ character.ligatures={ [unicode]={ char=v } }
+ end
+ if done then
+ local d=done[lookupname]
+ if not d then
+ done[lookupname]={ "dummy",v }
+ else
+ d[#d+1]=v
+ end
+ end
+ else
+ local code=present[name] or unicode
+ local name=name.."_"..k
+ make_2(present,tfmdata,characters,v,name,code,k,done,lookupname)
+ end
+ end
+end
+local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist)
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ local resources=tfmdata.resources
+ local changed=tfmdata.changed
+ local lookuphash=resources.lookuphash
+ local lookuptypes=resources.lookuptypes
+ local ligatures={}
+ local alternate=tonumber(value)
+ local defaultalt=otf.defaultbasealternate
+ local trace_singles=trace_baseinit and trace_singles
+ local trace_alternatives=trace_baseinit and trace_alternatives
+ local trace_ligatures=trace_baseinit and trace_ligatures
+ for l=1,#lookuplist do
+ local lookupname=lookuplist[l]
+ local lookupdata=lookuphash[lookupname]
+ local lookuptype=lookuptypes[lookupname]
+ for unicode,data in next,lookupdata do
+ if lookuptype=="substitution" then
+ if trace_singles then
+ report_substitution(feature,lookupname,descriptions,unicode,data)
+ end
+ changed[unicode]=data
+ elseif lookuptype=="alternate" then
+ local replacement=data[alternate]
+ if replacement then
+ changed[unicode]=replacement
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal")
+ end
+ elseif defaultalt=="first" then
+ replacement=data[1]
+ changed[unicode]=replacement
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ end
+ elseif defaultalt=="last" then
+ replacement=data[#data]
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ end
+ else
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown")
+ end
+ end
+ elseif lookuptype=="ligature" then
+ ligatures[#ligatures+1]={ unicode,data,lookupname }
+ if trace_ligatures then
+ report_ligature(feature,lookupname,descriptions,unicode,data)
+ end
+ end
+ end
+ end
+ local nofligatures=#ligatures
+ if nofligatures>0 then
+ local characters=tfmdata.characters
+ local present={}
+ local done=trace_baseinit and trace_ligatures and {}
+ for i=1,nofligatures do
+ local ligature=ligatures[i]
+ local unicode,tree=ligature[1],ligature[2]
+ make_1(present,tree,"ctx_"..unicode)
+ end
+ for i=1,nofligatures do
+ local ligature=ligatures[i]
+ local unicode,tree,lookupname=ligature[1],ligature[2],ligature[3]
+ make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,lookupname)
+ end
+ end
+end
+local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist)
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ local resources=tfmdata.resources
+ local lookuphash=resources.lookuphash
+ local traceindeed=trace_baseinit and trace_kerns
+ for l=1,#lookuplist do
+ local lookupname=lookuplist[l]
+ local lookupdata=lookuphash[lookupname]
+ for unicode,data in next,lookupdata do
+ local character=characters[unicode]
+ local kerns=character.kerns
+ if not kerns then
+ kerns={}
+ character.kerns=kerns
+ end
+ if traceindeed then
+ for otherunicode,kern in next,data do
+ if not kerns[otherunicode] and kern~=0 then
+ kerns[otherunicode]=kern
+ report_kern(feature,lookupname,descriptions,unicode,otherunicode,kern)
+ end
+ end
+ else
+ for otherunicode,kern in next,data do
+ if not kerns[otherunicode] and kern~=0 then
+ kerns[otherunicode]=kern
+ end
+ end
+ end
+ end
+ end
+end
+local function initializehashes(tfmdata)
+ nodeinitializers.features(tfmdata)
+end
+basemethods.shared={
+ initializehashes=initializehashes,
+ preparesubstitutions=preparesubstitutions,
+ preparepositionings=preparepositionings,
+}
+basemethod="independent"
+local function featuresinitializer(tfmdata,value)
+ if true then
+ local t=trace_preparing and os.clock()
+ local features=tfmdata.shared.features
+ if features then
+ applybasemethod("initializehashes",tfmdata)
+ local collectlookups=otf.collectlookups
+ local rawdata=tfmdata.shared.rawdata
+ local properties=tfmdata.properties
+ local script=properties.script
+ local language=properties.language
+ local basesubstitutions=rawdata.resources.features.gsub
+ local basepositionings=rawdata.resources.features.gpos
+ if basesubstitutions then
+ for feature,data in next,basesubstitutions do
+ local value=features[feature]
+ if value then
+ local validlookups,lookuplist=collectlookups(rawdata,feature,script,language)
+ if validlookups then
+ applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist)
+ registerbasefeature(feature,value)
+ end
+ end
+ end
+ end
+ if basepositionings then
+ for feature,data in next,basepositionings do
+ local value=features[feature]
+ if value then
+ local validlookups,lookuplist=collectlookups(rawdata,feature,script,language)
+ if validlookups then
+ applybasemethod("preparepositionings",tfmdata,feature,features[feature],validlookups,lookuplist)
+ registerbasefeature(feature,value)
+ end
+ end
+ end
+ end
+ registerbasehash(tfmdata)
+ end
+ if trace_preparing then
+ report_prepare("preparation time is %0.3f seconds for %a",os.clock()-t,tfmdata.properties.fullname)
+ end
+ end
+end
+registerotffeature {
+ name="features",
+ description="features",
+ default=true,
+ initializers={
+ base=featuresinitializer,
+ }
+}
+directives.register("fonts.otf.loader.basemethod",function(v)
+ if basemethods[v] then
+ basemethod=v
+ end
+end)
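+-- (editorial note) Two base-mode implementations are registered: "independent" walks
+-- the per-glyph slookups/mlookups and kern tables directly, while "shared" first runs
+-- the node-mode initializers (nodeinitializers.features) and then reads the resulting
+-- lookuphash. The default is "independent"; the directive above switches between them.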
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['node-inj']={
+ version=1.001,
+ comment="companion to node-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+}
+local next=next
+local utfchar=utf.char
+local trace_injections=false trackers.register("nodes.injections",function(v) trace_injections=v end)
+local report_injections=logs.reporter("nodes","injections")
+local attributes,nodes,node=attributes,nodes,node
+fonts=fonts
+local fontdata=fonts.hashes.identifiers
+nodes.injections=nodes.injections or {}
+local injections=nodes.injections
+local nodecodes=nodes.nodecodes
+local glyph_code=nodecodes.glyph
+local nodepool=nodes.pool
+local newkern=nodepool.kern
+local traverse_id=node.traverse_id
+local insert_node_before=node.insert_before
+local insert_node_after=node.insert_after
+local a_kernpair=attributes.private('kernpair')
+local a_ligacomp=attributes.private('ligacomp')
+local a_markbase=attributes.private('markbase')
+local a_markmark=attributes.private('markmark')
+local a_markdone=attributes.private('markdone')
+local a_cursbase=attributes.private('cursbase')
+local a_curscurs=attributes.private('curscurs')
+local a_cursdone=attributes.private('cursdone')
+function injections.installnewkern(nk)
+ newkern=nk or newkern
+end
+local cursives={}
+local marks={}
+local kerns={}
+function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext)
+ local dx,dy=factor*(exit[1]-entry[1]),factor*(exit[2]-entry[2])
+ local ws,wn=tfmstart.width,tfmnext.width
+ local bound=#cursives+1
+ start[a_cursbase]=bound
+ nxt[a_curscurs]=bound
+ cursives[bound]={ rlmode,dx,dy,ws,wn }
+ return dx,dy,bound
+end
+function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr)
+ local x,y,w,h=factor*spec[1],factor*spec[2],factor*spec[3],factor*spec[4]
+ if x~=0 or w~=0 or y~=0 or h~=0 then
+ local bound=current[a_kernpair]
+ if bound then
+ local kb=kerns[bound]
+ kb[2],kb[3],kb[4],kb[5]=(kb[2] or 0)+x,(kb[3] or 0)+y,(kb[4] or 0)+w,(kb[5] or 0)+h
+ else
+ bound=#kerns+1
+ current[a_kernpair]=bound
+ kerns[bound]={ rlmode,x,y,w,h,r2lflag,tfmchr.width }
+ end
+ return x,y,w,h,bound
+ end
+ return x,y,w,h
+end
+function injections.setkern(current,factor,rlmode,x,tfmchr)
+ local dx=factor*x
+ if dx~=0 then
+ local bound=#kerns+1
+ current[a_kernpair]=bound
+ kerns[bound]={ rlmode,dx }
+ return dx,bound
+ else
+ return 0,0
+ end
+end
+function injections.setmark(start,base,factor,rlmode,ba,ma,index)
+ local dx,dy=factor*(ba[1]-ma[1]),factor*(ba[2]-ma[2])
+ local bound=base[a_markbase]
+ local index=1
+ if bound then
+ local mb=marks[bound]
+ if mb then
+ index=#mb+1
+ mb[index]={ dx,dy,rlmode }
+ start[a_markmark]=bound
+ start[a_markdone]=index
+ return dx,dy,bound
+ else
+ report_injections("possible problem, %U is base mark without data (id %a)",base.char,bound)
+ end
+ end
+ index=index or 1
+ bound=#marks+1
+ base[a_markbase]=bound
+ start[a_markmark]=bound
+ start[a_markdone]=index
+ marks[bound]={ [index]={ dx,dy,rlmode } }
+ return dx,dy,bound
+end
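+-- (editorial note) The set* functions above only record positioning data: cursive
+-- connections, pair/kern values and mark anchors are appended to the cursives, kerns
+-- and marks arrays, and the array index ("bound") is stored on the nodes in private
+-- attributes (a_cursbase/a_curscurs, a_kernpair, a_markbase/a_markmark/a_markdone).
+-- The actual offsets and kern nodes are produced later in injections.handler.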
+local function dir(n)
+ return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset"
+end
+local function trace(head)
+ report_injections("begin run")
+ for n in traverse_id(glyph_code,head) do
+ if n.subtype<256 then
+ local kp=n[a_kernpair]
+ local mb=n[a_markbase]
+ local mm=n[a_markmark]
+ local md=n[a_markdone]
+ local cb=n[a_cursbase]
+ local cc=n[a_curscurs]
+ local char=n.char
+ report_injections("font %s, char %U, glyph %c",n.font,char,char)
+ if kp then
+ local k=kerns[kp]
+ if k[3] then
+ report_injections(" pairkern: dir %a, x %p, y %p, w %p, h %p",dir(k[1]),k[2],k[3],k[4],k[5])
+ else
+ report_injections(" kern: dir %a, dx %p",dir(k[1]),k[2])
+ end
+ end
+ if mb then
+ report_injections(" markbase: bound %a",mb)
+ end
+ if mm then
+ local m=marks[mm]
+ if mb then
+ local m=m[mb]
+ if m then
+ report_injections(" markmark: bound %a, index %a, dx %p, dy %p",mm,md,m[1],m[2])
+ else
+ report_injections(" markmark: bound %a, missing index",mm)
+ end
+ else
+ m=m[1]
+ report_injections(" markmark: bound %a, dx %p, dy %p",mm,m and m[1],m and m[2])
+ end
+ end
+ if cb then
+ report_injections(" cursbase: bound %a",cb)
+ end
+ if cc then
+ local c=cursives[cc]
+ report_injections(" curscurs: bound %a, dir %a, dx %p, dy %p",cc,dir(c[1]),c[2],c[3])
+ end
+ end
+ end
+ report_injections("end run")
+end
+function injections.handler(head,where,keep)
+ local has_marks,has_cursives,has_kerns=next(marks),next(cursives),next(kerns)
+ if has_marks or has_cursives then
+ if trace_injections then
+ trace(head)
+ end
+ local done,ky,rl,valid,cx,wx,mk,nofvalid=false,{},{},{},{},{},{},0
+ if has_kerns then
+ local nf,tm=nil,nil
+ for n in traverse_id(glyph_code,head) do
+ if n.subtype<256 then
+ nofvalid=nofvalid+1
+ valid[nofvalid]=n
+ if n.font~=nf then
+ nf=n.font
+ tm=fontdata[nf].resources.marks
+ end
+ if tm then
+ mk[n]=tm[n.char]
+ end
+ local k=n[a_kernpair]
+ if k then
+ local kk=kerns[k]
+ if kk then
+ local x,y,w,h=kk[2] or 0,kk[3] or 0,kk[4] or 0,kk[5] or 0
+ local dy=y-h
+ if dy~=0 then
+ ky[n]=dy
+ end
+ if w~=0 or x~=0 then
+ wx[n]=kk
+ end
+ rl[n]=kk[1]
+ end
+ end
+ end
+ end
+ else
+ local nf,tm=nil,nil
+ for n in traverse_id(glyph_code,head) do
+ if n.subtype<256 then
+ nofvalid=nofvalid+1
+ valid[nofvalid]=n
+ if n.font~=nf then
+ nf=n.font
+ tm=fontdata[nf].resources.marks
+ end
+ if tm then
+ mk[n]=tm[n.char]
+ end
+ end
+ end
+ end
+ if nofvalid>0 then
+ local cx={}
+ if has_kerns and next(ky) then
+ for n,k in next,ky do
+ n.yoffset=k
+ end
+ end
+ if has_cursives then
+ local p_cursbase,p=nil,nil
+ local t,d,maxt={},{},0
+ for i=1,nofvalid do
+ local n=valid[i]
+ if not mk[n] then
+ local n_cursbase=n[a_cursbase]
+ if p_cursbase then
+ local n_curscurs=n[a_curscurs]
+ if p_cursbase==n_curscurs then
+ local c=cursives[n_curscurs]
+ if c then
+ local rlmode,dx,dy,ws,wn=c[1],c[2],c[3],c[4],c[5]
+ if rlmode>=0 then
+ dx=dx-ws
+ else
+ dx=dx+wn
+ end
+ if dx~=0 then
+ cx[n]=dx
+ rl[n]=rlmode
+ end
+ dy=-dy
+ maxt=maxt+1
+ t[maxt]=p
+ d[maxt]=dy
+ else
+ maxt=0
+ end
+ end
+ elseif maxt>0 then
+ local ny=n.yoffset
+ for i=maxt,1,-1 do
+ ny=ny+d[i]
+ local ti=t[i]
+ ti.yoffset=ti.yoffset+ny
+ end
+ maxt=0
+ end
+ if not n_cursbase and maxt>0 then
+ local ny=n.yoffset
+ for i=maxt,1,-1 do
+ ny=ny+d[i]
+ local ti=t[i]
+ ti.yoffset=ny
+ end
+ maxt=0
+ end
+ p_cursbase,p=n_cursbase,n
+ end
+ end
+ if maxt>0 then
+ local ny=n.yoffset
+ for i=maxt,1,-1 do
+ ny=ny+d[i]
+ local ti=t[i]
+ ti.yoffset=ny
+ end
+ maxt=0
+ end
+ if not keep then
+ cursives={}
+ end
+ end
+ if has_marks then
+ for i=1,nofvalid do
+ local p=valid[i]
+ local p_markbase=p[a_markbase]
+ if p_markbase then
+ local mrks=marks[p_markbase]
+ local nofmarks=#mrks
+ for n in traverse_id(glyph_code,p.next) do
+ local n_markmark=n[a_markmark]
+ if p_markbase==n_markmark then
+ local index=n[a_markdone] or 1
+ local d=mrks[index]
+ if d then
+ local rlmode=d[3]
+ if rlmode and rlmode>=0 then
+ local k=wx[p]
+ if k then
+ n.xoffset=p.xoffset-p.width+d[1]-k[2]
+ else
+ n.xoffset=p.xoffset-p.width+d[1]
+ end
+ else
+ local k=wx[p]
+ if k then
+ n.xoffset=p.xoffset-d[1]-k[2]
+ else
+ n.xoffset=p.xoffset-d[1]
+ end
+ end
+ if mk[p] then
+ n.yoffset=p.yoffset+d[2]
+ else
+ n.yoffset=n.yoffset+p.yoffset+d[2]
+ end
+ if nofmarks==1 then
+ break
+ else
+ nofmarks=nofmarks-1
+ end
+ end
+ else
+ end
+ end
+ end
+ end
+ if not keep then
+ marks={}
+ end
+ end
+ if next(wx) then
+ for n,k in next,wx do
+ local x,w=k[2] or 0,k[4]
+ if w then
+ local rl=k[1]
+ local wx=w-x
+ if rl<0 then
+ if wx~=0 then
+ insert_node_before(head,n,newkern(wx))
+ end
+ if x~=0 then
+ insert_node_after (head,n,newkern(x))
+ end
+ else
+ if x~=0 then
+ insert_node_before(head,n,newkern(x))
+ end
+ if wx~=0 then
+ insert_node_after(head,n,newkern(wx))
+ end
+ end
+ elseif x~=0 then
+ insert_node_before(head,n,newkern(x))
+ end
+ end
+ end
+ if next(cx) then
+ for n,k in next,cx do
+ if k~=0 then
+ local rln=rl[n]
+ if rln and rln<0 then
+ insert_node_before(head,n,newkern(-k))
+ else
+ insert_node_before(head,n,newkern(k))
+ end
+ end
+ end
+ end
+ if not keep then
+ kerns={}
+ end
+ return head,true
+ elseif not keep then
+ kerns,cursives,marks={},{},{}
+ end
+ elseif has_kerns then
+ if trace_injections then
+ trace(head)
+ end
+ for n in traverse_id(glyph_code,head) do
+ if n.subtype<256 then
+ local k=n[a_kernpair]
+ if k then
+ local kk=kerns[k]
+ if kk then
+ local rl,x,y,w=kk[1],kk[2] or 0,kk[3],kk[4]
+ if y and y~=0 then
+ n.yoffset=y
+ end
+ if w then
+ local wx=w-x
+ if rl<0 then
+ if wx~=0 then
+ insert_node_before(head,n,newkern(wx))
+ end
+ if x~=0 then
+ insert_node_after (head,n,newkern(x))
+ end
+ else
+ if x~=0 then
+ insert_node_before(head,n,newkern(x))
+ end
+ if wx~=0 then
+ insert_node_after(head,n,newkern(wx))
+ end
+ end
+ else
+ if x~=0 then
+ insert_node_before(head,n,newkern(x))
+ end
+ end
+ end
+ end
+ end
+ end
+ if not keep then
+ kerns={}
+ end
+ return head,true
+ else
+ end
+ return head,false
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-ota']={
+ version=1.001,
+ comment="companion to font-otf.lua (analysing)",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type=type
+if not trackers then trackers={ register=function() end } end
+local fonts,nodes,node=fonts,nodes,node
+local allocate=utilities.storage.allocate
+local otf=fonts.handlers.otf
+local analyzers=fonts.analyzers
+local initializers=allocate()
+local methods=allocate()
+analyzers.initializers=initializers
+analyzers.methods=methods
+analyzers.useunicodemarks=false
+local a_state=attributes.private('state')
+local nodecodes=nodes.nodecodes
+local glyph_code=nodecodes.glyph
+local disc_code=nodecodes.disc
+local math_code=nodecodes.math
+local traverse_id=node.traverse_id
+local traverse_node_list=node.traverse
+local end_of_math=node.end_of_math
+local fontdata=fonts.hashes.identifiers
+local categories=characters and characters.categories or {}
+local otffeatures=fonts.constructors.newfeatures("otf")
+local registerotffeature=otffeatures.register
+local s_init=1 local s_rphf=7
+local s_medi=2 local s_half=8
+local s_fina=3 local s_pref=9
+local s_isol=4 local s_blwf=10
+local s_mark=5 local s_pstf=11
+local s_rest=6
+local states={
+ init=s_init,
+ medi=s_medi,
+ fina=s_fina,
+ isol=s_isol,
+ mark=s_mark,
+ rest=s_rest,
+ rphf=s_rphf,
+ half=s_half,
+ pref=s_pref,
+ blwf=s_blwf,
+ pstf=s_pstf,
+}
+local features={
+ init=s_init,
+ medi=s_medi,
+ fina=s_fina,
+ isol=s_isol,
+}
+analyzers.states=states
+analyzers.features=features
+function analyzers.setstate(head,font)
+ local useunicodemarks=analyzers.useunicodemarks
+ local tfmdata=fontdata[font]
+ local descriptions=tfmdata.descriptions
+ local first,last,current,n,done=nil,nil,head,0,false
+ while current do
+ local id=current.id
+ if id==glyph_code and current.font==font then
+ done=true
+ local char=current.char
+ local d=descriptions[char]
+ if d then
+ if d.class=="mark" or (useunicodemarks and categories[char]=="mn") then
+ done=true
+ current[a_state]=s_mark
+ elseif n==0 then
+ first,last,n=current,current,1
+ current[a_state]=s_init
+ else
+ last,n=current,n+1
+ current[a_state]=s_medi
+ end
+ else
+ if first and first==last then
+ last[a_state]=s_isol
+ elseif last then
+ last[a_state]=s_fina
+ end
+ first,last,n=nil,nil,0
+ end
+ elseif id==disc_code then
+ current[a_state]=s_medi
+ last=current
+ else
+ if first and first==last then
+ last[a_state]=s_isol
+ elseif last then
+ last[a_state]=s_fina
+ end
+ first,last,n=nil,nil,0
+ if id==math_code then
+ current=end_of_math(current)
+ end
+ end
+ current=current.next
+ end
+ if first and first==last then
+ last[a_state]=s_isol
+ elseif last then
+ last[a_state]=s_fina
+ end
+ return head,done
+end
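+-- (editorial note) analyzers.setstate is the generic analyzer (used for latn below): it
+-- tags glyph runs of the given font with init/medi/fina/isol/mark states via the
+-- private 'state' attribute; anything that interrupts a run (undescribed glyphs, other
+-- node types, math) closes it, turning a one-glyph run into isol and the last glyph of
+-- a longer run into fina.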
+local function analyzeinitializer(tfmdata,value)
+ local script,language=otf.scriptandlanguage(tfmdata)
+ local action=initializers[script]
+ if not action then
+ elseif type(action)=="function" then
+ return action(tfmdata,value)
+ else
+ local action=action[language]
+ if action then
+ return action(tfmdata,value)
+ end
+ end
+end
+local function analyzeprocessor(head,font,attr)
+ local tfmdata=fontdata[font]
+ local script,language=otf.scriptandlanguage(tfmdata,attr)
+ local action=methods[script]
+ if not action then
+ elseif type(action)=="function" then
+ return action(head,font,attr)
+ else
+ action=action[language]
+ if action then
+ return action(head,font,attr)
+ end
+ end
+ return head,false
+end
+registerotffeature {
+ name="analyze",
+ description="analysis of (for instance) character classes",
+ default=true,
+ initializers={
+ node=analyzeinitializer,
+ },
+ processors={
+ position=1,
+ node=analyzeprocessor,
+ }
+}
+methods.latn=analyzers.setstate
+local tatweel=0x0640
+local zwnj=0x200C
+local zwj=0x200D
+local isolated={
+ [0x0600]=true,[0x0601]=true,[0x0602]=true,[0x0603]=true,
+ [0x0604]=true,
+ [0x0608]=true,[0x060B]=true,[0x0621]=true,[0x0674]=true,
+ [0x06DD]=true,
+ [0x0856]=true,[0x0858]=true,[0x0857]=true,
+ [0x07FA]=true,
+ [zwnj]=true,
+}
+local final={
+ [0x0622]=true,[0x0623]=true,[0x0624]=true,[0x0625]=true,
+ [0x0627]=true,[0x0629]=true,[0x062F]=true,[0x0630]=true,
+ [0x0631]=true,[0x0632]=true,[0x0648]=true,[0x0671]=true,
+ [0x0672]=true,[0x0673]=true,[0x0675]=true,[0x0676]=true,
+ [0x0677]=true,[0x0688]=true,[0x0689]=true,[0x068A]=true,
+ [0x068B]=true,[0x068C]=true,[0x068D]=true,[0x068E]=true,
+ [0x068F]=true,[0x0690]=true,[0x0691]=true,[0x0692]=true,
+ [0x0693]=true,[0x0694]=true,[0x0695]=true,[0x0696]=true,
+ [0x0697]=true,[0x0698]=true,[0x0699]=true,[0x06C0]=true,
+ [0x06C3]=true,[0x06C4]=true,[0x06C5]=true,[0x06C6]=true,
+ [0x06C7]=true,[0x06C8]=true,[0x06C9]=true,[0x06CA]=true,
+ [0x06CB]=true,[0x06CD]=true,[0x06CF]=true,[0x06D2]=true,
+ [0x06D3]=true,[0x06D5]=true,[0x06EE]=true,[0x06EF]=true,
+ [0x0759]=true,[0x075A]=true,[0x075B]=true,[0x076B]=true,
+ [0x076C]=true,[0x0771]=true,[0x0773]=true,[0x0774]=true,
+ [0x0778]=true,[0x0779]=true,
+ [0x08AA]=true,[0x08AB]=true,[0x08AC]=true,
+ [0xFEF5]=true,[0xFEF7]=true,[0xFEF9]=true,[0xFEFB]=true,
+ [0x0710]=true,[0x0715]=true,[0x0716]=true,[0x0717]=true,
+ [0x0718]=true,[0x0719]=true,[0x0728]=true,[0x072A]=true,
+ [0x072C]=true,[0x071E]=true,
+ [0x072F]=true,[0x074D]=true,
+ [0x0840]=true,[0x0849]=true,[0x0854]=true,[0x0846]=true,
+ [0x084F]=true
+}
+local medial={
+ [0x0626]=true,[0x0628]=true,[0x062A]=true,[0x062B]=true,
+ [0x062C]=true,[0x062D]=true,[0x062E]=true,[0x0633]=true,
+ [0x0634]=true,[0x0635]=true,[0x0636]=true,[0x0637]=true,
+ [0x0638]=true,[0x0639]=true,[0x063A]=true,[0x063B]=true,
+ [0x063C]=true,[0x063D]=true,[0x063E]=true,[0x063F]=true,
+ [0x0641]=true,[0x0642]=true,[0x0643]=true,
+ [0x0644]=true,[0x0645]=true,[0x0646]=true,[0x0647]=true,
+ [0x0649]=true,[0x064A]=true,[0x066E]=true,[0x066F]=true,
+ [0x0678]=true,[0x0679]=true,[0x067A]=true,[0x067B]=true,
+ [0x067C]=true,[0x067D]=true,[0x067E]=true,[0x067F]=true,
+ [0x0680]=true,[0x0681]=true,[0x0682]=true,[0x0683]=true,
+ [0x0684]=true,[0x0685]=true,[0x0686]=true,[0x0687]=true,
+ [0x069A]=true,[0x069B]=true,[0x069C]=true,[0x069D]=true,
+ [0x069E]=true,[0x069F]=true,[0x06A0]=true,[0x06A1]=true,
+ [0x06A2]=true,[0x06A3]=true,[0x06A4]=true,[0x06A5]=true,
+ [0x06A6]=true,[0x06A7]=true,[0x06A8]=true,[0x06A9]=true,
+ [0x06AA]=true,[0x06AB]=true,[0x06AC]=true,[0x06AD]=true,
+ [0x06AE]=true,[0x06AF]=true,[0x06B0]=true,[0x06B1]=true,
+ [0x06B2]=true,[0x06B3]=true,[0x06B4]=true,[0x06B5]=true,
+ [0x06B6]=true,[0x06B7]=true,[0x06B8]=true,[0x06B9]=true,
+ [0x06BA]=true,[0x06BB]=true,[0x06BC]=true,[0x06BD]=true,
+ [0x06BE]=true,[0x06BF]=true,[0x06C1]=true,[0x06C2]=true,
+ [0x06CC]=true,[0x06CE]=true,[0x06D0]=true,[0x06D1]=true,
+ [0x06FA]=true,[0x06FB]=true,[0x06FC]=true,[0x06FF]=true,
+ [0x0750]=true,[0x0751]=true,[0x0752]=true,[0x0753]=true,
+ [0x0754]=true,[0x0755]=true,[0x0756]=true,[0x0757]=true,
+ [0x0758]=true,[0x075C]=true,[0x075D]=true,[0x075E]=true,
+ [0x075F]=true,[0x0760]=true,[0x0761]=true,[0x0762]=true,
+ [0x0763]=true,[0x0764]=true,[0x0765]=true,[0x0766]=true,
+ [0x0767]=true,[0x0768]=true,[0x0769]=true,[0x076A]=true,
+ [0x076D]=true,[0x076E]=true,[0x076F]=true,[0x0770]=true,
+ [0x0772]=true,[0x0775]=true,[0x0776]=true,[0x0777]=true,
+ [0x077A]=true,[0x077B]=true,[0x077C]=true,[0x077D]=true,
+ [0x077E]=true,[0x077F]=true,
+ [0x08A0]=true,[0x08A2]=true,[0x08A4]=true,[0x08A5]=true,
+ [0x08A6]=true,[0x0620]=true,[0x08A8]=true,[0x08A9]=true,
+ [0x08A7]=true,[0x08A3]=true,
+ [0x0712]=true,[0x0713]=true,[0x0714]=true,[0x071A]=true,
+ [0x071B]=true,[0x071C]=true,[0x071D]=true,[0x071F]=true,
+ [0x0720]=true,[0x0721]=true,[0x0722]=true,[0x0723]=true,
+ [0x0724]=true,[0x0725]=true,[0x0726]=true,[0x0727]=true,
+ [0x0729]=true,[0x072B]=true,[0x072D]=true,[0x072E]=true,
+ [0x074E]=true,[0x074F]=true,
+ [0x0841]=true,[0x0842]=true,[0x0843]=true,[0x0844]=true,
+ [0x0845]=true,[0x0847]=true,[0x0848]=true,[0x0855]=true,
+ [0x0851]=true,[0x084E]=true,[0x084D]=true,[0x084A]=true,
+ [0x084B]=true,[0x084C]=true,[0x0850]=true,[0x0852]=true,
+ [0x0853]=true,
+ [0x07D7]=true,[0x07E8]=true,[0x07D9]=true,[0x07EA]=true,
+ [0x07CA]=true,[0x07DB]=true,[0x07CC]=true,[0x07DD]=true,
+ [0x07CE]=true,[0x07DF]=true,[0x07D4]=true,[0x07E5]=true,
+ [0x07E9]=true,[0x07E7]=true,[0x07E3]=true,[0x07E2]=true,
+ [0x07E0]=true,[0x07E1]=true,[0x07DE]=true,[0x07DC]=true,
+ [0x07D1]=true,[0x07DA]=true,[0x07D8]=true,[0x07D6]=true,
+ [0x07D2]=true,[0x07D0]=true,[0x07CF]=true,[0x07CD]=true,
+ [0x07CB]=true,[0x07D3]=true,[0x07E4]=true,[0x07D5]=true,
+ [0x07E6]=true,
+ [tatweel]=true,
+ [zwj]=true,
+}
+local arab_warned={}
+local function warning(current,what)
+ local char=current.char
+ if not arab_warned[char] then
+ logs.report("analyze","arab: character %C has no %a class",char,what)
+ arab_warned[char]=true
+ end
+end
+local function finish(first,last)
+ if last then
+ if first==last then
+ local fc=first.char
+ if medial[fc] or final[fc] then
+ first[a_state]=s_isol
+ else
+ warning(first,"isol")
+ first[a_state]=s_error
+ end
+ else
+ local lc=last.char
+ if medial[lc] or final[lc] then
+ last[a_state]=s_fina
+ else
+ warning(last,"fina")
+ last[a_state]=s_error
+ end
+ end
+ first,last=nil,nil
+ elseif first then
+ local fc=first.char
+ if medial[fc] or final[fc] then
+ first[a_state]=s_isol
+ else
+ warning(first,"isol")
+ first[a_state]=s_error
+ end
+ first=nil
+ end
+ return first,last
+end
+function methods.arab(head,font,attr)
+ local useunicodemarks=analyzers.useunicodemarks
+ local tfmdata=fontdata[font]
+ local marks=tfmdata.resources.marks
+ local first,last,current,done=nil,nil,head,false
+ while current do
+ local id=current.id
+ if id==glyph_code and current.font==font and current.subtype<256 and not current[a_state] then
+ done=true
+ local char=current.char
+ if marks[char] or (useunicodemarks and categories[char]=="mn") then
+ current[a_state]=s_mark
+ elseif isolated[char] then
+ first,last=finish(first,last)
+ current[a_state]=s_isol
+ first,last=nil,nil
+ elseif not first then
+ if medial[char] then
+ current[a_state]=s_init
+ first,last=first or current,current
+ elseif final[char] then
+ current[a_state]=s_isol
+ first,last=nil,nil
+ else
+ first,last=finish(first,last)
+ end
+ elseif medial[char] then
+ first,last=first or current,current
+ current[a_state]=s_medi
+ elseif final[char] then
+ if last[a_state]~=s_init then
+ last[a_state]=s_medi
+ end
+ current[a_state]=s_fina
+ first,last=nil,nil
+ elseif char>=0x0600 and char<=0x06FF then
+ current[a_state]=s_rest
+ first,last=finish(first,last)
+ else
+ first,last=finish(first,last)
+ end
+ else
+ if first or last then
+ first,last=finish(first,last)
+ end
+ if id==math_code then
+ current=end_of_math(current)
+ end
+ end
+ current=current.next
+ end
+ if first or last then
+ finish(first,last)
+ end
+ return head,done
+end
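+-- (editorial note) methods.arab classifies characters through the tables above:
+-- "isolated" holds non-joining characters, "final" characters that only take final or
+-- isolated forms, and "medial" dual-joining characters; init/medi/fina/isol states are
+-- assigned accordingly, marks are tagged separately, and otherwise unknown characters
+-- in the Arabic block get s_rest. The same analyzer is reused below for Syriac,
+-- Mandaic and N'Ko.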
+methods.syrc=methods.arab
+methods.mand=methods.arab
+methods.nko=methods.arab
+directives.register("otf.analyze.useunicodemarks",function(v)
+ analyzers.useunicodemarks=v
+end)
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-otn']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+}
+local concat,insert,remove=table.concat,table.insert,table.remove
+local gmatch,gsub,find,match,lower,strip=string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip
+local type,next,tonumber,tostring=type,next,tonumber,tostring
+local lpegmatch=lpeg.match
+local random=math.random
+local formatters=string.formatters
+local logs,trackers,nodes,attributes=logs,trackers,nodes,attributes
+local registertracker=trackers.register
+local fonts=fonts
+local otf=fonts.handlers.otf
+local trace_lookups=false registertracker("otf.lookups",function(v) trace_lookups=v end)
+local trace_singles=false registertracker("otf.singles",function(v) trace_singles=v end)
+local trace_multiples=false registertracker("otf.multiples",function(v) trace_multiples=v end)
+local trace_alternatives=false registertracker("otf.alternatives",function(v) trace_alternatives=v end)
+local trace_ligatures=false registertracker("otf.ligatures",function(v) trace_ligatures=v end)
+local trace_contexts=false registertracker("otf.contexts",function(v) trace_contexts=v end)
+local trace_marks=false registertracker("otf.marks",function(v) trace_marks=v end)
+local trace_kerns=false registertracker("otf.kerns",function(v) trace_kerns=v end)
+local trace_cursive=false registertracker("otf.cursive",function(v) trace_cursive=v end)
+local trace_preparing=false registertracker("otf.preparing",function(v) trace_preparing=v end)
+local trace_bugs=false registertracker("otf.bugs",function(v) trace_bugs=v end)
+local trace_details=false registertracker("otf.details",function(v) trace_details=v end)
+local trace_applied=false registertracker("otf.applied",function(v) trace_applied=v end)
+local trace_steps=false registertracker("otf.steps",function(v) trace_steps=v end)
+local trace_skips=false registertracker("otf.skips",function(v) trace_skips=v end)
+local trace_directions=false registertracker("otf.directions",function(v) trace_directions=v end)
+local report_direct=logs.reporter("fonts","otf direct")
+local report_subchain=logs.reporter("fonts","otf subchain")
+local report_chain=logs.reporter("fonts","otf chain")
+local report_process=logs.reporter("fonts","otf process")
+local report_prepare=logs.reporter("fonts","otf prepare")
+local report_warning=logs.reporter("fonts","otf warning")
+registertracker("otf.verbose_chain",function(v) otf.setcontextchain(v and "verbose") end)
+registertracker("otf.normal_chain",function(v) otf.setcontextchain(v and "normal") end)
+registertracker("otf.replacements","otf.singles,otf.multiples,otf.alternatives,otf.ligatures")
+registertracker("otf.positions","otf.marks,otf.kerns,otf.cursive")
+registertracker("otf.actions","otf.replacements,otf.positions")
+registertracker("otf.injections","nodes.injections")
+registertracker("*otf.sample","otf.steps,otf.actions,otf.analyzing")
+local insert_node_after=node.insert_after
+local delete_node=nodes.delete
+local copy_node=node.copy
+local find_node_tail=node.tail or node.slide
+local flush_node_list=node.flush_list
+local end_of_math=node.end_of_math
+local setmetatableindex=table.setmetatableindex
+local zwnj=0x200C
+local zwj=0x200D
+local wildcard="*"
+local default="dflt"
+local nodecodes=nodes.nodecodes
+local whatcodes=nodes.whatcodes
+local glyphcodes=nodes.glyphcodes
+local glyph_code=nodecodes.glyph
+local glue_code=nodecodes.glue
+local disc_code=nodecodes.disc
+local whatsit_code=nodecodes.whatsit
+local math_code=nodecodes.math
+local dir_code=whatcodes.dir
+local localpar_code=whatcodes.localpar
+local ligature_code=glyphcodes.ligature
+local privateattribute=attributes.private
+local a_state=privateattribute('state')
+local a_markbase=privateattribute('markbase')
+local a_markmark=privateattribute('markmark')
+local a_markdone=privateattribute('markdone')
+local a_cursbase=privateattribute('cursbase')
+local a_curscurs=privateattribute('curscurs')
+local a_cursdone=privateattribute('cursdone')
+local a_kernpair=privateattribute('kernpair')
+local a_ligacomp=privateattribute('ligacomp')
+local injections=nodes.injections
+local setmark=injections.setmark
+local setcursive=injections.setcursive
+local setkern=injections.setkern
+local setpair=injections.setpair
+local markonce=true
+local cursonce=true
+local kernonce=true
+local fonthashes=fonts.hashes
+local fontdata=fonthashes.identifiers
+local otffeatures=fonts.constructors.newfeatures("otf")
+local registerotffeature=otffeatures.register
+local onetimemessage=fonts.loggers.onetimemessage
+otf.defaultnodealternate="none"
+local tfmdata=false
+local characters=false
+local descriptions=false
+local resources=false
+local marks=false
+local currentfont=false
+local lookuptable=false
+local anchorlookups=false
+local lookuptypes=false
+local handlers={}
+local rlmode=0
+local featurevalue=false
+local checkstep=(nodes and nodes.tracers and nodes.tracers.steppers.check) or function() end
+local registerstep=(nodes and nodes.tracers and nodes.tracers.steppers.register) or function() end
+local registermessage=(nodes and nodes.tracers and nodes.tracers.steppers.message) or function() end
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_direct(...)
+end
+local function logwarning(...)
+ report_direct(...)
+end
+local f_unicode=formatters["%U"]
+local f_uniname=formatters["%U (%s)"]
+local f_unilist=formatters["% t (% t)"]
+local function gref(n)
+ if type(n)=="number" then
+ local description=descriptions[n]
+ local name=description and description.name
+ if name then
+ return f_uniname(n,name)
+ else
+ return f_unicode(n)
+ end
+ elseif n then
+ local num,nam={},{}
+ for i=1,#n do
+ local ni=n[i]
+ if tonumber(ni) then
+ local di=descriptions[ni]
+ num[i]=f_unicode(ni)
+ nam[i]=di and di.name or "-"
+ end
+ end
+ return f_unilist(num,nam)
+ else
+ return "<error in node mode tracing>"
+ end
+end
+local function cref(kind,chainname,chainlookupname,lookupname,index)
+ if index then
+ return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookupname,index)
+ elseif lookupname then
+ return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookupname)
+ elseif chainlookupname then
+ return formatters["feature %a, chain %a, sub %a"](kind,chainname,chainlookupname)
+ elseif chainname then
+ return formatters["feature %a, chain %a"](kind,chainname)
+ else
+ return formatters["feature %a"](kind)
+ end
+end
+local function pref(kind,lookupname)
+ return formatters["feature %a, lookup %a"](kind,lookupname)
+end
+local function copy_glyph(g)
+ local components=g.components
+ if components then
+ g.components=nil
+ local n=copy_node(g)
+ g.components=components
+ return n
+ else
+ return copy_node(g)
+ end
+end
+local function markstoligature(kind,lookupname,head,start,stop,char)
+ if start==stop and start.char==char then
+ return head,start
+ else
+ local prev=start.prev
+ local next=stop.next
+ start.prev=nil
+ stop.next=nil
+ local base=copy_glyph(start)
+ if head==start then
+ head=base
+ end
+ base.char=char
+ base.subtype=ligature_code
+ base.components=start
+ if prev then
+ prev.next=base
+ end
+ if next then
+ next.prev=base
+ end
+ base.next=next
+ base.prev=prev
+ return head,base
+ end
+end
+local function getcomponentindex(start)
+ if start.id~=glyph_code then
+ return 0
+ elseif start.subtype==ligature_code then
+ local i=0
+ local components=start.components
+ while components do
+ i=i+getcomponentindex(components)
+ components=components.next
+ end
+ return i
+ elseif not marks[start.char] then
+ return 1
+ else
+ return 0
+ end
+end
+local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound)
+ if start==stop and start.char==char then
+ start.char=char
+ return head,start
+ end
+ local prev=start.prev
+ local next=stop.next
+ start.prev=nil
+ stop.next=nil
+ local base=copy_glyph(start)
+ if start==head then
+ head=base
+ end
+ base.char=char
+ base.subtype=ligature_code
+ base.components=start
+ if prev then
+ prev.next=base
+ end
+ if next then
+ next.prev=base
+ end
+ base.next=next
+ base.prev=prev
+ if not discfound then
+ local deletemarks=markflag~="mark"
+ local components=start
+ local baseindex=0
+ local componentindex=0
+ local head=base
+ local current=base
+ while start do
+ local char=start.char
+ if not marks[char] then
+ baseindex=baseindex+componentindex
+ componentindex=getcomponentindex(start)
+ elseif not deletemarks then
+ start[a_ligacomp]=baseindex+(start[a_ligacomp] or componentindex)
+ if trace_marks then
+ logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp])
+ end
+ head,current=insert_node_after(head,current,copy_node(start))
+ end
+ start=start.next
+ end
+ local start=components
+ while start and start.id==glyph_code do
+ local char=start.char
+ if marks[char] then
+ start[a_ligacomp]=baseindex+(start[a_ligacomp] or componentindex)
+ if trace_marks then
+ logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp])
+ end
+ else
+ break
+ end
+ start=start.next
+ end
+ end
+ return head,base
+end
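+-- Lookup handlers: each receives the list head and the current glyph node and returns
+-- head, start and a boolean that signals whether anything was changed.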
+function handlers.gsub_single(head,start,kind,lookupname,replacement)
+ if trace_singles then
+ logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(start.char),gref(replacement))
+ end
+ start.char=replacement
+ return head,start,true
+end
+local function get_alternative_glyph(start,alternatives,value,trace_alternatives)
+ local n=#alternatives
+ if value=="random" then
+ local r=random(1,n)
+ return alternatives[r],trace_alternatives and formatters["value %a, taking %a"](value,r)
+ elseif value=="first" then
+ return alternatives[1],trace_alternatives and formatters["value %a, taking %a"](value,1)
+ elseif value=="last" then
+ return alternatives[n],trace_alternatives and formatters["value %a, taking %a"](value,n)
+ else
+ value=tonumber(value)
+ if type(value)~="number" then
+ return alternatives[1],trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
+ elseif value>n then
+ local defaultalt=otf.defaultnodealternate
+ if defaultalt=="first" then
+    return alternatives[1],trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
+ elseif defaultalt=="last" then
+    return alternatives[n],trace_alternatives and formatters["invalid value %s, taking %a"](value,n)
+ else
+ return false,trace_alternatives and formatters["invalid value %a, %s"](value,"out of range")
+ end
+ elseif value==0 then
+ return start.char,trace_alternatives and formatters["invalid value %a, %s"](value,"no change")
+ elseif value<1 then
+ return alternatives[1],trace_alternatives and formatters["invalid value %a, taking %a"](value,1)
+ else
+ return alternatives[value],trace_alternatives and formatters["value %a, taking %a"](value,value)
+ end
+ end
+end
+local function multiple_glyphs(head,start,multiple)
+ local nofmultiples=#multiple
+ if nofmultiples>0 then
+ start.char=multiple[1]
+ if nofmultiples>1 then
+ local sn=start.next
+ for k=2,nofmultiples do
+ local n=copy_node(start)
+ n.char=multiple[k]
+ n.next=sn
+ n.prev=start
+ if sn then
+ sn.prev=n
+ end
+ start.next=n
+ start=n
+ end
+ end
+ return head,start,true
+ else
+ if trace_multiples then
+ logprocess("no multiple for %s",gref(start.char))
+ end
+ return head,start,false
+ end
+end
+function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence)
+ local value=featurevalue==true and tfmdata.shared.features[kind] or featurevalue
+ local choice,comment=get_alternative_glyph(start,alternative,value,trace_alternatives)
+ if choice then
+ if trace_alternatives then
+ logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(start.char),choice,gref(choice),comment)
+ end
+ start.char=choice
+ else
+ if trace_alternatives then
+ logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(start.char),comment)
+ end
+ end
+ return head,start,true
+end
+function handlers.gsub_multiple(head,start,kind,lookupname,multiple)
+ if trace_multiples then
+ logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(start.char),gref(multiple))
+ end
+ return multiple_glyphs(head,start,multiple)
+end
+function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
+ local s,stop,discfound=start.next,nil,false
+ local startchar=start.char
+ if marks[startchar] then
+ while s do
+ local id=s.id
+ if id==glyph_code and s.font==currentfont and s.subtype<256 then
+ local lg=ligature[s.char]
+ if lg then
+ stop=s
+ ligature=lg
+ s=s.next
+ else
+ break
+ end
+ else
+ break
+ end
+ end
+ if stop then
+ local lig=ligature.ligature
+ if lig then
+ if trace_ligatures then
+ local stopchar=stop.char
+ head,start=markstoligature(kind,lookupname,head,start,stop,lig)
+ logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
+ else
+ head,start=markstoligature(kind,lookupname,head,start,stop,lig)
+ end
+ return head,start,true
+ else
+ end
+ end
+ else
+ local skipmark=sequence.flags[1]
+ while s do
+ local id=s.id
+ if id==glyph_code and s.subtype<256 then
+ if s.font==currentfont then
+ local char=s.char
+ if skipmark and marks[char] then
+ s=s.next
+ else
+ local lg=ligature[char]
+ if lg then
+ stop=s
+ ligature=lg
+ s=s.next
+ else
+ break
+ end
+ end
+ else
+ break
+ end
+ elseif id==disc_code then
+ discfound=true
+ s=s.next
+ else
+ break
+ end
+ end
+ if stop then
+ local lig=ligature.ligature
+ if lig then
+ if trace_ligatures then
+ local stopchar=stop.char
+ head,start=toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
+ logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
+ else
+ head,start=toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
+ end
+ return head,start,true
+ else
+ end
+ end
+ end
+ return head,start,false
+end
+function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence)
+ local markchar=start.char
+ if marks[markchar] then
+ local base=start.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ local basechar=base.char
+ if marks[basechar] then
+ while true do
+ base=base.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ basechar=base.char
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+ end
+ end
+ end
+ local baseanchors=descriptions[basechar]
+ if baseanchors then
+ baseanchors=baseanchors.anchors
+ end
+ if baseanchors then
+ local baseanchors=baseanchors['basechar']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
+ pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head,start,true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s, no matching anchors for mark %s and base %s",pref(kind,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ else
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no char",pref(kind,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+end
+function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence)
+ local markchar=start.char
+ if marks[markchar] then
+ local base=start.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ local basechar=base.char
+ if marks[basechar] then
+ while true do
+ base=base.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ basechar=base.char
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+ end
+ end
+ end
+ local index=start[a_ligacomp]
+ local baseanchors=descriptions[basechar]
+ if baseanchors then
+ baseanchors=baseanchors.anchors
+ if baseanchors then
+ local baseanchors=baseanchors['baselig']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ ba=ba[index]
+ if ba then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ if trace_marks then
+ logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
+ pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy)
+ end
+ return head,start,true
+ end
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and baselig %s",pref(kind,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ else
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no char",pref(kind,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+end
+function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence)
+ local markchar=start.char
+ if marks[markchar] then
+ local base=start.prev
+ local slc=start[a_ligacomp]
+ if slc then
+ while base do
+ local blc=base[a_ligacomp]
+ if blc and blc~=slc then
+ base=base.prev
+ else
+ break
+ end
+ end
+ end
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ local basechar=base.char
+ local baseanchors=descriptions[basechar]
+ if baseanchors then
+ baseanchors=baseanchors.anchors
+ if baseanchors then
+ baseanchors=baseanchors['basemark']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
+ pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head,start,true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and basemark %s",pref(kind,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ else
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no mark",pref(kind,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+end
+function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence)
+ local alreadydone=cursonce and start[a_cursbase]
+ if not alreadydone then
+ local done=false
+ local startchar=start.char
+ if marks[startchar] then
+ if trace_cursive then
+ logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
+ end
+ else
+ local nxt=start.next
+ while not done and nxt and nxt.id==glyph_code and nxt.font==currentfont and nxt.subtype<256 do
+ local nextchar=nxt.char
+ if marks[nextchar] then
+ nxt=nxt.next
+ else
+ local entryanchors=descriptions[nextchar]
+ if entryanchors then
+ entryanchors=entryanchors.anchors
+ if entryanchors then
+ entryanchors=entryanchors['centry']
+ if entryanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,entry in next,entryanchors do
+ if al[anchor] then
+ local exit=exitanchors[anchor]
+ if exit then
+ local dx,dy,bound=setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ if trace_cursive then
+ logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
+ end
+ done=true
+ break
+ end
+ end
+ end
+ end
+ end
+ else
+ onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
+ end
+ break
+ end
+ end
+ end
+ return head,start,done
+ else
+ if trace_cursive and trace_details then
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
+ end
+ return head,start,false
+ end
+end
+function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence)
+ local startchar=start.char
+ local dx,dy,w,h=setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h)
+ end
+ return head,start,false
+end
+function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence)
+ local snext=start.next
+ if not snext then
+ return head,start,false
+ else
+ local prev,done=start,false
+ local factor=tfmdata.parameters.factor
+ local lookuptype=lookuptypes[lookupname]
+ while snext and snext.id==glyph_code and snext.font==currentfont and snext.subtype<256 do
+ local nextchar=snext.char
+ local krn=kerns[nextchar]
+ if not krn and marks[nextchar] then
+ prev=snext
+ snext=snext.next
+ else
+ local krn=kerns[nextchar]
+ if not krn then
+ elseif type(krn)=="table" then
+ if lookuptype=="pair" then
+ local a,b=krn[2],krn[3]
+ if a and #a>0 then
+ local startchar=start.char
+ local x,y,w,h=setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ if b and #b>0 then
+ local startchar=start.char
+ local x,y,w,h=setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
+ if trace_kerns then
+ logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ else
+ report_process("%s: check this out (old kern stuff)",pref(kind,lookupname))
+ end
+ done=true
+ elseif krn~=0 then
+ local k=setkern(snext,factor,rlmode,krn)
+ if trace_kerns then
+ logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
+ end
+ done=true
+ end
+ break
+ end
+ end
+ return head,start,done
+ end
+end
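+-- chainprocs/chainmores hold the lookup actions applied from within contextual chains;
+-- chainmores is used when a matched chain triggers more than one sublookup.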
+local chainmores={}
+local chainprocs={}
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_subchain(...)
+end
+local logwarning=report_subchain
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_chain(...)
+end
+local logwarning=report_chain
+function chainprocs.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname)
+ logwarning("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
+ return head,start,false
+end
+function chainmores.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname,n)
+ logprocess("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
+ return head,start,false
+end
+function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements)
+ local char=start.char
+ local replacement=replacements[char]
+ if replacement then
+ if trace_singles then
+ logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement))
+ end
+ start.char=replacement
+ return head,start,true
+ else
+ return head,start,false
+ end
+end
+local function delete_till_stop(start,stop,ignoremarks)
+ local n=1
+ if start==stop then
+ elseif ignoremarks then
+ repeat
+ local next=start.next
+ if not marks[next.char] then
+ local components=next.components
+ if components then
+ flush_node_list(components)
+ end
+ delete_node(start,next)
+ end
+ n=n+1
+ until next==stop
+ else
+ repeat
+ local next=start.next
+ local components=next.components
+ if components then
+ flush_node_list(components)
+ end
+ delete_node(start,next)
+ n=n+1
+ until next==stop
+ end
+ return n
+end
+function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
+ local current=start
+ local subtables=currentlookup.subtables
+ if #subtables>1 then
+ logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," "))
+ end
+ while current do
+ if current.id==glyph_code then
+ local currentchar=current.char
+ local lookupname=subtables[1]
+ local replacement=lookuphash[lookupname]
+ if not replacement then
+ if trace_bugs then
+ logwarning("%s: no single hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
+ end
+ else
+ replacement=replacement[currentchar]
+ if not replacement or replacement=="" then
+ if trace_bugs then
+ logwarning("%s: no single for %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar))
+ end
+ else
+ if trace_singles then
+ logprocess("%s: replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement))
+ end
+ current.char=replacement
+ end
+ end
+ return head,start,true
+ elseif current==stop then
+ break
+ else
+ current=current.next
+ end
+ end
+ return head,start,false
+end
+chainmores.gsub_single=chainprocs.gsub_single
+function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ delete_till_stop(start,stop)
+ local startchar=start.char
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local replacements=lookuphash[lookupname]
+ if not replacements then
+ if trace_bugs then
+ logwarning("%s: no multiple hits",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ else
+ replacements=replacements[startchar]
+  if not replacements or replacements=="" then
+ if trace_bugs then
+ logwarning("%s: no multiple for %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar))
+ end
+ else
+ if trace_multiples then
+ logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements))
+ end
+ return multiple_glyphs(head,start,replacements)
+ end
+ end
+ return head,start,false
+end
+chainmores.gsub_multiple=chainprocs.gsub_multiple
+function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local current=start
+ local subtables=currentlookup.subtables
+ local value=featurevalue==true and tfmdata.shared.features[kind] or featurevalue
+ while current do
+ if current.id==glyph_code then
+ local currentchar=current.char
+ local lookupname=subtables[1]
+ local alternatives=lookuphash[lookupname]
+ if not alternatives then
+ if trace_bugs then
+ logwarning("%s: no alternative hit",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ else
+ alternatives=alternatives[currentchar]
+ if alternatives then
+ local choice,comment=get_alternative_glyph(current,alternatives,value,trace_alternatives)
+ if choice then
+ if trace_alternatives then
+ logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(char),choice,gref(choice),comment)
+ end
+      current.char=choice
+ else
+ if trace_alternatives then
+ logwarning("%s: no variant %a for %s, %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(char),comment)
+ end
+ end
+ elseif trace_bugs then
+ logwarning("%s: no alternative for %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar),comment)
+ end
+ end
+ return head,start,true
+ elseif current==stop then
+ break
+ else
+ current=current.next
+ end
+ end
+ return head,start,false
+end
+chainmores.gsub_alternate=chainprocs.gsub_alternate
+function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
+ local startchar=start.char
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local ligatures=lookuphash[lookupname]
+ if not ligatures then
+ if trace_bugs then
+ logwarning("%s: no ligature hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
+ end
+ else
+ ligatures=ligatures[startchar]
+ if not ligatures then
+ if trace_bugs then
+ logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
+ end
+ else
+ local s=start.next
+ local discfound=false
+ local last=stop
+ local nofreplacements=0
+ local skipmark=currentlookup.flags[1]
+ while s do
+ local id=s.id
+ if id==disc_code then
+ s=s.next
+ discfound=true
+ else
+ local schar=s.char
+ if skipmark and marks[schar] then
+ s=s.next
+ else
+ local lg=ligatures[schar]
+ if lg then
+ ligatures,last,nofreplacements=lg,s,nofreplacements+1
+ if s==stop then
+ break
+ else
+ s=s.next
+ end
+ else
+ break
+ end
+ end
+ end
+ end
+ local l2=ligatures.ligature
+ if l2 then
+ if chainindex then
+ stop=last
+ end
+ if trace_ligatures then
+ if start==stop then
+ logprocess("%s: replacing character %s by ligature %s case 3",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2))
+ else
+ logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char),gref(l2))
+ end
+ end
+ head,start=toligature(kind,lookupname,head,start,stop,l2,currentlookup.flags[1],discfound)
+ return head,start,true,nofreplacements
+ elseif trace_bugs then
+ if start==stop then
+ logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
+ else
+ logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char))
+ end
+ end
+ end
+ end
+ return head,start,false,0
+end
+chainmores.gsub_ligature=chainprocs.gsub_ligature
+function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local markchar=start.char
+ if marks[markchar] then
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local markanchors=lookuphash[lookupname]
+ if markanchors then
+ markanchors=markanchors[markchar]
+ end
+ if markanchors then
+ local base=start.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ local basechar=base.char
+ if marks[basechar] then
+ while true do
+ base=base.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ basechar=base.char
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+ end
+ end
+ end
+ local baseanchors=descriptions[basechar].anchors
+ if baseanchors then
+ local baseanchors=baseanchors['basechar']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
+ cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head,start,true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s, no matching anchors for mark %s and base %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no char",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ end
+ return head,start,false
+end
+function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local markchar=start.char
+ if marks[markchar] then
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local markanchors=lookuphash[lookupname]
+ if markanchors then
+ markanchors=markanchors[markchar]
+ end
+ if markanchors then
+ local base=start.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ local basechar=base.char
+ if marks[basechar] then
+ while true do
+ base=base.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ basechar=base.char
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",cref(kind,chainname,chainlookupname,lookupname),markchar)
+ end
+ return head,start,false
+ end
+ end
+ end
+ local index=start[a_ligacomp]
+ local baseanchors=descriptions[basechar].anchors
+ if baseanchors then
+ local baseanchors=baseanchors['baselig']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ ba=ba[index]
+ if ba then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
+            cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),index,dx,dy)
+ end
+ return head,start,true
+ end
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and baselig %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ logwarning("feature %s, lookup %s: prev node is no char",kind,lookupname)
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ end
+ return head,start,false
+end
+function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local markchar=start.char
+ if marks[markchar] then
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local markanchors=lookuphash[lookupname]
+ if markanchors then
+ markanchors=markanchors[markchar]
+ end
+ if markanchors then
+ local base=start.prev
+ local slc=start[a_ligacomp]
+ if slc then
+ while base do
+ local blc=base[a_ligacomp]
+ if blc and blc~=slc then
+ base=base.prev
+ else
+ break
+ end
+ end
+ end
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ local basechar=base.char
+ local baseanchors=descriptions[basechar].anchors
+ if baseanchors then
+ baseanchors=baseanchors['basemark']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
+ cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head,start,true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and basemark %s",gref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ end
+ return head,start,false
+end
+function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local alreadydone=cursonce and start[a_cursbase]
+ if not alreadydone then
+ local startchar=start.char
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local exitanchors=lookuphash[lookupname]
+ if exitanchors then
+ exitanchors=exitanchors[startchar]
+ end
+ if exitanchors then
+ local done=false
+ if marks[startchar] then
+ if trace_cursive then
+ logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
+ end
+ else
+ local nxt=start.next
+ while not done and nxt and nxt.id==glyph_code and nxt.font==currentfont and nxt.subtype<256 do
+ local nextchar=nxt.char
+ if marks[nextchar] then
+ nxt=nxt.next
+ else
+ local entryanchors=descriptions[nextchar]
+ if entryanchors then
+ entryanchors=entryanchors.anchors
+ if entryanchors then
+ entryanchors=entryanchors['centry']
+ if entryanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,entry in next,entryanchors do
+ if al[anchor] then
+ local exit=exitanchors[anchor]
+ if exit then
+ local dx,dy,bound=setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ if trace_cursive then
+ logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
+ end
+ done=true
+ break
+ end
+ end
+ end
+ end
+ end
+ else
+ onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
+ end
+ break
+ end
+ end
+ end
+ return head,start,done
+ else
+ if trace_cursive and trace_details then
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
+ end
+ return head,start,false
+ end
+ end
+ return head,start,false
+end
+function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
+ local startchar=start.char
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local kerns=lookuphash[lookupname]
+ if kerns then
+ kerns=kerns[startchar]
+ if kerns then
+ local dx,dy,w,h=setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h)
+ end
+ end
+ end
+ return head,start,false
+end
+function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
+ local snext=start.next
+ if snext then
+ local startchar=start.char
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local kerns=lookuphash[lookupname]
+ if kerns then
+ kerns=kerns[startchar]
+ if kerns then
+ local lookuptype=lookuptypes[lookupname]
+ local prev,done=start,false
+ local factor=tfmdata.parameters.factor
+ while snext and snext.id==glyph_code and snext.font==currentfont and snext.subtype<256 do
+ local nextchar=snext.char
+ local krn=kerns[nextchar]
+ if not krn and marks[nextchar] then
+ prev=snext
+ snext=snext.next
+ else
+ if not krn then
+ elseif type(krn)=="table" then
+ if lookuptype=="pair" then
+ local a,b=krn[2],krn[3]
+ if a and #a>0 then
+ local startchar=start.char
+ local x,y,w,h=setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ if b and #b>0 then
+ local startchar=start.char
+ local x,y,w,h=setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
+ if trace_kerns then
+ logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ else
+ report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname))
+ local a,b=krn[2],krn[6]
+ if a and a~=0 then
+ local k=setkern(snext,factor,rlmode,a)
+ if trace_kerns then
+ logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
+ end
+ end
+ if b and b~=0 then
+ logwarning("%s: ignoring second kern xoff %s",cref(kind,chainname,chainlookupname),b*factor)
+ end
+ end
+ done=true
+ elseif krn~=0 then
+ local k=setkern(snext,factor,rlmode,krn)
+ if trace_kerns then
+ logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
+ end
+ done=true
+ end
+ break
+ end
+ end
+ return head,start,done
+ end
+ end
+ end
+ return head,start,false
+end
+local function show_skip(kind,chainname,char,ck,class)
+ if ck[9] then
+ logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a, %a => %a",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10])
+ else
+ logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a",cref(kind,chainname),gref(char),class,ck[1],ck[2])
+ end
+end
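+-- Contextual chain matcher: checks a rule's before/current/after sequences against the
+-- node list (honouring skip flags and classes) and applies the chained lookups on a match.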
+local function normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash)
+ local flags=sequence.flags
+ local done=false
+ local skipmark=flags[1]
+ local skipligature=flags[2]
+ local skipbase=flags[3]
+ local someskip=skipmark or skipligature or skipbase
+ local markclass=sequence.markclass
+ local skipped=false
+ for k=1,#contexts do
+ local match=true
+ local current=start
+ local last=start
+ local ck=contexts[k]
+ local seq=ck[3]
+ local s=#seq
+ if s==1 then
+ match=current.id==glyph_code and current.font==currentfont and current.subtype<256 and seq[1][current.char]
+ else
+ local f,l=ck[4],ck[5]
+ if f==1 and f==l then
+ else
+ if f==l then
+ else
+ local n=f+1
+ last=last.next
+ while n<=l do
+ if last then
+ local id=last.id
+ if id==glyph_code then
+ if last.font==currentfont and last.subtype<256 then
+ local char=last.char
+ local ccd=descriptions[char]
+ if ccd then
+ local class=ccd.class
+ if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
+ skipped=true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ last=last.next
+ elseif seq[n][char] then
+ if n<l then
+ last=last.next
+ end
+ n=n+1
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ elseif id==disc_code then
+ last=last.next
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ end
+ end
+ end
+ if match and f>1 then
+ local prev=start.prev
+ if prev then
+ local n=f-1
+ while n>=1 do
+ if prev then
+ local id=prev.id
+ if id==glyph_code then
+ if prev.font==currentfont and prev.subtype<256 then
+ local char=prev.char
+ local ccd=descriptions[char]
+ if ccd then
+ local class=ccd.class
+ if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
+ skipped=true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ elseif seq[n][char] then
+ n=n -1
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ elseif id==disc_code then
+ elseif seq[n][32] then
+ n=n -1
+ else
+ match=false
+ break
+ end
+ prev=prev.prev
+ elseif seq[n][32] then
+ n=n -1
+ else
+ match=false
+ break
+ end
+ end
+ elseif f==2 then
+ match=seq[1][32]
+ else
+     for n=f-1,1,-1 do
+ if not seq[n][32] then
+ match=false
+ break
+ end
+ end
+ end
+ end
+ if match and s>l then
+ local current=last and last.next
+ if current then
+ local n=l+1
+ while n<=s do
+ if current then
+ local id=current.id
+ if id==glyph_code then
+ if current.font==currentfont and current.subtype<256 then
+ local char=current.char
+ local ccd=descriptions[char]
+ if ccd then
+ local class=ccd.class
+ if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
+ skipped=true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ elseif seq[n][char] then
+ n=n+1
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ elseif id==disc_code then
+ elseif seq[n][32] then
+ n=n+1
+ else
+ match=false
+ break
+ end
+ current=current.next
+ elseif seq[n][32] then
+ n=n+1
+ else
+ match=false
+ break
+ end
+ end
+ elseif s-l==1 then
+ match=seq[s][32]
+ else
+ for n=l+1,s do
+ if not seq[n][32] then
+ match=false
+ break
+ end
+ end
+ end
+ end
+ end
+ if match then
+ if trace_contexts then
+ local rule,lookuptype,f,l=ck[1],ck[2],ck[4],ck[5]
+ local char=start.char
+ if ck[9] then
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a, %a => %a",
+ cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
+ else
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a",
+ cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype)
+ end
+ end
+ local chainlookups=ck[6]
+ if chainlookups then
+ local nofchainlookups=#chainlookups
+ if nofchainlookups==1 then
+ local chainlookupname=chainlookups[1]
+ local chainlookup=lookuptable[chainlookupname]
+ if chainlookup then
+ local cp=chainprocs[chainlookup.type]
+ if cp then
+ head,start,done=cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
+ else
+ logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
+ end
+ else
+ logprocess("%s is not yet supported",cref(kind,chainname,chainlookupname))
+ end
+ else
+ local i=1
+ repeat
+ if skipped then
+ while true do
+ local char=start.char
+ local ccd=descriptions[char]
+ if ccd then
+ local class=ccd.class
+ if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
+ start=start.next
+ else
+ break
+ end
+ else
+ break
+ end
+ end
+ end
+ local chainlookupname=chainlookups[i]
+ local chainlookup=lookuptable[chainlookupname]
+ local cp=chainlookup and chainmores[chainlookup.type]
+ if cp then
+ local ok,n
+ head,start,ok,n=cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence)
+ if ok then
+ done=true
+ i=i+(n or 1)
+ else
+ i=i+1
+ end
+ else
+ i=i+1
+ end
+ if start then
+ start=start.next
+ else
+ end
+ until i>nofchainlookups
+ end
+ else
+ local replacements=ck[7]
+ if replacements then
+ head,start,done=chainprocs.reversesub(head,start,last,kind,chainname,ck,lookuphash,replacements)
+ else
+ done=true
+ if trace_contexts then
+ logprocess("%s: skipping match",cref(kind,chainname))
+ end
+ end
+ end
+ end
+ end
+ return head,start,done
+end
+local verbose_handle_contextchain=function(font,...)
+ logwarning("no verbose handler installed, reverting to 'normal'")
+ otf.setcontextchain()
+ return normal_handle_contextchain(...)
+end
+otf.chainhandlers={
+ normal=normal_handle_contextchain,
+ verbose=verbose_handle_contextchain,
+}
+function otf.setcontextchain(method)
+ if not method or method=="normal" or not otf.chainhandlers[method] then
+ if handlers.contextchain then
+ logwarning("installing normal contextchain handler")
+ end
+ handlers.contextchain=normal_handle_contextchain
+ else
+ logwarning("installing contextchain handler %a",method)
+ local handler=otf.chainhandlers[method]
+ handlers.contextchain=function(...)
+ return handler(currentfont,...)
+ end
+ end
+ handlers.gsub_context=handlers.contextchain
+ handlers.gsub_contextchain=handlers.contextchain
+ handlers.gsub_reversecontextchain=handlers.contextchain
+ handlers.gpos_contextchain=handlers.contextchain
+ handlers.gpos_context=handlers.contextchain
+end
+otf.setcontextchain()
+local missing={}
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_process(...)
+end
+local logwarning=report_process
+local function report_missing_cache(typ,lookup)
+ local f=missing[currentfont] if not f then f={} missing[currentfont]=f end
+ local t=f[typ] if not t then t={} f[typ]=t end
+ if not t[lookup] then
+ t[lookup]=true
+ logwarning("missing cache for lookup %a, type %a, font %a, name %a",lookup,typ,currentfont,tfmdata.properties.fullname)
+ end
+end
+local resolved={}
+local lookuphashes={}
+setmetatableindex(lookuphashes,function(t,font)
+ local lookuphash=fontdata[font].resources.lookuphash
+ if not lookuphash or not next(lookuphash) then
+ lookuphash=false
+ end
+ t[font]=lookuphash
+ return lookuphash
+end)
+local autofeatures=fonts.analyzers.features
+local function initialize(sequence,script,language,enabled)
+ local features=sequence.features
+ if features then
+ for kind,scripts in next,features do
+ local valid=enabled[kind]
+ if valid then
+ local languages=scripts[script] or scripts[wildcard]
+ if languages and (languages[language] or languages[wildcard]) then
+ return { valid,autofeatures[kind] or false,sequence.chain or 0,kind,sequence }
+ end
+ end
+ end
+ end
+ return false
+end
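+-- otf.dataset caches, per font, script and language, the sequences whose features are
+-- enabled, so the processor below only iterates the relevant lookups.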
+function otf.dataset(tfmdata,font)
+ local shared=tfmdata.shared
+ local properties=tfmdata.properties
+ local language=properties.language or "dflt"
+ local script=properties.script or "dflt"
+ local enabled=shared.features
+ local res=resolved[font]
+ if not res then
+ res={}
+ resolved[font]=res
+ end
+ local rs=res[script]
+ if not rs then
+ rs={}
+ res[script]=rs
+ end
+ local rl=rs[language]
+ if not rl then
+ rl={
+ }
+ rs[language]=rl
+ local sequences=tfmdata.resources.sequences
+for s=1,#sequences do
+ local v=enabled and initialize(sequences[s],script,language,enabled)
+ if v then
+ rl[#rl+1]=v
+ end
+end
+ end
+ return rl
+end
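+-- Main per-font processor: walks the node list once per active feature dataset,
+-- dispatches to the handlers above and tracks text direction for positioning.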
+local function featuresprocessor(head,font,attr)
+ local lookuphash=lookuphashes[font]
+ if not lookuphash then
+ return head,false
+ end
+ if trace_steps then
+ checkstep(head)
+ end
+ tfmdata=fontdata[font]
+ descriptions=tfmdata.descriptions
+ characters=tfmdata.characters
+ resources=tfmdata.resources
+ marks=resources.marks
+ anchorlookups=resources.lookup_to_anchor
+ lookuptable=resources.lookups
+ lookuptypes=resources.lookuptypes
+ currentfont=font
+ rlmode=0
+ local sequences=resources.sequences
+ local done=false
+ local datasets=otf.dataset(tfmdata,font,attr)
+ local dirstack={}
+for s=1,#datasets do
+ local dataset=datasets[s]
+ featurevalue=dataset[1]
+ local sequence=dataset[5]
+ local rlparmode=0
+ local topstack=0
+ local success=false
+ local attribute=dataset[2]
+ local chain=dataset[3]
+ local typ=sequence.type
+ local subtables=sequence.subtables
+ if chain<0 then
+ local handler=handlers[typ]
+ local start=find_node_tail(head)
+ while start do
+ local id=start.id
+ if id==glyph_code then
+ if start.font==font and start.subtype<256 then
+ local a=start[0]
+ if a then
+ a=a==attr
+ else
+ a=true
+ end
+ if a then
+ for i=1,#subtables do
+ local lookupname=subtables[i]
+ local lookupcache=lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch=lookupcache[start.char]
+ if lookupmatch then
+ head,start,success=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if success then
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ if start then start=start.prev end
+ else
+ start=start.prev
+ end
+ else
+ start=start.prev
+ end
+ else
+ start=start.prev
+ end
+ end
+ else
+ local handler=handlers[typ]
+ local ns=#subtables
+ local start=head
+ rlmode=0
+ if ns==1 then
+ local lookupname=subtables[1]
+ local lookupcache=lookuphash[lookupname]
+ if not lookupcache then
+ report_missing_cache(typ,lookupname)
+ else
+ while start do
+ local id=start.id
+ if id==glyph_code then
+ if start.font==font and start.subtype<256 then
+ local a=start[0]
+ if a then
+ a=(a==attr) and (not attribute or start[a_state]==attribute)
+ else
+ a=not attribute or start[a_state]==attribute
+ end
+ if a then
+ local lookupmatch=lookupcache[start.char]
+ if lookupmatch then
+ local ok
+ head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ success=true
+ end
+ end
+ if start then start=start.next end
+ else
+ start=start.next
+ end
+ elseif id==math_code then
+ start=end_of_math(start).next
+ else
+ start=start.next
+ end
+ elseif id==whatsit_code then
+ local subtype=start.subtype
+ if subtype==dir_code then
+ local dir=start.dir
+ if dir=="+TRT" or dir=="+TLT" then
+ topstack=topstack+1
+ dirstack[topstack]=dir
+ elseif dir=="-TRT" or dir=="-TLT" then
+ topstack=topstack-1
+ end
+ local newdir=dirstack[topstack]
+ if newdir=="+TRT" then
+ rlmode=-1
+ elseif newdir=="+TLT" then
+ rlmode=1
+ else
+ rlmode=rlparmode
+ end
+ if trace_directions then
+ report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
+ end
+ elseif subtype==localpar_code then
+ local dir=start.dir
+ if dir=="TRT" then
+ rlparmode=-1
+ elseif dir=="TLT" then
+ rlparmode=1
+ else
+ rlparmode=0
+ end
+ rlmode=rlparmode
+ if trace_directions then
+ report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
+ end
+ end
+ start=start.next
+ elseif id==math_code then
+ start=end_of_math(start).next
+ else
+ start=start.next
+ end
+ end
+ end
+ else
+ while start do
+ local id=start.id
+ if id==glyph_code then
+ if start.font==font and start.subtype<256 then
+ local a=start[0]
+ if a then
+ a=(a==attr) and (not attribute or start[a_state]==attribute)
+ else
+ a=not attribute or start[a_state]==attribute
+ end
+ if a then
+ for i=1,ns do
+ local lookupname=subtables[i]
+ local lookupcache=lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch=lookupcache[start.char]
+ if lookupmatch then
+ local ok
+ head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ success=true
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ if start then start=start.next end
+ else
+ start=start.next
+ end
+ else
+ start=start.next
+ end
+ elseif id==whatsit_code then
+ local subtype=start.subtype
+ if subtype==dir_code then
+ local dir=start.dir
+ if dir=="+TRT" or dir=="+TLT" then
+ topstack=topstack+1
+ dirstack[topstack]=dir
+ elseif dir=="-TRT" or dir=="-TLT" then
+ topstack=topstack-1
+ end
+ local newdir=dirstack[topstack]
+ if newdir=="+TRT" then
+ rlmode=-1
+ elseif newdir=="+TLT" then
+ rlmode=1
+ else
+ rlmode=rlparmode
+ end
+ if trace_directions then
+ report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
+ end
+ elseif subtype==localpar_code then
+ local dir=start.dir
+ if dir=="TRT" then
+ rlparmode=-1
+ elseif dir=="TLT" then
+ rlparmode=1
+ else
+ rlparmode=0
+ end
+ rlmode=rlparmode
+ if trace_directions then
+ report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
+ end
+ end
+ start=start.next
+ elseif id==math_code then
+ start=end_of_math(start).next
+ else
+ start=start.next
+ end
+ end
+ end
+ end
+ if success then
+ done=true
+ end
+ if trace_steps then
+ registerstep(head)
+ end
+ end
+ return head,done
+end
+local function generic(lookupdata,lookupname,unicode,lookuphash)
+ local target=lookuphash[lookupname]
+ if target then
+ target[unicode]=lookupdata
+ else
+ lookuphash[lookupname]={ [unicode]=lookupdata }
+ end
+end
+local action={
+ substitution=generic,
+ multiple=generic,
+ alternate=generic,
+ position=generic,
+ ligature=function(lookupdata,lookupname,unicode,lookuphash)
+ local target=lookuphash[lookupname]
+ if not target then
+ target={}
+ lookuphash[lookupname]=target
+ end
+ for i=1,#lookupdata do
+ local li=lookupdata[i]
+ local tu=target[li]
+ if not tu then
+ tu={}
+ target[li]=tu
+ end
+ target=tu
+ end
+ target.ligature=unicode
+ end,
+ pair=function(lookupdata,lookupname,unicode,lookuphash)
+ local target=lookuphash[lookupname]
+ if not target then
+ target={}
+ lookuphash[lookupname]=target
+ end
+ local others=target[unicode]
+ local paired=lookupdata[1]
+ if others then
+ others[paired]=lookupdata
+ else
+ others={ [paired]=lookupdata }
+ target[unicode]=others
+ end
+ end,
+}
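+-- Flattens per-glyph lookup data (substitutions, ligatures, kerns, anchors) into the
+-- lookuphash that the runtime handlers consult.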
+local function prepare_lookups(tfmdata)
+ local rawdata=tfmdata.shared.rawdata
+ local resources=rawdata.resources
+ local lookuphash=resources.lookuphash
+ local anchor_to_lookup=resources.anchor_to_lookup
+ local lookup_to_anchor=resources.lookup_to_anchor
+ local lookuptypes=resources.lookuptypes
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ for unicode,character in next,characters do
+ local description=descriptions[unicode]
+ if description then
+ local lookups=description.slookups
+ if lookups then
+ for lookupname,lookupdata in next,lookups do
+ action[lookuptypes[lookupname]](lookupdata,lookupname,unicode,lookuphash)
+ end
+ end
+ local lookups=description.mlookups
+ if lookups then
+ for lookupname,lookuplist in next,lookups do
+ local lookuptype=lookuptypes[lookupname]
+ for l=1,#lookuplist do
+ local lookupdata=lookuplist[l]
+ action[lookuptype](lookupdata,lookupname,unicode,lookuphash)
+ end
+ end
+ end
+ local list=description.kerns
+ if list then
+ for lookup,krn in next,list do
+ local target=lookuphash[lookup]
+ if target then
+ target[unicode]=krn
+ else
+ lookuphash[lookup]={ [unicode]=krn }
+ end
+ end
+ end
+ local list=description.anchors
+ if list then
+ for typ,anchors in next,list do
+ if typ=="mark" or typ=="cexit" then
+ for name,anchor in next,anchors do
+ local lookups=anchor_to_lookup[name]
+ if lookups then
+ for lookup,_ in next,lookups do
+ local target=lookuphash[lookup]
+ if target then
+ target[unicode]=anchors
+ else
+ lookuphash[lookup]={ [unicode]=anchors }
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
+local function split(replacement,original)
+ local result={}
+ for i=1,#replacement do
+ result[original[i]]=replacement[i]
+ end
+ return result
+end
+local valid={
+ coverage={ chainsub=true,chainpos=true,contextsub=true },
+ reversecoverage={ reversesub=true },
+ glyphs={ chainsub=true,chainpos=true },
+}
+local function prepare_contextchains(tfmdata)
+ local rawdata=tfmdata.shared.rawdata
+ local resources=rawdata.resources
+ local lookuphash=resources.lookuphash
+ local lookups=rawdata.lookups
+ if lookups then
+ for lookupname,lookupdata in next,rawdata.lookups do
+ local lookuptype=lookupdata.type
+ if lookuptype then
+ local rules=lookupdata.rules
+ if rules then
+ local format=lookupdata.format
+ local validformat=valid[format]
+ if not validformat then
+ report_prepare("unsupported format %a",format)
+ elseif not validformat[lookuptype] then
+ report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookupname)
+ else
+ local contexts=lookuphash[lookupname]
+ if not contexts then
+ contexts={}
+ lookuphash[lookupname]=contexts
+ end
+ local t,nt={},0
+ for nofrules=1,#rules do
+ local rule=rules[nofrules]
+ local current=rule.current
+ local before=rule.before
+ local after=rule.after
+ local replacements=rule.replacements
+ local sequence={}
+ local nofsequences=0
+ if before then
+ for n=1,#before do
+ nofsequences=nofsequences+1
+ sequence[nofsequences]=before[n]
+ end
+ end
+ local start=nofsequences+1
+ for n=1,#current do
+ nofsequences=nofsequences+1
+ sequence[nofsequences]=current[n]
+ end
+ local stop=nofsequences
+ if after then
+ for n=1,#after do
+ nofsequences=nofsequences+1
+ sequence[nofsequences]=after[n]
+ end
+ end
+ if sequence[1] then
+ nt=nt+1
+ t[nt]={ nofrules,lookuptype,sequence,start,stop,rule.lookups,replacements }
+ for unic,_ in next,sequence[start] do
+ local cu=contexts[unic]
+ if not cu then
+ contexts[unic]=t
+ end
+ end
+ end
+ end
+ end
+ else
+ end
+ else
+ report_prepare("missing lookuptype for lookupname %a",lookupname)
+ end
+ end
+ end
+end
+local function featuresinitializer(tfmdata,value)
+ if true then
+ local rawdata=tfmdata.shared.rawdata
+ local properties=rawdata.properties
+ if not properties.initialized then
+ local starttime=trace_preparing and os.clock()
+ local resources=rawdata.resources
+ resources.lookuphash=resources.lookuphash or {}
+ prepare_contextchains(tfmdata)
+ prepare_lookups(tfmdata)
+ properties.initialized=true
+ if trace_preparing then
+ report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,tfmdata.properties.fullname)
+ end
+ end
+ end
+end
+registerotffeature {
+ name="features",
+ description="features",
+ default=true,
+ initializers={
+ position=1,
+ node=featuresinitializer,
+ },
+ processors={
+ node=featuresprocessor,
+ }
+}
+otf.handlers=handlers
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-fonts-lua']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+fonts.formats.lua="lua"
+function fonts.readers.lua(specification)
+ local fullname=specification.filename or ""
+ if fullname=="" then
+ local forced=specification.forced or ""
+ if forced~="" then
+ fullname=specification.name.."."..forced
+ else
+ fullname=specification.name
+ end
+ end
+ local fullname=resolvers.findfile(fullname) or ""
+ if fullname~="" then
+ local loader=loadfile(fullname)
+ loader=loader and loader()
+ return loader and loader(specification)
+ end
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-def']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local format,gmatch,match,find,lower,gsub=string.format,string.gmatch,string.match,string.find,string.lower,string.gsub
+local tostring,next=tostring,next
+local lpegmatch=lpeg.match
+local allocate=utilities.storage.allocate
+local trace_defining=false trackers .register("fonts.defining",function(v) trace_defining=v end)
+local directive_embedall=false directives.register("fonts.embedall",function(v) directive_embedall=v end)
+trackers.register("fonts.loading","fonts.defining","otf.loading","afm.loading","tfm.loading")
+trackers.register("fonts.all","fonts.*","otf.*","afm.*","tfm.*")
+local report_defining=logs.reporter("fonts","defining")
+local fonts=fonts
+local fontdata=fonts.hashes.identifiers
+local readers=fonts.readers
+local definers=fonts.definers
+local specifiers=fonts.specifiers
+local constructors=fonts.constructors
+local fontgoodies=fonts.goodies
+readers.sequence=allocate { 'otf','ttf','afm','tfm','lua' }
+local variants=allocate()
+specifiers.variants=variants
+definers.methods=definers.methods or {}
+local internalized=allocate()
+local lastdefined=nil
+local loadedfonts=constructors.loadedfonts
+local designsizes=constructors.designsizes
+local resolvefile=fontgoodies and fontgoodies.filenames and fontgoodies.filenames.resolve or function(s) return s end
+local splitter,splitspecifiers=nil,""
+local P,C,S,Cc=lpeg.P,lpeg.C,lpeg.S,lpeg.Cc
+local left=P("(")
+local right=P(")")
+local colon=P(":")
+local space=P(" ")
+definers.defaultlookup="file"
+local prefixpattern=P(false)
+local function addspecifier(symbol)
+ splitspecifiers=splitspecifiers..symbol
+ local method=S(splitspecifiers)
+ local lookup=C(prefixpattern)*colon
+ local sub=left*C(P(1-left-right-method)^1)*right
+ local specification=C(method)*C(P(1)^1)
+ local name=C((1-sub-specification)^1)
+ splitter=P((lookup+Cc(""))*name*(sub+Cc(""))*(specification+Cc("")))
+end
+local function addlookup(str,default)
+ prefixpattern=prefixpattern+P(str)
+end
+definers.addlookup=addlookup
+addlookup("file")
+addlookup("name")
+addlookup("spec")
+local function getspecification(str)
+ return lpegmatch(splitter,str)
+end
+definers.getspecification=getspecification
+function definers.registersplit(symbol,action,verbosename)
+ addspecifier(symbol)
+ variants[symbol]=action
+ if verbosename then
+ variants[verbosename]=action
+ end
+end
+local function makespecification(specification,lookup,name,sub,method,detail,size)
+ size=size or 655360
+ if not lookup or lookup=="" then
+ lookup=definers.defaultlookup
+ end
+ if trace_defining then
+ report_defining("specification %a, lookup %a, name %a, sub %a, method %a, detail %a",
+ specification,lookup,name,sub,method,detail)
+ end
+ local t={
+ lookup=lookup,
+ specification=specification,
+ size=size,
+ name=name,
+ sub=sub,
+ method=method,
+ detail=detail,
+ resolved="",
+ forced="",
+ features={},
+ }
+ return t
+end
+definers.makespecification=makespecification
+function definers.analyze(specification,size)
+ local lookup,name,sub,method,detail=getspecification(specification or "")
+ return makespecification(specification,lookup,name,sub,method,detail,size)
+end
+definers.resolvers=definers.resolvers or {}
+local resolvers=definers.resolvers
+function resolvers.file(specification)
+ local name=resolvefile(specification.name)
+ local suffix=file.suffix(name)
+ if fonts.formats[suffix] then
+ specification.forced=suffix
+ specification.name=file.removesuffix(name)
+ else
+ specification.name=name
+ end
+end
+function resolvers.name(specification)
+ local resolve=fonts.names.resolve
+ if resolve then
+ local resolved,sub=resolve(specification.name,specification.sub,specification)
+ if resolved then
+ specification.resolved=resolved
+ specification.sub=sub
+ local suffix=file.suffix(resolved)
+ if fonts.formats[suffix] then
+ specification.forced=suffix
+ specification.name=file.removesuffix(resolved)
+ else
+ specification.name=resolved
+ end
+ end
+ else
+ resolvers.file(specification)
+ end
+end
+function resolvers.spec(specification)
+ local resolvespec=fonts.names.resolvespec
+ if resolvespec then
+ local resolved,sub=resolvespec(specification.name,specification.sub,specification)
+ if resolved then
+ specification.resolved=resolved
+ specification.sub=sub
+ specification.forced=file.suffix(resolved)
+ specification.name=file.removesuffix(resolved)
+ end
+ else
+ resolvers.name(specification)
+ end
+end
+function definers.resolve(specification)
+ if not specification.resolved or specification.resolved=="" then
+ local r=resolvers[specification.lookup]
+ if r then
+ r(specification)
+ end
+ end
+ if specification.forced=="" then
+ specification.forced=nil
+ else
+ specification.forced=specification.forced
+ end
+ specification.hash=lower(specification.name..' @ '..constructors.hashfeatures(specification))
+ if specification.sub and specification.sub~="" then
+ specification.hash=specification.sub..' @ '..specification.hash
+ end
+ return specification
+end
+function definers.applypostprocessors(tfmdata)
+ local postprocessors=tfmdata.postprocessors
+ if postprocessors then
+ local properties=tfmdata.properties
+ for i=1,#postprocessors do
+ local extrahash=postprocessors[i](tfmdata)
+ if type(extrahash)=="string" and extrahash~="" then
+ extrahash=gsub(lower(extrahash),"[^a-z]","-")
+ properties.fullname=format("%s-%s",properties.fullname,extrahash)
+ end
+ end
+ end
+ return tfmdata
+end
+local function checkembedding(tfmdata)
+ local properties=tfmdata.properties
+ local embedding
+ if directive_embedall then
+ embedding="full"
+ elseif properties and properties.filename and constructors.dontembed[properties.filename] then
+ embedding="no"
+ else
+ embedding="subset"
+ end
+ if properties then
+ properties.embedding=embedding
+ else
+ tfmdata.properties={ embedding=embedding }
+ end
+ tfmdata.embedding=embedding
+end
+function definers.loadfont(specification)
+ local hash=constructors.hashinstance(specification)
+ local tfmdata=loadedfonts[hash]
+ if not tfmdata then
+ local forced=specification.forced or ""
+ if forced~="" then
+ local reader=readers[lower(forced)]
+ tfmdata=reader and reader(specification)
+ if not tfmdata then
+ report_defining("forced type %a of %a not found",forced,specification.name)
+ end
+ else
+ local sequence=readers.sequence
+ for s=1,#sequence do
+ local reader=sequence[s]
+ if readers[reader] then
+ if trace_defining then
+ report_defining("trying (reader sequence driven) type %a for %a with file %a",reader,specification.name,specification.filename)
+ end
+ tfmdata=readers[reader](specification)
+ if tfmdata then
+ break
+ else
+ specification.filename=nil
+ end
+ end
+ end
+ end
+ if tfmdata then
+ tfmdata=definers.applypostprocessors(tfmdata)
+ checkembedding(tfmdata)
+ loadedfonts[hash]=tfmdata
+ designsizes[specification.hash]=tfmdata.parameters.designsize
+ end
+ end
+ if not tfmdata then
+ report_defining("font with asked name %a is not found using lookup %a",specification.name,specification.lookup)
+ end
+ return tfmdata
+end
+function constructors.checkvirtualids()
+end
+function constructors.readanddefine(name,size)
+ local specification=definers.analyze(name,size)
+ local method=specification.method
+ if method and variants[method] then
+ specification=variants[method](specification)
+ end
+ specification=definers.resolve(specification)
+ local hash=constructors.hashinstance(specification)
+ local id=definers.registered(hash)
+ if not id then
+ local tfmdata=definers.loadfont(specification)
+ if tfmdata then
+ tfmdata.properties.hash=hash
+ constructors.checkvirtualids(tfmdata)
+ id=font.define(tfmdata)
+ definers.register(tfmdata,id)
+ else
+ id=0
+ end
+ end
+ return fontdata[id],id
+end
+function definers.current()
+ return lastdefined
+end
+function definers.registered(hash)
+ local id=internalized[hash]
+ return id,id and fontdata[id]
+end
+function definers.register(tfmdata,id)
+ if tfmdata and id then
+ local hash=tfmdata.properties.hash
+ if not hash then
+ report_defining("registering font, id %a, name %a, invalid hash",id,tfmdata.properties.filename or "?")
+ elseif not internalized[hash] then
+ internalized[hash]=id
+ if trace_defining then
+ report_defining("registering font, id %s, hash %a",id,hash)
+ end
+ fontdata[id]=tfmdata
+ end
+ end
+end
+function definers.read(specification,size,id)
+ statistics.starttiming(fonts)
+ if type(specification)=="string" then
+ specification=definers.analyze(specification,size)
+ end
+ local method=specification.method
+ if method and variants[method] then
+ specification=variants[method](specification)
+ end
+ specification=definers.resolve(specification)
+ local hash=constructors.hashinstance(specification)
+ local tfmdata=definers.registered(hash)
+ if tfmdata then
+ if trace_defining then
+ report_defining("already hashed: %s",hash)
+ end
+ else
+ tfmdata=definers.loadfont(specification)
+ if tfmdata then
+ if trace_defining then
+ report_defining("loaded and hashed: %s",hash)
+ end
+ tfmdata.properties.hash=hash
+ if id then
+ definers.register(tfmdata,id)
+ end
+ else
+ if trace_defining then
+ report_defining("not loaded and hashed: %s",hash)
+ end
+ end
+ end
+ lastdefined=tfmdata or id
+ if not tfmdata then
+ report_defining("unknown font %a, loading aborted",specification.name)
+ elseif trace_defining and type(tfmdata)=="table" then
+ local properties=tfmdata.properties or {}
+ local parameters=tfmdata.parameters or {}
+ report_defining("using %s font with id %a, name %a, size %a, bytes %a, encoding %a, fullname %a, filename %a",
+ properties.format,id,properties.name,parameters.size,properties.encodingbytes,
+ properties.encodingname,properties.fullname,file.basename(properties.filename))
+ end
+ statistics.stoptiming(fonts)
+ return tfmdata
+end
+function font.getfont(id)
+ return fontdata[id]
+end
+callbacks.register('define_font',definers.read,"definition of fonts (tfmdata preparation)")
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-font-def']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+fonts.constructors.namemode="specification"
+function fonts.definers.getspecification(str)
+ return "",str,"",":",str
+end
+local list={}
+local function issome () list.lookup='name' end
+local function isfile () list.lookup='file' end
+local function isname () list.lookup='name' end
+local function thename(s) list.name=s end
+local function issub (v) list.sub=v end
+local function iscrap (s) list.crap=string.lower(s) end
+local function iskey (k,v) list[k]=v end
+local function istrue (s) list[s]=true end
+local function isfalse(s) list[s]=false end
+local P,S,R,C=lpeg.P,lpeg.S,lpeg.R,lpeg.C
+local spaces=P(" ")^0
+local namespec=(1-S("/:("))^0
+local crapspec=spaces*P("/")*(((1-P(":"))^0)/iscrap)*spaces
+local filename_1=P("file:")/isfile*(namespec/thename)
+local filename_2=P("[")*P(true)/isname*(((1-P("]"))^0)/thename)*P("]")
+local fontname_1=P("name:")/isname*(namespec/thename)
+local fontname_2=P(true)/issome*(namespec/thename)
+local sometext=(R("az","AZ","09")+S("+-."))^1
+local truevalue=P("+")*spaces*(sometext/istrue)
+local falsevalue=P("-")*spaces*(sometext/isfalse)
+local keyvalue=(C(sometext)*spaces*P("=")*spaces*C(sometext))/iskey
+local somevalue=sometext/istrue
+local subvalue=P("(")*(C(P(1-S("()"))^1)/issub)*P(")")
+local option=spaces*(keyvalue+falsevalue+truevalue+somevalue)*spaces
+local options=P(":")*spaces*(P(";")^0*option)^0
+local pattern=(filename_1+filename_2+fontname_1+fontname_2)*subvalue^0*crapspec^0*options^0
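+-- Illustration (hypothetical request, not part of the original module):
+-- matching "file:lmroman10-regular:+liga;mode=node" against the pattern
+-- above fills the shared `list` table roughly as
+--   { lookup="file", name="lmroman10-regular", liga=true, mode="node" }
+-- which colonized() below then merges into the font specification.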
+local function colonized(specification)
+ list={}
+ lpeg.match(pattern,specification.specification)
+ list.crap=nil
+ if list.name then
+ specification.name=list.name
+ list.name=nil
+ end
+ if list.lookup then
+ specification.lookup=list.lookup
+ list.lookup=nil
+ end
+ if list.sub then
+ specification.sub=list.sub
+ list.sub=nil
+ end
+ specification.features.normal=fonts.handlers.otf.features.normalize(list)
+ return specification
+end
+fonts.definers.registersplit(":",colonized,"cryptic")
+fonts.definers.registersplit("",colonized,"more cryptic")
+function fonts.definers.applypostprocessors(tfmdata)
+ local postprocessors=tfmdata.postprocessors
+ if postprocessors then
+ for i=1,#postprocessors do
+ local extrahash=postprocessors[i](tfmdata)
+ if type(extrahash)=="string" and extrahash~="" then
+ extrahash=string.gsub(lower(extrahash),"[^a-z]","-")
+ tfmdata.properties.fullname=format("%s-%s",tfmdata.properties.fullname,extrahash)
+ end
+ end
+ end
+ return tfmdata
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-fonts-ext']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+local otffeatures=fonts.constructors.newfeatures("otf")
+local function initializeitlc(tfmdata,value)
+ if value then
+ local parameters=tfmdata.parameters
+ local italicangle=parameters.italicangle
+ if italicangle and italicangle~=0 then
+ local properties=tfmdata.properties
+ local factor=tonumber(value) or 1
+ properties.hasitalics=true
+ properties.autoitalicamount=factor*(parameters.uwidth or 40)/2
+ end
+ end
+end
+otffeatures.register {
+ name="itlc",
+ description="italic correction",
+ initializers={
+ base=initializeitlc,
+ node=initializeitlc,
+ }
+}
+local function initializeslant(tfmdata,value)
+ value=tonumber(value)
+ if not value then
+ value=0
+ elseif value>1 then
+ value=1
+ elseif value<-1 then
+ value=-1
+ end
+ tfmdata.parameters.slantfactor=value
+end
+otffeatures.register {
+ name="slant",
+ description="slant glyphs",
+ initializers={
+ base=initializeslant,
+ node=initializeslant,
+ }
+}
+local function initializeextend(tfmdata,value)
+ value=tonumber(value)
+ if not value then
+ value=0
+ elseif value>10 then
+ value=10
+ elseif value<-10 then
+ value=-10
+ end
+ tfmdata.parameters.extendfactor=value
+end
+otffeatures.register {
+ name="extend",
+ description="scale glyphs horizontally",
+ initializers={
+ base=initializeextend,
+ node=initializeextend,
+ }
+}
+fonts.protrusions=fonts.protrusions or {}
+fonts.protrusions.setups=fonts.protrusions.setups or {}
+local setups=fonts.protrusions.setups
+local function initializeprotrusion(tfmdata,value)
+ if value then
+ local setup=setups[value]
+ if setup then
+ local factor,left,right=setup.factor or 1,setup.left or 1,setup.right or 1
+ local emwidth=tfmdata.parameters.quad
+ tfmdata.parameters.protrusion={
+ auto=true,
+ }
+ for i,chr in next,tfmdata.characters do
+ local v,pl,pr=setup[i],nil,nil
+ if v then
+ pl,pr=v[1],v[2]
+ end
+ if pl and pl~=0 then chr.left_protruding=left*pl*factor end
+ if pr and pr~=0 then chr.right_protruding=right*pr*factor end
+ end
+ end
+ end
+end
+otffeatures.register {
+ name="protrusion",
+ description="shift characters into the left and or right margin",
+ initializers={
+ base=initializeprotrusion,
+ node=initializeprotrusion,
+ }
+}
+fonts.expansions=fonts.expansions or {}
+fonts.expansions.setups=fonts.expansions.setups or {}
+local setups=fonts.expansions.setups
+local function initializeexpansion(tfmdata,value)
+ if value then
+ local setup=setups[value]
+ if setup then
+ local factor=setup.factor or 1
+ tfmdata.parameters.expansion={
+ stretch=10*(setup.stretch or 0),
+ shrink=10*(setup.shrink or 0),
+ step=10*(setup.step or 0),
+ auto=true,
+ }
+ for i,chr in next,tfmdata.characters do
+ local v=setup[i]
+ if v and v~=0 then
+ chr.expansion_factor=v*factor
+ else
+ chr.expansion_factor=factor
+ end
+ end
+ end
+ end
+end
+otffeatures.register {
+ name="expansion",
+ description="apply hz optimization",
+ initializers={
+ base=initializeexpansion,
+ node=initializeexpansion,
+ }
+}
+function fonts.loggers.onetimemessage() end
+local byte=string.byte
+fonts.expansions.setups['default']={
+ stretch=2,shrink=2,step=.5,factor=1,
+ [byte('A')]=0.5,[byte('B')]=0.7,[byte('C')]=0.7,[byte('D')]=0.5,[byte('E')]=0.7,
+ [byte('F')]=0.7,[byte('G')]=0.5,[byte('H')]=0.7,[byte('K')]=0.7,[byte('M')]=0.7,
+ [byte('N')]=0.7,[byte('O')]=0.5,[byte('P')]=0.7,[byte('Q')]=0.5,[byte('R')]=0.7,
+ [byte('S')]=0.7,[byte('U')]=0.7,[byte('W')]=0.7,[byte('Z')]=0.7,
+ [byte('a')]=0.7,[byte('b')]=0.7,[byte('c')]=0.7,[byte('d')]=0.7,[byte('e')]=0.7,
+ [byte('g')]=0.7,[byte('h')]=0.7,[byte('k')]=0.7,[byte('m')]=0.7,[byte('n')]=0.7,
+ [byte('o')]=0.7,[byte('p')]=0.7,[byte('q')]=0.7,[byte('s')]=0.7,[byte('u')]=0.7,
+ [byte('w')]=0.7,[byte('z')]=0.7,
+ [byte('2')]=0.7,[byte('3')]=0.7,[byte('6')]=0.7,[byte('8')]=0.7,[byte('9')]=0.7,
+}
+fonts.protrusions.setups['default']={
+ factor=1,left=1,right=1,
+ [0x002C]={ 0,1 },
+ [0x002E]={ 0,1 },
+ [0x003A]={ 0,1 },
+ [0x003B]={ 0,1 },
+ [0x002D]={ 0,1 },
+ [0x2013]={ 0,0.50 },
+ [0x2014]={ 0,0.33 },
+ [0x3001]={ 0,1 },
+ [0x3002]={ 0,1 },
+ [0x060C]={ 0,1 },
+ [0x061B]={ 0,1 },
+ [0x06D4]={ 0,1 },
+}
+fonts.handlers.otf.features.normalize=function(t)
+ if t.rand then
+ t.rand="random"
+ end
+ return t
+end
+function fonts.helpers.nametoslot(name)
+ local t=type(name)
+ if t=="string" then
+ local tfmdata=fonts.hashes.identifiers[currentfont()]
+ local shared=tfmdata and tfmdata.shared
+ local fntdata=shared and shared.rawdata
+ return fntdata and fntdata.resources.unicodes[name]
+ elseif t=="number" then
+ return name
+ end
+end
+fonts.encodings=fonts.encodings or {}
+local reencodings={}
+fonts.encodings.reencodings=reencodings
+local function specialreencode(tfmdata,value)
+ local encoding=value and reencodings[value]
+ if encoding then
+ local temp={}
+ local char=tfmdata.characters
+ for k,v in next,encoding do
+ temp[k]=char[v]
+ end
+ for k,v in next,temp do
+ char[k]=temp[k]
+ end
+ return string.format("reencoded:%s",value)
+ end
+end
+local function reencode(tfmdata,value)
+ tfmdata.postprocessors=tfmdata.postprocessors or {}
+ table.insert(tfmdata.postprocessors,
+ function(tfmdata)
+ return specialreencode(tfmdata,value)
+ end
+ )
+end
+otffeatures.register {
+ name="reencode",
+ description="reencode characters",
+ manipulators={
+ base=reencode,
+ node=reencode,
+ }
+}
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-fonts-cbk']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+local nodes=nodes
+local traverse_id=node.traverse_id
+local glyph_code=nodes.nodecodes.glyph
+function nodes.handlers.characters(head)
+ local fontdata=fonts.hashes.identifiers
+ if fontdata then
+ local usedfonts,done,prevfont={},false,nil
+ for n in traverse_id(glyph_code,head) do
+ local font=n.font
+ if font~=prevfont then
+ prevfont=font
+ local used=usedfonts[font]
+ if not used then
+ local tfmdata=fontdata[font]
+ if tfmdata then
+ local shared=tfmdata.shared
+ if shared then
+ local processors=shared.processes
+ if processors and #processors>0 then
+ usedfonts[font]=processors
+ done=true
+ end
+ end
+ end
+ end
+ end
+ end
+ if done then
+ for font,processors in next,usedfonts do
+ for i=1,#processors do
+ local h,d=processors[i](head,font,0)
+ head,done=h or head,done or d
+ end
+ end
+ end
+ return head,true
+ else
+ return head,false
+ end
+end
+function nodes.simple_font_handler(head)
+ head=nodes.handlers.characters(head)
+ nodes.injections.handler(head)
+ nodes.handlers.protectglyphs(head)
+ head=node.ligaturing(head)
+ head=node.kerning(head)
+ return head
+end
+
+end -- closure
diff --git a/luaotfload-override.lua b/luaotfload-override.lua
new file mode 100644
index 0000000..94f2376
--- /dev/null
+++ b/luaotfload-override.lua
@@ -0,0 +1,81 @@
+if not modules then modules = { } end modules ['luat-ovr'] = {
+ version = 1.001,
+ comment = "companion to luatex-*.tex",
+ author = "Khaled Hosny and Elie Roux",
+ copyright = "Luaotfload Development Team",
+ license = "GNU GPL v2"
+}
+
+
+local module_name = "luaotfload"
+
+local texiowrite_nl = texio.write_nl
+local stringformat = string.format
+local tableconcat = table.concat
+local type = type
+
+--[[doc--
+We recreate the verbosity levels previously implemented in font-nms:
+
+ ==========================================================
+ lvl arg trace_loading trace_search suppress_output
+ ----------------------------------------------------------
+ (0) -> -q ⊥ ⊥ ⊤
+ (1) -> ∅ ⊥ ⊥ ⊥
+ (2) -> -v ⊤ ⊥ ⊥
+ (>2) -> -vv ⊤ ⊤ ⊥
+ ==========================================================
+
+--doc]]--
+local loglevel = 1 --- default
+local logout = "log"
+
+local set_loglevel = function (n)
+ if type(n) == "number" then
+ loglevel = n
+ end
+end
+logs.set_loglevel = set_loglevel
+logs.set_log_level = set_loglevel --- accommodating lazy typists
+
+local set_logout = function (s)
+ if s == "stdout" then
+ logout = "term"
+ --else --- remains “log”
+ end
+end
+
+logs.set_logout = set_logout
+
+local log = function (category, fmt, ...)
+ local res = { module_name, " |" }
+ if category then res[#res+1] = " " .. category end
+ if fmt then res[#res+1] = ": " .. stringformat(fmt, ...) end
+ texiowrite_nl(logout, tableconcat(res))
+end
+
+local stdout = function (category, fmt, ...)
+ local res = { module_name, " |" }
+ if category then res[#res+1] = " " .. category end
+ if fmt then res[#res+1] = ": " .. stringformat(fmt, ...) end
+ texiowrite_nl(tableconcat(res))
+end
+
+local level_ids = { common = 0, loading = 1, search = 2 }
+
+logs.names_report = function (mode, lvl, ...)
+ if type(lvl) == "string" then
+ lvl = level_ids[lvl]
+ end
+ if not lvl then lvl = 0 end
+
+ if loglevel > lvl then
+ if mode == "log" then
+ log (...)
+ else
+ stdout (...)
+ end
+ end
+end
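+
+--[[doc--
+A minimal usage sketch (the category “db” and the messages are made up
+for illustration):
+
+    logs.set_loglevel(2)
+    --- written to the log file: “loading” maps to level 1, below 2
+    logs.names_report("log",  "loading", "db", "scanning %d files", 42)
+    --- suppressed: “search” maps to level 2, which is not below 2
+    logs.names_report("term", "search",  "db", "looking up %s", "Iwona")
+
+--doc]]--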
+
+-- vim:tw=71:sw=4:ts=4:expandtab
diff --git a/luaotfload.dtx b/luaotfload.dtx
index 3538075..dbc822f 100644
--- a/luaotfload.dtx
+++ b/luaotfload.dtx
@@ -1,6 +1,6 @@
% \iffalse meta-comment
%
-% Copyright (C) 2009-2011 by Elie Roux <elie.roux@telecom-bretagne.eu>
+% Copyright (C) 2009-2013 by Elie Roux <elie.roux@telecom-bretagne.eu>
% and Khaled Hosny <khaledhosny@eglug.org>
% (Support: <lualatex-dev@tug.org>.)
%
@@ -14,7 +14,7 @@
% tex luaotfload.dtx
%
% Documentation:
-% pdflatex luaotfload.dtx
+% lualatex luaotfload.dtx
%
% The class ltxdoc loads the configuration file ltxdoc.cfg
% if available. Here you can specify further options, e.g.
@@ -36,7 +36,7 @@
\input docstrip.tex
\Msg{************************************************************************}
\Msg{* Installation}
-\Msg{* Package: luaotfload v2.0 OpenType layout system}
+\Msg{* Package: luaotfload v2.2 OpenType layout system}
\Msg{************************************************************************}
\keepsilent
@@ -47,7 +47,7 @@
\preamble
This is a generated file.
-Copyright (C) 2009-2011 by by Elie Roux <elie.roux@telecom-bretagne.eu>
+Copyright (C) 2009-2013 by Elie Roux <elie.roux@telecom-bretagne.eu>
and Khaled Hosny <khaledhosny@eglug.org>
(Support: <lualatex-dev@tug.org>.)
@@ -61,13 +61,12 @@ and the derived files
\let\MetaPrefix\DoubleperCent
-
\generate{%
- \usedir{tex/luatex/luaodfload}%
+ \usedir{tex/luatex/luaotfload}%
\file{luaotfload.sty}{\from{luaotfload.dtx}{package}}%
}
-% The following hacks are to generate a lua file with lua comments starting by
+% The following hacks are to generate a lua file with lua comments starting with
% -- instead of %%
\def\MetaPrefix{-- }
@@ -104,38 +103,63 @@ and the derived files
%<*driver>
\NeedsTeXFormat{LaTeX2e}
\ProvidesFile{luaotfload.drv}%
- [2011/10/06 v2.0 OpenType layout system]%
+ [2013/04/16 v2.2 OpenType layout system]%
\documentclass{ltxdoc}
-\usepackage{metalogo,multicol,mdwlist,fancyvrb,xcolor,xspace}
+\usepackage{metalogo,multicol,mdwlist,fancyvrb,xspace}
+\usepackage[x11names]{xcolor}
+%
+\def\primarycolor{DodgerBlue4} %%-> rgb 16 78 139 | #104e8b
+\def\secondarycolor{Goldenrod4} %%-> rgb 139 105 20  | #8b6914
+%
\usepackage[
- bookmarks=true,
- colorlinks=true,
- linkcolor=niceblue,
-% urlcolor=niceblue,
- citecolor=niceblue,
- pdftitle={The luaotfload package},
- pdfsubject={OpenType layout system for Plain TeX and LaTeX},
- pdfauthor={Elie Roux & Khaled Hosny},
+ bookmarks=true,
+ colorlinks=true,
+ linkcolor=\primarycolor,
+ urlcolor=\secondarycolor,
+ citecolor=\primarycolor,
+ pdftitle={The luaotfload package},
+ pdfsubject={OpenType layout system for Plain TeX and LaTeX},
+ pdfauthor={Elie Roux & Khaled Hosny},
pdfkeywords={luatex, lualatex, unicode, opentype}
- ]{hyperref}
-
+]{hyperref}
\usepackage{fontspec}
-\usepackage{unicode-math}
-\setmainfont[Ligatures=TeX]{Linux Libertine O}
-\setsansfont[Ligatures=TeX]{Linux Biolinum O}
-\setmathfont{XITS Math}
+%usepackage{unicode-math}%% broken
+\setmainfont[Numbers=OldStyle,Ligatures=TeX]{Linux Libertine O}
+\setmonofont[Ligatures=TeX,Scale=MatchLowercase]{Liberation Mono}
+%setsansfont[Ligatures=TeX]{Linux Biolinum O}
+\setsansfont[Ligatures=TeX,Scale=MatchLowercase]{Iwona Medium}
+%setmathfont{XITS Math}
+
+\newcommand\TEX {\TeX\xspace}
+\newcommand\LUA {Lua\xspace}
+\newcommand\PDFTEX {pdf\TeX\xspace}
+\newcommand\LUATEX {Lua\TeX\xspace}
+\newcommand\XETEX {\XeTeX\xspace}
+\newcommand\LATEX {\LaTeX\xspace}
+\newcommand\CONTEXT {Con\TeX t\xspace}
+\newcommand\OpenType{\identifier{Open\kern-.25ex Type}\xspace}
-\definecolor{niceblue}{rgb}{0.4,0.6,1.000}
+\def\definehighlight[#1][#2]%
+ {\ifcsname #1\endcsname\else
+ \expandafter\def\csname #1\endcsname%
+ {\bgroup#2\csname #1_indeed\endcsname}
+ \expandafter\def\csname #1_indeed\endcsname##1%
+ {##1\egroup}%
+ \fi}
-\newcommand\tex {\TeX\xspace}
-\newcommand\pdftex {PDF\TeX\xspace}
-\newcommand\luatex {Lua\TeX\xspace}
-\newcommand\xetex {\XeTeX\xspace}
-\newcommand\latex {\LaTeX\xspace}
-\newcommand\context{Con\TeX t\xspace}
+\def\restoreunderscore{\catcode`\_=12\relax}
+
+\definehighlight [fileent][\ttfamily\restoreunderscore] %% files, dirs
+\definehighlight [texmacro][\sffamily\itshape\textbackslash] %% cs
+\definehighlight[luafunction][\sffamily\itshape\restoreunderscore] %% lua identifiers
+\definehighlight [identifier][\sffamily] %% names
+\definehighlight [abbrev][\rmfamily\scshape] %% acronyms
+\definehighlight [emphasis][\rmfamily\slshape] %% level 1 emph
\newcommand*\email[1]{\href{mailto:#1}{#1}}
+\renewcommand\partname{Part}%% gets rid of the stupid “file” heading
+
\VerbatimFootnotes
\begin{document}
\DocInput{luaotfload.dtx}%
@@ -163,611 +187,1334 @@ and the derived files
%
% \GetFileInfo{luaotfload.drv}
%
-% \title{The \textsf{luaotfload} package}
-% \date{2011/10/06 v2.0}
+% \title{The \identifier{luaotfload} package}
+% \date{2013/04/16 v2.2}
% \author{Elie Roux and Khaled Hosny\\
% Support: \email{lualatex-dev@tug.org}}
%
% \maketitle
%
% \begin{abstract}
-% This package is an adaptation of the \context font loading system, providing
-% the ability to load \textsf{OpenType} fonts with extended font loading syntax
-% supporting a large selection of OpenType font features.
+% This package is an adaptation of the \CONTEXT font loading system.
+% It allows for loading \OpenType fonts with an extended syntax and adds
+% support for a variety of font features.
% \end{abstract}
%
% \tableofcontents
%
+% \part{Package Description}
+%
% \section{Introduction}
%
-% Font management and installation has always been painful with \tex. A lot of
-% files are needed for one font (tfm, pfb, map, fd, vf), and as \tex is 8-bit
-% each font is limited to 256 characters. But the font world has evolved since
-% \tex, and new font technologies have appeared, most notably the so called
-% \emph{smart font} technologies like \textsf{OpenType} fonts. These fonts can
-% contain a lot of characters, and additional functionalities like ligatures,
-% old-style numbers, small capitals, etc., and support more complex writing
-% systems like Arabic and Indic\footnote{Unfortunately, \textsf{luaotfload}
-% doesn't support Indic scripts right now.} scripts. They are widely deployed
-% and available for all modern operating systems and are becoming the de facto
-% standard fonts for advanced text layout. Until now the only way to use them
-% directly in the \tex world was by using them with \xetex.
-%
-% Unlike \xetex, \luatex does not provide direct support for using these fonts
-% by default, but it provides a way to hook Lua code in some points of the \tex
-% processing; for instance, we can improve the font loading system, and text
-% procession, which what this package is about.
-%
-% \section{Loading fonts}
-%
-% \textsf{luaotfload} supports an extended font loading syntax which looks
-% like:
+% Font management and installation has always been painful with \TEX. A lot of
+% files are needed for one font (\abbrev{tfm}, \abbrev{pfb}, \abbrev{map},
+% \abbrev{fd}, \abbrev{vf}), and due to the 8-bit encoding each font is limited
+% to 256 characters.
+% But the font world has evolved since the original
+% \TEX, and new typographic systems have appeared, most notably the so
+% called \emphasis{smart font} technologies like \OpenType
+% fonts (\abbrev{otf}).
+% These fonts can contain many more characters than \TEX fonts, as well as additional
+% functionality like ligatures, old-style numbers, small capitals,
+% etc., and support more complex writing systems like Arabic and
+% Indic\footnote{%
+% Unfortunately, \identifier{luaotfload} doesn't support Indic
+% scripts right now.
+% Assistance in implementing the prerequisites is greatly
+% appreciated.
+% }
+% scripts.
+% \OpenType fonts are widely deployed and available for all
+% modern operating systems.
+% As of 2013 they have become the de facto standard for advanced text
+% layout.
+% However, until recently the only way to use them directly in the \TEX
+% world was with the \XETEX engine.
+%
+% Unlike \XETEX, \LUATEX has no built-in support for
+% \OpenType or technologies other than the original \TEX fonts.
+% Instead, it provides hooks for executing \LUA code during the \TEX run
+% that allow implementing extensions for loading fonts and manipulating
+% how input text is processed without modifying the underlying engine.
+% This is where \identifier{luaotfload} comes into play:
+% Based on code from \CONTEXT, it extends \LUATEX with functionality necessary
+% for handling \OpenType fonts.
+% Additionally, it provides a convenient means of accessing fonts known
+% to the operating system by indexing their metadata.
+%
+% \section{Loading Fonts}
+%
+% \identifier{luaotfload} supports an extended font loading syntax:
%
% \begin{center}
-% |\font\foo={|\meta{prefix}|:|\meta{font name}|:|\meta{font features}|}| \meta{\tex font features}
+% |\font\foo={|%
+% \meta{prefix}|:|%
+% \meta{font name}|:|%
+% \meta{font features}|}|%
+% \meta{\TEX font features}
% \end{center}
%
% \noindent
-% The curly brackets are optional and are used for escaping spaces in font
-% names (double quotes can also used for the same purpose).
+% The curly brackets are optional and escape the spaces in the enclosed
+% font name (alternatively, double quotes serve the same purpose).
+% The individual parts of the syntax are:
%
% \paragraph{Prefix}
%
-% The \meta{prefix} can be either |file:| or |name:|, which specify whether to
-% use a select the font from its filename or font name, respectively. If no
-% prefix is specified |name:| is assumed.
+% The \meta{prefix} is either |file:| or |name:|.
+% It determines whether the font loader should interpret the request as a
+% file name or font name, respectively, which again influences how it
+% will attempt to locate the font.
+% The prefix can be omitted, in which case |name:| is assumed.
+%
+%% \iffalse%% how am i supposed to friggin comment stuff in a dtx???
+%% TODO
+%% it would appear that the next paragraph is incorrect; I get
+%% name: lookups regardless unless the font file is actually
+%% in CWD
+%% \fi
+%% For compatibility with \XETEX, surrounding the \meta{font name} with
+%% square brackets is synonymous to using the |file:| prefix.
+%
+% In order for fonts installed both in system locations and in your
+% \fileent{texmf} to be accessible by font name, \identifier{luaotfload} must
+% first collect the metadata included in the files.
+% Please refer to section ~\ref{sec:fontdb} below for instructions on how to
+% create the database.
%
-% For compatibility with \xetex, surrounding the \meta{font name} with square
-% brackets is synonymous to using the |file:| prefix.
+% \paragraph{Font name}
%
-% Accessing fonts by fontname allows loading system installed fonts as well as
-% \textsc{texmf} ones, and requires a font names database; see
-% Section~\ref{sec:fontdb} for more information.
+% The \meta{font name} can be either a font filename or actual font
+% name based on the \meta{prefix} as mentioned above.
%
-% \paragraph{Font name}
+% A filename request may optionally include the absolute path to the font file,
+% allowing for fonts outside the standard locations to be loaded as well.
+% If no path is specified, then \identifier{kpathsea} is used to locate the
+% font (which will typically be in the \fileent{texmf} tree or the
+% current directory).
%
-% The \meta{font name} can be either a font filename or actual font name based
-% on the \meta{prefix} as mentioned above.
+% \subparagraph{Examples for loading by file name}
%
-% Fonts loaded by filename may either include their absolute path in the
-% filesystem or consist of just the filename with a path. If no path is
-% specified then \textsf{kpathsea} is used to locate the font (which will
-% typically be in the \textsc{texmf} tree or the current directory).
+% For example, a conventional \abbrev{type1} font can be loaded with a \verb|file:|
+% request like so:
%
-% For example,
% \begin{quote}
-% \begin{verbatim}
-% \font\1={file:ec-lmr10} at 10pt
-% \font\2={/Users/Shared/Fonts/aldus.otf} at 11pt
-% \font\3={name:TeX Gyre Pagella} at 9pt
-% \end{verbatim}
+% \begin{verbatim}
+% \font\lmromanten={file:ec-lmr10} at 10pt
+% \end{verbatim}
% \end{quote}
%
+% The \OpenType version of Janusz Nowacki’s font \emphasis{Antykwa
+% Półtawskiego} (in \TEX Live) in its condensed variant can be loaded as
+% follows:
+%
+% \begin{quote}
+% \begin{verbatim}
+% \font\apcregular=file:antpoltltcond-regular.otf at 42pt
+% \end{verbatim}
+% \end{quote}
+%
+% The next example shows how to load the \emphasis{Porson} font digitized by
+% the Greek Font Society using \XETEX-style syntax and an absolute path from a
+% non-standard directory:
+%
+% \begin{quote}
+% \begin{verbatim}
+% \font\gfsporson="[/tmp/GFSPorson.otf]" at 12pt
+% \end{verbatim}
+% \end{quote}
+%
+% \subparagraph{Examples for loading by font name}
+%
+% The \verb|name:| lookup does not depend on cryptic filenames:
+%
+% \begin{quote}
+% \begin{verbatim}
+% \font\pagellaregular={name:TeX Gyre Pagella} at 9pt
+% \end{verbatim}
+% \end{quote}
+%
+% A bit more specific but essentially the same lookup would be:
+%
+% \begin{quote}
+% \begin{verbatim}
+% \font\pagellaregular={name:TeX Gyre Pagella Regular} at 9pt
+% \end{verbatim}
+% \end{quote}
+%
+% Which fits nicely with the whole set:
+%
+% \begin{quote}
+% \begin{verbatim}
+% \font\pagellaregular ={name:TeX Gyre Pagella Regular} at 9pt
+% \font\pagellaitalic ={name:TeX Gyre Pagella Italic} at 9pt
+% \font\pagellabold ={name:TeX Gyre Pagella Bold} at 9pt
+% \font\pagellabolditalic={name:TeX Gyre Pagella Bolditalic} at 9pt
+%
+% {\pagellaregular foo bar baz\endgraf}
+% {\pagellaitalic foo bar baz\endgraf}
+% {\pagellabold foo bar baz\endgraf}
+% {\pagellabolditalic foo bar baz\endgraf}
+%
+% ...
+% \end{verbatim}
+% \end{quote}
%
% \paragraph{Font features}
%
% \meta{font features} is semicolon-separated list of feature
-% tags\footnote{\url{http://www.microsoft.com/typography/otspec/featurelist.htm}}
-% and font options. Font features are prepended with a |+| to turn them on and
-% a |-| to turn them off, alternatively you can pass |true| or |false| value to
-% the feature:
-%
-% |\font\test=Latin Modern Roman:+clig;-kern|
+% tags\footnote{%
+% Cf. \url{http://www.microsoft.com/typography/otspec/featurelist.htm}.
+% }
+% and font options.
+% Prepending a font feature with a |+| (plus sign) enables it, whereas
+% a |-| (minus) disables it. For instance, the request
%
-% \noindent or:
+% \begin{quote}
+% \begin{verbatim}
+% \font\test=LatinModernRoman:+clig;-kern
+% \end{verbatim}
+% \end{quote}
%
-% |\font\test=Latin Modern Roman:clig=true;kern=false|
+% \noindent activates contextual ligatures (|clig|) and disables
+% kerning (|kern|).
+% Alternatively the options |true| or |false| can be passed to
+% the feature in a key/value expression.
+% The following request has the same meaning as the last one:
%
-% \noindent For alternate substation features you can pass the index of the
-% variant you want (starting from 1) or |random| to randomly select a variant
-% each time an affected glyph is shown, e.g.:
+% \begin{quote}
+% \begin{verbatim}
+% \font\test=LatinModernRoman:clig=true;kern=false
+% \end{verbatim}
+% \end{quote}
%
-% |\font\test=Latin Modern Roman:salt=1|
+% \noindent
+% Furthermore, this second syntax is required should a font feature
+% accept other options besides a true/false switch.
+% For example, \emphasis{stylistic alternates} (|salt|) are variants of given
+% glyphs.
+% They can be selected either explicitly by supplying the variant
+% index (starting from one), or randomly by setting the value to,
+% obviously, |random|.
+%
+% \iffalse TODO verify that this actually works with a font that supports
+% the salt/random feature!\fi
+% \begin{quote}
+% \begin{verbatim}
+% \font\librmsaltfirst=LatinModernRoman:salt=1
+% \end{verbatim}
+% \end{quote}
%
-% \noindent Known font options include:
+% \noindent Other font options include:
%
% \begin{description}
+%
% \item [mode] \hfill \\
-% \textsf{luaotfload} has two OpenType processing modes; |base| and |node|.
-% |base| mode works by mapping OpenType features to traditional \tex ligature
-% and kerning mechanisms, thus supporting only non-contextual substitutions and
-% kerning pairs, but is slightly faster. |node| works by direct processing of
-% the node list at Lua end and have more wide support of OpenType features but
-% can be slow especially with complex fonts and can't be used in math mode.
-%
-% By default |node| mode is used, and you have to manually force |base| mode
-% when needed e.g. for math fonts.
-%
-% \item [script] \hfill \\
-% OpenType script
-% string,\footnote{\url{http://www.microsoft.com/typography/otspec/scripttags.htm}}
-% default value is |dflt|. Some fonts don't assign features to the |dflt|
-% script, in which case the script need to be set explicitly.
+% \identifier{luaotfload} has two \OpenType processing
+% \emphasis{modes}:
+% \identifier{base} and \identifier{node}.
+%
+% \identifier{base} mode works by mapping \OpenType
+% features to traditional \TEX ligature and kerning mechanisms.
+% Supporting only non-contextual substitutions and kerning
+% pairs, it is the slightly faster, albeit somewhat limited, variant.
+% \identifier{node} mode works by processing \TeX’s internal
+% node list directly at the \LUA end and supports
+% a wider range of \OpenType features.
+% The downside is that the intricate operations required for
+% \identifier{node} mode may slow down typesetting especially
+% with complex fonts and it does not work in math mode.
+%
+% By default \identifier{luaotfload} is in \identifier{node}
+% mode, and \identifier{base} mode has to be requested where needed,
+% e.~g. for math fonts.
+%
+% \item [script] \label{script-tag} \hfill \\
+% An \OpenType script tag;\footnote{%
+% See \url{http://www.microsoft.com/typography/otspec/scripttags.htm}
+% for a list of valid values.
+% For scripts derived from the Latin alphabet the value
+% |latn| is a good choice.
+% }
+% the default value is |dflt|.
+% Some fonts, including very popular ones by foundries like Adobe,
+% do not assign features to the |dflt| script, in which case the
+% script needs to be set explicitly (see the combined example
+% following this list).
%
% \item [language] \hfill \\
-% OpenType language
-% string,\footnote{\url{http://www.microsoft.com/typography/otspec/languagetags.htm}}
-% default value is |latn|.
+% An \OpenType language system identifier,\footnote{%
+% Cf. \url{http://www.microsoft.com/typography/otspec/languagetags.htm}.
+% }
+% defaulting to |dflt|.
%
% \item [featurefile] \hfill \\
-% a comma-separated list of feature files to be applied to the font. Feature
-% files are textual representation of OpenType tables and can be used to extend
-% OpenType features of the font on fly. Features defined in a feature file,
-% after being applied to the font, can be enabled/disabled like any other
-% feature. The syntax is documented in Adobe's OpenType Feature File
-% Specification.\footnote{\url{http://www.adobe.com/devnet/opentype/afdko/topic_feature_file_syntax.html}}
-%
-% For example, to set a |tkrn| feature from |mykern.fea| file:
-%
-% |\font\test=Latin Modern Roman:featurefile=mykern.fea;+tkrn|
+% A comma-separated list of feature files to be applied to the
+% font.
+% Feature files contain a textual representation of
+% \OpenType tables and extend the features of a font
+% on the fly.
+% After they are applied to a font, features defined in a
+% feature file can be enabled or disabled just like any
+% other font feature.
+% The syntax is documented in \identifier{Adobe}’s
+% \OpenType Feature File Specification.\footnote{%
+% Cf. \url{http://www.adobe.com/devnet/opentype/afdko/topic_feature_file_syntax.html}.
+% }
+%
+% For a demonstration of how to set a |tkrn| feature consult
+% the file |tkrn.fea| that is part of \identifier{luaotfload}.
+% It can be read and applied as follows:
+%
+% |\font\test=Latin Modern Roman:featurefile=tkrn.fea;+tkrn|
%
% \item [color] \hfill \\
-% font color, defined as a triplet of two-digit hexadecimal RGB values, with
-% optionally another value for the transparency (where |00| is completely
-% transparent and |FF| is opaque.)
+% A font color, defined as a triplet of two-digit hexadecimal
+% \abbrev{rgb} values, with an optional fourth value for
+% transparency
+% (where |00| is completely transparent and |FF| is opaque).
%
-% For example, to set text in semitransparent red:
+% For example, in order to set text in semitransparent red:
%
-% |\font\test=Latin Modern Roman:color=FF0000BB|
+% \begin{quote}
+% \begin{verbatim}
+% \font\test={Latin Modern Roman}:color=FF0000BB
+% \end{verbatim}
+% \end{quote}
%
% \item [protrusion \& expansion] \hfill \\
-% Both keys control microtypographic features of the font, namely glyph
-% protrusion and expansion. The value of the key is the name of predefined Lua
-% tables of protrusion and expansion values; see the end of |otfl-fonts-ext.lua|
-% file for an example of such tables. The only predefined value is |default|.
+% These keys control microtypographic features of the font,
+% namely \emphasis{character protrusion} and \emphasis{font
+% expansion}.
+% Their arguments are names of \LUA tables that contain
+% values for the respective features.\footnote{%
+% For examples of the table layout please refer to the
+% section of the file \fileent{luaotfload-fonts-ext.lua} where the
+% default values are defined.
+% Alternatively and with loss of information, you can dump
+% those tables into your terminal by issuing
+% \begin{verbatim}
+% \directlua{inspect(fonts.protrusions.setups.default)
+% inspect(fonts.expansions.setups.default)}
+% \end{verbatim}
+% at some point after loading \fileent{luaotfload.sty}.
+% }
+% For both, only the set \identifier{default} is predefined.
+%
+% For example, to enable default protrusion\footnote{%
+% You also need to set
+% \verb|\pdfprotrudechars=2| and
+% \verb|\pdfadjustspacing=2|
+% to activate protrusion and expansion, respectively.
+% See the
+% \href{http://mirrors.ctan.org/systems/pdftex/manual/pdftex-a.pdf}%
+% {\PDFTEX manual}
+% for details.
+% }:
+%
+% \begin{quote}
+% \begin{verbatim}
+% \font\test=LatinModernRoman:protrusion=default
+% \end{verbatim}
+% \end{quote}
+% \end{description}
%
-% For example, to enable default protrusion:\footnote{You also need to set
-% |\pdfprotrudechars2 \pdfadjustspacing2| to activate protrusion and expansion,
-% respectively. See \pdftex manual for details.}
+% \paragraph{Non-standard font features}
+% \identifier{luaotfload} adds a number of features that are not defined
+% in the original \OpenType specification, most of them
+% aiming at emulating the behavior familiar from other \TEX engines.
+% Currently (2013) there are three of them (an example follows the list):
%
-% |\font\test=Latin Modern Roman:protrusion=default|
-% \end{description}
+% \begin{description}
+%
+% \item [anum]
+% Substitutes the glyphs in the \abbrev{ascii} number range
+% with their counterparts from eastern Arabic or Persian,
+% depending on the value of \identifier{language}.
%
-% \subparagraph{Non-standard font features}
-% \textsf{luaotfload} defines some additional font feature not defined in
-% OpenType, currently three features are defined:
+% \item [tlig]
+% Applies legacy \TEX ligatures:
%
-% \begin{itemize*}
-% \item |anum|: replaces European numbers with eastern Arabic numbers or
-% Persian numbers, depending on the value of |language|.
-% \item |tlig|: applies legacy \tex ligatures: |``|, |''|, |`|, |'|, |"|, |--|,
-% |---|, |!`| and |?`|.\footnote{For \xetex users: this is the equivalent of
-% writing |mapping=text-tex| using \xetex's input remapping feature.}
-% \end{itemize*}
+% \begin{tabular}{rlrl}
+% `` & \verb|``| & '' & \verb|''| \\
+% ` & \verb|`| & ' & \verb|'| \\
+% " & \verb|"| & -- & \verb|--| \\
+% --- & \verb|---| & !` & \verb|!`| \\
+% ?` & \verb|?`| & & \\
+% \end{tabular}
+%
+% \footnote{%
+% These contain the feature set \verb|trep| of earlier
+% versions of \identifier{luaotfload}.
+%
+% Note to \XETEX users: this is the equivalent of the
+% assignment \verb|mapping=text-tex| using \XETEX's input
+% remapping feature.
+% }
+%
+% \item [itlc]
+% Computes italic correction values (active by default).
+%
+% \end{description}
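+%
+% These are requested like any other font feature; for example, the
+% following (hypothetical) declaration enables the legacy ligatures for
+% the font used in the earlier examples:
+%
+% \begin{quote}
+% \begin{verbatim}
+% \font\test=LatinModernRoman:+tlig
+% \end{verbatim}
+% \end{quote}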
%
%
%
% \section{Font names database}
% \label{sec:fontdb}
%
-% As introduced in the previous section, \textsf{luaotfload} uses a database to
-% keep track of fonts available to \luatex. Using this database, fonts can be
-% loaded by font name as well as filename.
-%
-% When \textsf{luaotfload} is asked to load a font by font name, it will check
-% if font names database exists and load it, or generate a new database if non
-% exists. This is all done automatically without user intervention. When the
-% asked font is missing from the database, it will attempt to update the
-% database and try to find the font again, so that the user can install new
-% fonts without worrying about manually updating the database.
-%
-% However, it is sometimes desirable to update the database manually, so
-% \textsf{luaotfload} provides a |mkluatexfontdb| utility to manually update
-% the database. |mkluatexfontdb| is a lua script that can be either run
-% directly or as an argument to |texlua|, depending on your system.\footnote{On
-% MS Windows it can be run either by calling the wrapper application
-% |mkluatexfontdb.exe| or with |texlua.exe mkluatexfontdb.lua|.}
-%
-% The first time the database is generated may take quite some time to process
-% every font on your computer. This is particularly noticeable if it occurs
-% during a typesetting run. Subsequent runs to update the database will be
-% quite fast, however.
-%
-% \textsf{luaotfload} will parse standard places for fonts in your system to
-% build the font database. On Linux, it will read |fontconfig| configuration
-% files to find the font locations; on Windows and Mac~OS~X, it will search in
-% the standard font locations, |%WINDIR%\Fonts| in Windows and
-% |~/Library/Fonts|, |/Library/Fonts|, |/System/Library/Fonts|, and
-% |/Network/Library/Fonts| in Mac~OS~X.
-%
-% If you do not wish the standard font locations be searched by default but
-% would rather specify the exact locations in which to find your fonts, set the
-% |OSFONTDIR| environment variable instead. When this variable is set, only the
-% specified directories will be searched.
-%
-% |mkluatexfontdb.lua --help| provides a brief summary of the functionality of
-% the script and includes some advanced options that we have not mentioned
-% here.
-%
-% \subsection{Blacklisting fonts}
-%
-% Some fonts are problematic in \luatex, if you found that your document takes
-% too long to compile, or eats all the free memory, you can find the culprit
-% file by running |mkluatexfontdb| utility with |-v| option to see which font
-% file it is stuck with. You can then instruct \textsf{luaotfload} to ignore
-% this font by adding it to the blacklist configuration file.
-%
-% Simply, create a file named |otfl-blacklist.cnf| and added the to be
-% blacklisted files, one per line. Then put the file some where \textsf{kpse}
-% can find. You can either use the base name or the full path. Any thing after
-% a |%| sign is ignored. \textsf{luaotfload} reads all files named named
-% |otfl-blacklist.cnf|, so you can add your own fonts to the global blacklist
-% by creating a local file |otfl-blacklist.cnf| with the entries you need. You
-% can also remove a font from this blacklist by prepending the name with a dash
-% (|-|).
+% As mentioned above, \identifier{luaotfload} keeps track of which
+% fonts are available to \LUATEX by means of a \emphasis{database}.
+% This allows referring to fonts not only by explicit filenames but
+% also by the proper names contained in the metadata which is often
+% more accessible to humans.\footnote{%
+% The tool \href{http://www.lcdf.org/type/}{\fileent{otfinfo}} (comes
+% with \TEX Live), when invoked on a font file with the \verb|-i|
+% option, lists the variety of name fields defined for it.
+% }
+%
+% When \identifier{luaotfload} is asked to load a font by a font name,
+% it will check if the database exists and load it, or else generate a
+% fresh one.
+% Should it then fail to locate the font, an update to the database is
+% performed in case the font has been added to the system only
+% recently. As soon as the database is updated, the resolver will try
+% and look up the font again, all without user intervention.
+% The goal is for \identifier{luaotfload} to act in the background and
+% behave as unobtrusively as possible, while providing a convenient
+% interface to the fonts installed on the system.
+%
+% Generating the database for the first time may take a while since it
+% inspects every font file on your computer.
+% This is particularly noticeable if it occurs during a typesetting run.
+% In any case, subsequent updates to the database will be quite fast.
+%
+% \subsection[fontdbutil / mkluatexfontdb.lua]%
+% {\fileent{fontdbutil} /
+% \fileent{mkluatexfontdb.lua}\footnote{%
+% The script may be named just \fileent{mkluatexfontdb} in your
+% distribution.
+% }}
+%
+% It can still be desirable at times to do some of these steps
+% manually, and without having to compile a document.
+% To this end, \identifier{luaotfload} comes with the utility
+% \fileent{fontdbutil} that offers an interface to the database
+% functionality.
+% Being a \LUA script, it can be run in two ways:
+% either make it executable (\verb|chmod +x| on unixoid systems) or
+% pass it as an argument to \fileent{texlua}.\footnote{%
+% Tests by the maintainer show only marginal performance gain by
+% running with Luigi Scarso’s
+% \href{https://foundry.supelec.fr/projects/luajittex/}%
+% {\identifier{Luajit\kern-.25ex\TEX}},
+% which is probably due to the fact that most of the time is spent
+% on file system operations.
+%
+% \emphasis{Note}:
+% On \abbrev{MS} \identifier{Windows} systems, the script can be run
+% either by calling the wrapper application
+% \fileent{fontdbutil.exe} or as
+% \verb|texlua.exe fontdbutil|.
+% }
+% Invoked with the argument \verb|--update| it will perform a database
+% update, scanning for fonts not indexed.
+%
+% \begin{quote}
+% \begin{verbatim}
+% fontdbutil --update
+% \end{verbatim}
+% \end{quote}
+%
+% Adding the \verb|--force| switch will initiate a complete
+% rebuild of the database.
+%
+% \begin{quote}
+% \begin{verbatim}
+% fontdbutil --update --force
+% \end{verbatim}
+% \end{quote}
+%
+% For the sake of backwards compatibility, \fileent{fontdbutil} may be
+% renamed or symlinked to \fileent{mkluatexfontdb}.
+% Whenever it is run under this name, it will update the database
+% first, mimicking the behavior of earlier versions of
+% \identifier{luaotfload}.
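+%
+% A bare invocation under the compatibility name is thus, in effect,
+% equivalent to requesting a database update:
+%
+% \begin{quote}
+% \begin{verbatim}
+% mkluatexfontdb
+% \end{verbatim}
+% \end{quote}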
+%
+% \subsection{Search Paths}
+%
+% \identifier{luaotfload} scans those directories where fonts are
+% expected to be located on a given system.
+% On a Linux machine it follows the paths listed in the
+% \identifier{Fontconfig} configuration files;
+% consult \verb|man 5 fonts.conf| for further information.
+% On \identifier{Windows} systems, the standard location is
+% \verb|Windows\Fonts|,
+% while \identifier{Mac OS~X} requires a multitude of paths to
+% be examined.
+% The complete list is given in table \ref{table-searchpaths}.
+% Other paths can be specified by setting the environment variable
+% \verb+OSFONTDIR+.
+% If it is non-empty, then search will be limited to the included
+% directories.
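+%
+% For example, on a unixoid system the scan for a single update can be
+% restricted to two directories as follows (the paths and the
+% colon-separated list are merely illustrative):
+%
+% \begin{quote}
+% \begin{verbatim}
+% OSFONTDIR="/usr/share/fonts:$HOME/fonts" fontdbutil --update
+% \end{verbatim}
+% \end{quote}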
+%
+% \begin{table}[t]
+% \hrule
+% \caption{List of paths searched for each supported operating
+% system.}
+% \renewcommand{\arraystretch}{1.2}
+% \begin{center}
+% \begin{tabular}{lp{.5\textwidth}}
+% Windows & \verb|%WINDIR%\Fonts|
+% \\
+% Linux & \fileent{/usr/local/etc/fonts/fonts.conf} and\hfill\break
+% \fileent{/etc/fonts/fonts.conf}
+% \\
+% Mac & \fileent{\textasciitilde/Library/Fonts},\break
+% \fileent{/Library/Fonts},\break
+% \fileent{/System/Library/Fonts}, and\hfill\break
+% \fileent{/Network/Library/Fonts}
+% \\
+% \end{tabular}
+% \end{center}
+% \label{table-searchpaths}
+% \hrule
+% \end{table}
+%
+% \subsection{Querying from Outside}
+%
+% \fileent{fontdbutil} also provides rudimentary means of
+% accessing the information collected in the font database.
+% If the option \verb|--find=|\emphasis{name} is given, the script will
+% search the fonts indexed by \identifier{luaotfload} for a
+% matching name.
+% For instance, the invocation
+%
+% \begin{quote}
+% \begin{verbatim}
+% fontdbutil --find="Iwona Regular"
+% \end{verbatim}
+% \end{quote}
+%
+% \noindent
+% will verify if “Iwona Regular” is found in the database and can be
+% readily requested in a document.
+%
+% If you are unsure about the actual font name, then add the
+% \verb|-F| (or \verb|--fuzzy|) switch to the command line to enable
+% approximate matching.
+% Suppose you cannot precisely remember if the variant of
+% \identifier{Iwona} you are looking for was “Bright” or “Light”.
+% The query
+%
+% \begin{quote}
+% \begin{verbatim}
+% fontdbutil -F --find="Iwona Bright"
+% \end{verbatim}
+% \end{quote}
+%
+% \noindent
+% will tell you that indeed the latter name is correct.
+%
+% Basic information about fonts in the database can be displayed
+% using the \verb|-i| option (\verb|--info|).
+% \begin{quote}
+% \begin{verbatim}
+% fontdbutil -i --find="Iwona Light Italic"
+% \end{verbatim}
+% \end{quote}
+% \noindent
+% The meaning of the printed values is described in section 4.4 of the
+% \LUATEX reference manual.\footnote{%
+% In \TEX Live: \fileent{texmf-dist/doc/luatex/base/luatexref-t.pdf}.
+% }
+%
+% \verb|fontdbutil --help| will list the available command line
+% switches, including some not discussed in detail here.
+%
+% \subsection{Blacklisting Fonts}
+% \label{font-blacklist}
+%
+% Some fonts are problematic in general, or just in \LUATEX.
+% If you find that compiling your document takes far too long or eats
+% away all your system’s memory, you can track down the culprit by
+% running \verb|fontdbutil -v| to increase verbosity.
+% Take note of the \emphasis{filename} of the font on which database
+% creation fails, and append it to the file
+% \fileent{luaotfload-blacklist.cnf}.
+%
+% A blacklist file is a list of font filenames, one per line.
+% Specifying the full path to where the file is located is optional;
+% the plain filename should suffice.
+% File extensions (\fileent{.otf}, \fileent{.ttf}, etc.) may be omitted.
+% Anything after a percent (|%|) character until the end of the line
+% is ignored, so use this to add comments.
+% Place this file in some location where the \identifier{kpse}
+% library can find it, e.~g.
+% \fileent{texmf-local/tex/luatex/luaotfload} if you are running
+% \identifier{\TEX Live},\footnote{%
+% You may have to run \verb|mktexlsr| if you created a new file in
+% your \fileent{texmf} tree.
+% }
+% or just leave it in the working directory of your document.
+% \identifier{luaotfload} reads all files named
+% \fileent{luaotfload-blacklist.cnf} it finds, so the fonts in
+% \fileent{./luaotfload-blacklist.cnf} extend the global blacklist.
+%
+% Furthermore, a filename prepended with a dash character (|-|) is
+% removed from the blacklist, causing it to be temporarily whitelisted
+% without modifying the global file.
+% An example with explicit paths:
%
% \begin{verbatim}
% % example otf-blacklist.cnf
-% /Library/Fonts/GillSans.ttc % luaotfload ignores this font
-% -/Library/Fonts/Optima.ttc % it is usable again, even if it
-% % is blacklisted somewhere else
+% /Library/Fonts/GillSans.ttc % Luaotfload ignores this font.
+% -/Library/Fonts/Optima.ttc % This one is usable again, even if
+% % blacklisted somewhere else.
% \end{verbatim}
%
-% \section{Used \context files}
+% \section{Files from \CONTEXT and \LUATEX-Fonts}
%
-% This package is a wrapper for several files taken from the \context macro
-% package. The philosophy is to let \context do all the implementation and
-% update these files from time to time. So we try not to modify the files taken
-% from \context as far as possible, but we changed their names to prevent name
+% \identifier{luaotfload} relies on code originally written by Hans
+% Hagen\footnote{%
+% The creator of the \href{http://wiki.contextgarden.net}{\CONTEXT}
+% format.
+% }
+% for and tested with \CONTEXT.
+% It integrates the font loader as distributed in
+% the \identifier{\LUATEX-Fonts} package.
+% The original \LUA source files have been combined using the
+% \fileent{mtx-package} script into a single, self-contained blob.
+% In this form the font loader has no further dependencies\footnote{%
+% It covers, however, to some extent the functionality of the
+% \identifier{lualibs} package.
+% }
+% and requires only minor adaptations to integrate into
+% \identifier{luaotfload}.
+% The guiding principle is to let \CONTEXT/\LUATEX-Fonts take care of
+% the implementation, and update the imported code from time to time.
+% As maintainers, we aim at importing files from upstream essentially
+% \emphasis{unmodified}, except for renaming them to prevent name
% clashes.
+% This job has been greatly alleviated since the advent of
+% \LUATEX-Fonts, prior to which the individual dependencies had to be
+% manually spotted and extracted from the \CONTEXT source code in a
+% complicated and error-prone fashion.
+%
+% Below is a commented list of the files distributed with
+% \identifier{luaotfload} in one way or the other.
+% See figure \ref{file-graph} on page \pageref{file-graph} for a
+% graphical representation of the dependencies.
+% From \LUATEX-Fonts, only the file \fileent{luatex-fonts-merged.lua}
+% has been imported as \fileent{luaotfload-merged.lua}.
+% It is generated by \fileent{mtx-package}, a \LUA source code merging
+% tool developed by Hans Hagen.\footnote{%
+% \fileent{mtx-package} is
+% \href
+% {http://repo.or.cz/w/context.git/blob_plain/refs/heads/origin:/scripts/context/lua/mtx-package.lua}
+% {part of \CONTEXT}
+% and requires \fileent{mtxrun}.
+% Run
+% \verb|mtxrun --script package --help|
+% to display further information.
+% For the actual merging code see the file
+% \fileent{util-mrg.lua} that is part of \CONTEXT.
+% }
+% It houses several \LUA files that can be classed in three
+% categories.
+%
+% \begin{itemize}
+% \let\normalitem=\item
+% \def\incitem#1{%
+% \normalitem{\fileent{#1}}
+% }
+% \normalitem \emphasis{\LUA utility libraries}, a subset
+% of what is provided by the \identifier{lualibs}
+% package.
+%
+% \begin{multicols}{2}
+% \begin{itemize}
+% \incitem{l-lua.lua} \incitem{l-lpeg.lua}
+% \incitem{l-function.lua} \incitem{l-string.lua}
+% \incitem{l-table.lua} \incitem{l-io.lua}
+% \incitem{l-file.lua} \incitem{l-boolean.lua}
+% \incitem{l-math.lua} \incitem{util-str.lua}
+% \end{itemize}
+% \end{multicols}
+%
+% \normalitem The \emphasis{font loader} itself.
+% These files have been written for
+% \LUATEX-Fonts and they are distributed along
+% with \identifier{luaotfload}.
+% \begin{multicols}{2}
+% \begin{itemize}
+% \incitem{luatex-basics-gen.lua}
+% \incitem{luatex-basics-nod.lua}
+% \incitem{luatex-fonts-enc.lua}
+% \incitem{luatex-fonts-syn.lua}
+% \incitem{luatex-fonts-tfm.lua}
+% \incitem{luatex-fonts-chr.lua}
+% \incitem{luatex-fonts-lua.lua}
+% \incitem{luatex-fonts-def.lua}
+% \incitem{luatex-fonts-ext.lua}
+% \incitem{luatex-fonts-cbk.lua}
+% \end{itemize}
+% \end{multicols}
+%
+% \normalitem Code related to \emphasis{font handling and
+% node processing}, taken directly from
+% \CONTEXT.
+% \begin{multicols}{2}
+% \begin{itemize}
+% \incitem{data-con.lua} \incitem{font-ini.lua}
+% \incitem{font-con.lua} \incitem{font-cid.lua}
+% \incitem{font-map.lua} \incitem{font-oti.lua}
+% \incitem{font-otf.lua} \incitem{font-otb.lua}
+% \incitem{node-inj.lua} \incitem{font-ota.lua}
+% \incitem{font-otn.lua} \incitem{font-def.lua}
+% \end{itemize}
+% \end{multicols}
+% \end{itemize}
+%
+% Note that if \identifier{luaotfload} cannot locate the
+% merged file, it will load the individual \LUA libraries
+% instead.
+% Their names remain the same as in \CONTEXT (without the
+% \verb|otfl|-prefix) since we imported the relevant section of
+% \fileent{luatex-fonts.lua} unmodified into \fileent{luaotfload.lua}.
+% Thus if you prefer running bleeding edge code from the
+% \CONTEXT beta, all you have to do is remove
+% \fileent{luaotfload-merged.lua} from the search path.
+%
+% Also, the merged file at some point
+% loads the Adobe Glyph List from a \LUA table that is contained in
+% \fileent{font-age.lua}, which is automatically generated by the
+% script \fileent{mkglyphlist}.\footnote{%
+% See \fileent{luaotfload-font-enc.lua}.
+% The hard-coded file name is why the file lacks the \fileent{luaotfload-}
+% prefix.
+% }
+% There is a make target \identifier{glyphs} that will create a fresh
+% \fileent{font-age.lua} so we don’t need to import it from \CONTEXT
+% any longer.
+%
+% In addition to these, \identifier{luaotfload} requires a number of
+% files not contained in the merge. Some of these have no equivalent in
+% \LUATEX-Fonts or \CONTEXT, some were taken unmodified from the
+% latter.
+%
+% \begin{itemize}
+% \let\normalitem=\item
+% \def\ouritem#1{%
+% \normalitem{\fileent{#1}}%
+% \space--\hskip1em
+% }
+% \ouritem {luaotfload-font-otc.lua} \fileent{font-otc} from \CONTEXT;
+% font feature handling.
+% \ouritem {luaotfload-lib-dir.lua} \fileent{l-dir} from \CONTEXT;
+% contains functionality required
+% by \fileent{luaotfload-font-nms.lua}.
+% \ouritem {luaotfload-luat-ovr.lua} overrides the \CONTEXT logging
+% functionality.
+% \ouritem {luaotfload-font-pfb.lua} registers the \OpenType
+% font reader as handler for
+% Postscript fonts.
+% \ouritem {luaotfload-font-nms.lua} font database.
+% \ouritem {luaotfload-font-clr.lua} color handling.
+% \ouritem {luaotfload-font-ltx.lua} font feature handling.
+% \ouritem {luaotfload-features.lua} definitions of the \verb|anum| and
+% \verb|tlig| features.
+% \end{itemize}
+%
+% \begin{figure}[b]
+% \caption{Schematic of the files in \identifier{Luaotfload}}
+% \includegraphics[width=\textwidth]{filegraph.pdf}
+% \label{file-graph}
+% \end{figure}
%
-% The \context files are renamed by adding the prefix |otfl-| to them (|otfl|
-% as |OTF L|oad). The files are:
+% \section{Troubleshooting}
%
-% \begin{multicols}{3}
-% \begin{itemize*}
-% \item |data-con.lua|
-% \item |font-cid.lua|
-% \item |font-con.lua|
-% \item |font-def.lua|
-% \item |font-ini.lua|
-% \item |font-map.lua|
-% \item |font-ota.lua|
-% \item |font-otb.lua|
-% \item |font-otc.lua|
-% \item |font-otf.lua|
-% \item |font-oti.lua|
-% \item |font-otn.lua|
-% \item |node-inj.lua|
-% \item |luatex-fonts-cbk.lua|
-% \item |luatex-fonts-enc.lua|
-% \item |luatex-fonts-ext.lua|
-% \item |luatex-fonts-lua.lua|
-% \item |luatex-fonts-tfm.lua|
-% \item |luatex-basics-gen.lua|
-% \item |luatex-basics-nod.lua|
-% \item |font-age.lua|\footnote{Not renamed as it is loaded directly from
-% |fonts-enc.lua|.}
-% \end{itemize*}
-% \end{multicols}
+% If you encounter problems with some fonts, please first update to the latest
+% version of this package before reporting a bug, as
+% \identifier{luaotfload} is under active development and still a
+% moving target.
%
-% The following files have been written for this package:
-% \begin{itemize*}
-% \item |otfl-font-clr.lua|
-% \item |otfl-font-nms.lua|
-% \item |otfl-luat-ovr.lua|
-% \item |otfl-font-ltx.lua|\footnote{A heavily modified version of
-% |luatex-fonts-def.lua|.}
-% \end{itemize*}
+% Errors during database generation can be traced by increasing
+% verbosity levels and redirecting log output to \fileent{stdout}:
%
-% \section{Troubleshooting}
+% \begin{verbatim}
+% fontdbutil -fuvvv --log=stdout
+% \end{verbatim}
%
-% If you encounter problems with some fonts, please first update to the latest
-% version of this package before reporting a bug, as this package is under
-% active development.
+% If this fails, the font last printed to the terminal is likely to be
+% the culprit.
+% Please specify it when reporting a bug, and blacklist it for the time
+% being (see above, page \pageref{font-blacklist}).
+%
+% A common problem is the lack of features for some
+% \OpenType fonts even when specified.
+% This can be related to the fact that some fonts do not provide
+% features for the \verb|dflt| script (see above on page
+% \pageref{script-tag}),
+% which is the default one in this package.
+% If this happens, explicitly assigning a script when the font is defined should
+% fix it.
+% For example with \verb|latn|:
+%
+% \begin{verbatim}
+% \font\test=file:MyFont.otf:script=latn;+liga;
+% \end{verbatim}
+%
+% \part{Implementation}
%
-% A very common problem is the lack of features for some OpenType fonts even
-% when specified. It can be related to the fact that some fonts do not provide
-% features for the |dflt| script, which is the default one in this package, so
-% you may have to specify the script in the command line, for example:
+% \section{\fileent{luaotfload.lua}}
%
-% |\font\test=file:MyFont.otf:script=latn;+liga;|
+% This file initializes the system and loads the font loader.
+% To minimize potential conflicts between other packages and the
+% code imported from \CONTEXT, several precautions are in order.
+% Some of the functionality that the font loader expects to be present,
+% like raw access to callbacks, is assumed to have been disabled by
+% \identifier{luatexbase} when this file is processed.
+% In some cases it is possible to trick it by putting dummies into
+% place and restoring the behavior from \identifier{luatexbase} after
+% initialization.
+% Other cases, such as attribute allocation, require that we hook the
+% functionality from \identifier{luatexbase} into locations where it
+% normally wouldn’t be.
%
-% \part{\texttt{luaotfload.lua}}
+% Anyway, we can import the code base without modifications, which is
+% due mostly to the extra effort by
+% Hans Hagen to make \LUATEX-Fonts self-contained and encapsulate it,
+% and especially due to his willingness to incorporate our suggestions.
%
% \iffalse
%<*lua>
% \fi
-%
-% \section{Initializations}
-%
-% \begin{macrocode}
-module("luaotfload", package.seeall)
-% \end{macrocode}
-%
% \begin{macrocode}
+luaotfload = luaotfload or {}
+local luaotfload = luaotfload
+
+config = config or { }
+config.luaotfload = config.luaotfload or { }
+luaotfload.prefer_merge = config.luaotfload.prefer_merge or true
+
luaotfload.module = {
name = "luaotfload",
- version = 2.0,
- date = "2011/10/06",
+ version = 2.2,
+ date = "2013/04/15",
description = "OpenType layout system.",
author = "Elie Roux & Hans Hagen",
copyright = "Elie Roux",
license = "CC0"
}
+
+local luatexbase = luatexbase
+
+local type, next = type, next
+local setmetatable = setmetatable
+local stringfind = string.find
+local stringsub = string.sub
+local stringmatch = string.match
+local stringformat = string.format
+local find_file = kpse.find_file
+
+local add_to_callback, create_callback =
+ luatexbase.add_to_callback, luatexbase.create_callback
+local reset_callback, call_callback =
+ luatexbase.reset_callback, luatexbase.call_callback
+
+local dummy_function = function () end
+
% \end{macrocode}
+% No final decision has been made on how to handle font definition. At
+% the moment, there are three candidates: the \identifier{generic}
+% callback as hard-coded in the font loader, the \identifier{old}
+% wrapper, and a simplified version of the latter (\identifier{patch})
+% that does nothing besides applying font patches.
%
% \begin{macrocode}
-local error, warning, info, log = luatexbase.provides_module(luaotfload.module)
+
+luaotfload.font_definer = "patch" --- | “generic” | “old”
+
+local error, warning, info, log =
+ luatexbase.provides_module(luaotfload.module)
+
% \end{macrocode}
%
-% The minimal required \luatex version.
-%
-% \begin{macrocode}
-local luatex_version = 70
-% \end{macrocode}
+% We set the minimum version requirement for \LUATEX to v0.74, as it was
+% the first version to include version 5.2 of the \LUA interpreter.
%
% \begin{macrocode}
+
+local luatex_version = 74
+
if tex.luatexversion < luatex_version then
warning("LuaTeX v%.2f is old, v%.2f is recommended.",
tex.luatexversion/100,
luatex_version /100)
end
+
% \end{macrocode}
+% \subsection{Module loading}
+% We load the files imported from \CONTEXT with this function.
+% It automatically prepends the prefix \fileent{luaotfload-} to its
+% argument, so we can refer to the files with their actual \CONTEXT name.
%
-% Some required functions missing from \textsf{lualibs} package.
+% \begin{macrocode}
+
+local fl_prefix = "luaotfload" -- “luatex” for luatex-plain
+local loadmodule = function (name)
+ require(fl_prefix .."-"..name)
+end
+
+% \end{macrocode}
+% Before the \TeX Live 2013 release, \LUATEX had a bug that made ofm fonts fail
+% when called with their extension. As a side effect, ofm fonts became
+% completely unloadable when \identifier{luaotfload} was present. The following
+% lines are a patch for this bug. Their utility is questionable, as they have
+% not been necessary since \TeX Live 2013; they should be removed in the next
+% version.
%
% \begin{macrocode}
-function table.reversed(t)
- if t then
- local tt, tn = { }, #t
- if tn > 0 then
- local ttn = 0
- for i=tn,1,-1 do
- ttn = ttn + 1
- tt[ttn] = t[i]
- end
- end
- return tt
+local Cs, P, lpegmatch = lpeg.Cs, lpeg.P, lpeg.match
+
+local p_dot, p_slash = P".", P"/"
+local p_suffix = (p_dot * (1 - p_dot - p_slash)^1 * P(-1)) / ""
+local p_removesuffix = Cs((p_suffix + 1)^1)
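+--- e.g. lpegmatch(p_removesuffix, "omarab.ofm") --> "omarab";
+--- names without an extension pass through unchanged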
+
+local find_vf_file = function (name)
+ local fullname = find_file(name, "ovf")
+ if not fullname then
+ --fullname = find_file(file.removesuffix(name), "ovf")
+ fullname = find_file(lpegmatch(p_removesuffix, name), "ovf")
+ end
+ if fullname then
+ log("loading virtual font file %s.", fullname)
end
+ return fullname
end
+
+% \end{macrocode}
+% \subsection{Preparing the Font Loader}
+% We treat the fontloader as a black box so behavior is consistent
+% between formats.
+% We no longer run the intermediate wrapper file
+% \fileent{luaotfload-fonts.lua} which we used to import from
+% \href{http://standalone.contextgarden.net/current/context/experimental/tex/generic/context/luatex/}{\LUATEX-Plain}.
+% Rather, we load the fontloader code directly in the same fashion as
+% \identifier{luatex-fonts}.
+% How this is executed depends on the presence of the \emphasis{merged
+% font loader code}.
+% In \identifier{luaotfload} this is contained in the file
+% \fileent{luaotfload-merged.lua}.
+% If this file cannot be found, the original libraries from \CONTEXT of
+% which the merged code was composed are loaded instead.
+%
+% The imported font loader will call \luafunction{callback.register} once
+% while reading \fileent{font-def.lua}.
+% This is unavoidable unless we modify the imported files, but harmless
+% if we make it call a dummy instead.
+% However, this problem might vanish if we decide to do the merging
+% ourselves, like the \identifier{lualibs} package does.
+% With this step we would obtain the freedom to load our own overrides in
+% the process right where they are needed, at the cost of losing
+% encapsulation.
+% The decision on how to progress is currently on indefinite hold.
+%
+% \begin{macrocode}
+
+local starttime = os.gettimeofday()
+
+local trapped_register = callback.register
+callback.register = dummy_function
+
% \end{macrocode}
+% By default, the fontloader requires a number of \emphasis{private
+% attributes} for internal use.
+% These must be kept consistent with the attribute handling methods as
+% provided by \identifier{luatexbase}.
+% Our strategy is to override the function that allocates new attributes
+% before we initialize the font loader, making it a wrapper around
+% \luafunction{luatexbase.new_attribute}.\footnote{%
+% Many thanks, again, to Hans Hagen for making this part
+% configurable!
+% }
+% The attribute identifiers are prefixed “\fileent{luaotfload@}” to
+% avoid name clashes.
%
% \begin{macrocode}
-function table.derive(parent)
- local child = { }
- if parent then
- setmetatable(child,{ __index = parent })
+
+do
+ local new_attribute = luatexbase.new_attribute
+ local the_attributes = luatexbase.attributes
+
+ attributes = attributes or { }
+
+ attributes.private = function (name)
+ local attr = "luaotfload@" .. name --- used to be: “otfl@”
+ local number = the_attributes[attr]
+ if not number then
+ number = new_attribute(attr)
+ end
+ return number
end
- return child
end
+
% \end{macrocode}
+% These next lines replicate the behavior of \fileent{luatex-fonts.lua}.
%
% \begin{macrocode}
-function string.quoted(str)
- return string.format("%q",str)
+
+local context_environment = { }
+
+local push_namespaces = function ()
+ log("push namespace for font loader")
+ local normalglobal = { }
+ for k, v in next, _G do
+ normalglobal[k] = v
+ end
+ return normalglobal
+end
+
+local pop_namespaces = function (normalglobal, isolate)
+ if normalglobal then
+ local _G = _G
+ local mode = "non-destructive"
+ if isolate then mode = "destructive" end
+ log("pop namespace from font loader -- "..mode)
+ for k, v in next, _G do
+ if not normalglobal[k] then
+ context_environment[k] = v
+ if isolate then
+ _G[k] = nil
+ end
+ end
+ end
+ for k, v in next, normalglobal do
+ _G[k] = v
+ end
+ -- just to be sure:
+ setmetatable(context_environment,_G)
+ else
+ log("irrecoverable error during pop_namespace: no globals to restore")
+ os.exit()
+ end
end
+
+luaotfload.context_environment = context_environment
+luaotfload.push_namespaces = push_namespaces
+luaotfload.pop_namespaces = pop_namespaces
+
+local our_environment = push_namespaces()
+
% \end{macrocode}
-%
-% \section{Module loading}
+% The font loader requires that the attribute with index zero be zero.
+% We happily oblige.
+% (Cf. \fileent{luatex-fonts-nod.lua}.)
%
% \begin{macrocode}
-require('otfl-basics-gen.lua')
-require('otfl-luat-ovr.lua') -- overrides some otfl-basics-gen.lua functions
-require('otfl-data-con.lua')
-require('otfl-basics-nod.lua')
+
+tex.attribute[0] = 0
+
% \end{macrocode}
-%
-% By default \context takes some private attributes for internal use. To
-% avoide attribute clashes with other packages, we override the function
-% that allocates new attributes, making it a wraper around
-% |luatexbase.new_attribute()|. We also prefix attributes with |otfl@| to
-% avoid possiple name clashes.
+% Now that things are sorted out we can finally load the fontloader.
%
% \begin{macrocode}
-function attributes.private(name)
- local attr = "otfl@" .. name
- local number = luatexbase.attributes[attr]
- if not number then
- number = luatexbase.new_attribute(attr)
+
+loadmodule"merged.lua"
+
+if fonts then
+
+ if not fonts._merge_loaded_message_done_ then
+ --- a program talking first person -- HH sure believes in strong AI ...
+ log[[“I am using the merged version of 'luaotfload.lua' here. If]]
+ log[[ you run into problems or experience unexpected behaviour,]]
+ log[[ and if you have ConTeXt installed you can try to delete the]]
+ log[[ file 'luaotfload-font-merged.lua' as I might then use the]]
+ log[[ possibly updated libraries. The merged version is not]]
+ log[[ supported as it is a frozen instance. Problems can be]]
+ log[[ reported to the ConTeXt mailing list.”]]
end
- return number
-end
+ fonts._merge_loaded_message_done_ = true
+
+else--- the loading sequence is known to change, so this might have to
+ --- be updated with future updates!
+ --- do not modify it though unless there is a change to the merged
+ --- package!
+ loadmodule("l-lua.lua")
+ loadmodule("l-lpeg.lua")
+ loadmodule("l-function.lua")
+ loadmodule("l-string.lua")
+ loadmodule("l-table.lua")
+ loadmodule("l-io.lua")
+ loadmodule("l-file.lua")
+ loadmodule("l-boolean.lua")
+ loadmodule("l-math.lua")
+ loadmodule("util-str.lua")
+ loadmodule('luatex-basics-gen.lua')
+ loadmodule('data-con.lua')
+ loadmodule('luatex-basics-nod.lua')
+ loadmodule('font-ini.lua')
+ loadmodule('font-con.lua')
+ loadmodule('luatex-fonts-enc.lua')
+ loadmodule('font-cid.lua')
+ loadmodule('font-map.lua')
+ loadmodule('luatex-fonts-syn.lua')
+ loadmodule('luatex-fonts-tfm.lua')
+ loadmodule('font-oti.lua')
+ loadmodule('font-otf.lua')
+ loadmodule('font-otb.lua')
+ loadmodule('node-inj.lua')
+ loadmodule('font-ota.lua')
+ loadmodule('font-otn.lua')
+ loadmodule('luatex-fonts-lua.lua')
+ loadmodule('font-def.lua')
+ loadmodule('luatex-fonts-def.lua')
+ loadmodule('luatex-fonts-ext.lua')
+ loadmodule('luatex-fonts-cbk.lua')
+end --- non-merge fallback scope
+
% \end{macrocode}
+% Here we adjust the globals created during font loader initialization.
+% If the second argument to \luafunction{pop_namespaces()} is \verb|true|
+% this will restore the state of \luafunction{_G}, eliminating every
+% global generated since the last call to \luafunction{push_namespaces()}.
+% At the moment we see no reason to do this, and since the font loader is
+% considered an essential part of \identifier{luatex} as well as a very
+% well organized piece of code, we happily concede it the right to add to
+% \luafunction{_G} if needed.
%
% \begin{macrocode}
-require('otfl-font-ini.lua')
-require('otfl-font-con.lua')
-require('otfl-fonts-enc.lua')
-require('otfl-font-cid.lua')
-require('otfl-font-map.lua')
-require('otfl-font-nms.lua')
-require('otfl-fonts-tfm.lua')
-require('otfl-font-oti.lua')
-require('otfl-font-otf.lua')
-require('otfl-font-pfb.lua')
-require('otfl-font-otb.lua')
-require('otfl-node-inj.lua')
-require('otfl-font-otn.lua')
-require('otfl-font-ota.lua')
-require('otfl-font-otc.lua')
-require('otfl-fonts-lua.lua')
-require('otfl-font-def.lua')
-require('otfl-font-ltx.lua')
-require('otfl-fonts-ext.lua')
-require('otfl-fonts-cbk.lua')
-require('otfl-font-clr.lua')
+
+pop_namespaces(our_environment, false)-- true)
+
+log("fontloader loaded in %0.3f seconds", os.gettimeofday()-starttime)
+
% \end{macrocode}
+% \subsection{Callbacks}
+% After the fontloader is ready we can restore the callback trap from
+% \identifier{luatexbase}.
%
-% Here we override some defaults set in \context code.
+% \begin{macrocode}
+
+callback.register = trapped_register
+
+% \end{macrocode}
+% We do our own callback handling with the means provided by luatexbase.
+% Note: \luafunction{pre_linebreak_filter} and \luafunction{hpack_filter}
+% are coupled in \CONTEXT under the concept of a \emphasis{node processor}.
%
% \begin{macrocode}
-fonts.mode = "node"
-caches.compilemethod = "both"
+
+add_to_callback("pre_linebreak_filter",
+ nodes.simple_font_handler,
+ "luaotfload.node_processor",
+ 1)
+add_to_callback("hpack_filter",
+ nodes.simple_font_handler,
+ "luaotfload.node_processor",
+ 1)
+add_to_callback("find_vf_file",
+ find_vf_file, "luaotfload.find_vf_file")
+
+loadmodule"lib-dir.lua" --- required by luaofload-database.lua
+loadmodule"override.lua" --- “luat-ovr”
+
% \end{macrocode}
+% \CONTEXT does not support ofm; these lines were added in order to make it
+% work. However, they do not seem necessary, so they are commented out for now.
%
-% Now overriding the \context's definition of |tlig| and |trep| features,
-% using code points instead of glyph names to make it font independent.
+% \begin{macrocode}
+-- if fonts and fonts.readers.tfm then
+-- fonts.readers.ofm = fonts.readers.tfm
+-- fonts.handlers.ofm = fonts.handlers.tfm --- empty anyways
+-- fonts.formats.ofm = fonts.formats.tfm --- “type1”
+-- --- fonts.readers.sequence[#fonts.readers.sequence+1] = "ofm"
+--end
+% \end{macrocode}
+% Now we load the modules written for \identifier{luaotfload}.
%
% \begin{macrocode}
-local everywhere = { ["*"] = { ["*"] = true } }
-
-local tlig = {
- {
- type = "substitution",
- features = everywhere,
- data = {
- [0x0022] = 0x201D, -- quotedblright
- [0x0027] = 0x2019, -- quoteleft
- [0x0060] = 0x2018, -- quoteright
- },
- flags = { },
- },
- {
- type = "ligature",
- features = everywhere,
- data = {
- [0x2013] = {0x002D, 0x002D}, -- endash
- [0x2014] = {0x002D, 0x002D, 0x002D}, -- emdash
- [0x201C] = {0x2018, 0x2018}, -- quotedblleft
- [0x201D] = {0x2019, 0x2019}, -- quotedblright
- [0x201E] = {0x002C, 0x002C}, -- quotedblbase
- [0x00A1] = {0x0021, 0x2018}, -- exclamdown
- [0x00BF] = {0x003F, 0x2018}, -- questiondown
- },
- flags = { },
- },
- {
- type = "ligature",
- features = everywhere,
- data = {
- [0x201C] = {0x0060, 0x0060}, -- quotedblleft
- [0x201D] = {0x0027, 0x0027}, -- quotedblright
- [0x00A1] = {0x0021, 0x0060}, -- exclamdown
- [0x00BF] = {0x003F, 0x0060}, -- questiondown
- },
- flags = { },
- },
-}
+loadmodule"loaders.lua" --- “font-pfb” new in 2.0, added 2011
+loadmodule"database.lua" --- “font-nms”
+loadmodule"colors.lua" --- “font-clr”
-fonts.handlers.otf.addfeature("tlig", tlig)
-fonts.handlers.otf.addfeature("trep", { }) -- empty, all in tlig now
% \end{macrocode}
+% This hack makes fonts requested with the file: method be resolved through
+% fonts.names.resolve instead of merely being looked up with kpse. It is
+% necessary for fonts that are not accessible to kpse but present in the
+% database, a fairly common situation under Linux.
%
-% And overriding the \context's definition of |anum|.
+% \begin{macrocode}
+
+fonts.definers.resolvers.file = function (specification)
+ specification.name = fonts.names.resolve('', '', specification)
+end
+
+% \end{macrocode}
+% We create a callback for patching fonts on the fly, to be used by other
+% packages.
+% It initially contains the empty function that we are going to override
+% below.
%
% \begin{macrocode}
-local anum_arabic = {
- [0x0030] = 0x0660,
- [0x0031] = 0x0661,
- [0x0032] = 0x0662,
- [0x0033] = 0x0663,
- [0x0034] = 0x0664,
- [0x0035] = 0x0665,
- [0x0036] = 0x0666,
- [0x0037] = 0x0667,
- [0x0038] = 0x0668,
- [0x0039] = 0x0669,
-}
-local anum_persian = {
- [0x0030] = 0x06F0,
- [0x0031] = 0x06F1,
- [0x0032] = 0x06F2,
- [0x0033] = 0x06F3,
- [0x0034] = 0x06F4,
- [0x0035] = 0x06F5,
- [0x0036] = 0x06F6,
- [0x0037] = 0x06F7,
- [0x0038] = 0x06F8,
- [0x0039] = 0x06F9,
-}
+create_callback("luaotfload.patch_font", "simple", dummy_function)
-local function valid(data)
- local features = data.resources.features
- if features then
- for k, v in next, features do
- for k, v in next, v do
- if v.arab then
- return true
+% \end{macrocode}
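+%
+% Client packages can hook into this callback through
+% \identifier{luatexbase} to adjust each font as it is loaded.
+% A minimal, hypothetical sketch (only the callback name
+% \verb|luaotfload.patch_font| is fixed; the handler and its
+% identifier are made up):
+%
+% \begin{verbatim}
+% luatexbase.add_to_callback("luaotfload.patch_font",
+%     function (tfmdata)
+%         -- inspect or tweak the font table here
+%     end,
+%     "mypackage.patch_font")
+% \end{verbatim}
+%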
+% This is a wrapper for the imported font loader.
+% As of 2013, everything it does appears to be redundant, so we won’t use
+% it unless somebody points out a cogent reason.
+% Nevertheless, it has been adapted to work with the current structure of
+% font data objects and will stay here for reference / until breakage is
+% reported.
+% \emphasis{TODO}
+% This one also enables patching fonts.
+% The current fontloader apparently comes with a dedicated mechanism for
+% that already: enhancers.
+% How those work remains to be figured out.
+%
+% \begin{macrocode}
+local define_font_wrapper = function (...)
+ --- we use “tfmdata” (not “fontdata”) for consistency with the
+ --- font loader
+ local tfmdata = fonts.definers.read(...)
+ if type(tfmdata) == "table" and tfmdata.shared then
+ local metadata = tfmdata.shared.rawdata.metadata
+ local mathdata = metadata.math --- do all fonts have this field?
+ if mathdata then
+ local mathconstants = { } --- why new hash, not modify in place?
+ local units_per_em = metadata.units_per_em
+ local size = tfmdata.size
+ for k,v in next, mathdata do
+ --- afaics this is alread taken care of by
+ --- definers.read
+ if stringfind(k, "Percent") then
+ -- keep percent values as is
+ print(k,v)
+ mathconstants[k] = v
+ else
+ mathconstants[k] = v / units_per_em * size
end
end
+ --- for \overwithdelims
+ --- done by definers.read as well
+ mathconstants.FractionDelimiterSize = 1.01 * size
+ --- fontloader has 2.4 × size
+ mathconstants.FractionDelimiterDisplayStyleSize = 2.39 * size
+ tfmdata.MathConstants = mathconstants
end
+ call_callback("luaotfload.patch_font", tfmdata)
end
+ return tfmdata
end
-local anum_specification = {
- {
- type = "substitution",
- features = { arab = { far = true, urd = true, snd = true } },
- data = anum_persian,
- flags = { },
- valid = valid,
- },
- {
- type = "substitution",
- features = { arab = { ["*"] = true } },
- data = anum_arabic,
- flags = { },
- valid = valid,
- },
-}
-
-fonts.handlers.otf.addfeature("anum",anum_specification)
-% \end{macrocode}
-%
-% we provide a callback for patching fonts on the fly, to be used by other
-% packages.
-%
-% \begin{macrocode}
-luatexbase.create_callback("luaotfload.patch_font", "simple", function() end)
% \end{macrocode}
+% \subsection{\CONTEXT override}
+% We provide a simplified version of the original font definition
+% callback.
%
% \begin{macrocode}
-local function deffont(...)
- local fontdata = fonts.definers.read(...)
- if type(fontdata) == "table" then
- luatexbase.call_callback("luaotfload.patch_font", fontdata)
+
+local read_font_file = fonts.definers.read
+local patch_defined_font = function (...)
+    local tfmdata = read_font_file(...) -- spec -> size -> id -> tfmdata
+ if type(tfmdata) == "table" then
+ call_callback("luaotfload.patch_font", tfmdata)
end
- return fontdata
+ -- inspect(table.keys(tfmdata))
+ return tfmdata
end
+
+caches.compilemethod = "both"
+
+reset_callback("define_font")
+
% \end{macrocode}
-%
-% Finally we register the callbacks
+% Finally we register the callbacks.
%
% \begin{macrocode}
-local handler = nodes.simple_font_handler
-luatexbase.reset_callback("define_font")
-luatexbase.add_to_callback("pre_linebreak_filter", handler, "luaotfload")
-luatexbase.add_to_callback("hpack_filter", handler, "luaotfload")
-luatexbase.add_to_callback("define_font", deffont, "luaotfload")
+
+if luaotfload.font_definer == "old" then
+ add_to_callback("define_font",
+ define_font_wrapper,
+ "luaotfload.define_font",
+ 1)
+elseif luaotfload.font_definer == "generic" then
+ add_to_callback("define_font",
+ fonts.definers.read,
+ "luaotfload.define_font",
+ 1)
+elseif luaotfload.font_definer == "patch" then
+ add_to_callback("define_font",
+ patch_defined_font,
+ "luaotfload.define_font",
+ 1)
+end
+
+--[[todo--
+--- The manual promises coercion of the file: lookup if
+--- the asked name is enclosed in brackets.
+--- A couple things make me doubt that this is the case:
+---
+--- 1) there doesn’t appear to be code for these cases
+--- 2) the brackets remain part of the file name
+--- 3) we still get calls to names.resolve which
+--- ignores the “lookup” field of the spec it gets
+---
+--- For this reason here is some code that a) coerces
+--- file: lookups in these cases and b) strips the brackets
+--- from the file name. As we *still* get name: lookups even
+--- though this code is active I’ll just leave it here
+--- for reference, ineffective as it is.
+do
+ local getspecification, makespecification =
+ fonts.definers.getspecification, fonts.definers.makespecification
+
+ local analyze = function (specification, size)
+ local lookup, name, sub, method, detail = getspecification(specification or "")
+ local filename = stringmatch(name, "^%[(.*)%]$")
+ if filename then
+ lookup = "file" --> coerce file:
+ name = filename --> remove brackets
+ end
+ return makespecification(specification, lookup, name, sub, method, detail, size)
+ end
+ fonts.definers.analyze = analyze
+end
+--]]--
+
+loadmodule"features.lua" --- contains what was “font-ltx” and “font-otc”
+
+-- vim:tw=71:sw=4:ts=4:expandtab
+
+
% \end{macrocode}
%
% \iffalse
%</lua>
% \fi
%
-% \part{\texttt{luaotfload.sty}}
+% \section{\fileent{luaotfload.sty}}
%
% \iffalse
%<*package>
% \fi
%
-% Classical Plain+\latex package initialization.
+% Classical Plain+\LATEX package initialization.
%
% \begin{macrocode}
\csname ifluaotfloadloaded\endcsname
\let\ifluaotfloadloaded\endinput
-% \end{macrocode}
-%
-% \begin{macrocode}
\bgroup\expandafter\expandafter\expandafter\egroup
\expandafter\ifx\csname ProvidesPackage\endcsname\relax
\input luatexbase.sty
\else
\NeedsTeXFormat{LaTeX2e}
\ProvidesPackage{luaotfload}%
- [2011/10/06 v2.0 OpenType layout system]
+ [2013/04/16 v2.2 OpenType layout system]
\RequirePackage{luatexbase}
\fi
-% \end{macrocode}
-%
-% \begin{macrocode}
-\RequireLuaModule{lualibs}
-% \end{macrocode}
-%
-% \begin{macrocode}
\RequireLuaModule{luaotfload}
+\endinput
% \end{macrocode}
% \iffalse
%</package>
@@ -902,7 +1649,7 @@ luatexbase.add_to_callback("define_font", deffont, "luaotfload")
%
% \begin{enumerate}
%
-% \item
+% \item
% You must cause the modified files to carry prominent notices stating that
% you changed the files and the date of any change.
%
diff --git a/mkglyphlist b/mkglyphlist
new file mode 100755
index 0000000..9ee1528
--- /dev/null
+++ b/mkglyphlist
@@ -0,0 +1,143 @@
+#!/usr/bin/env texlua
+-----------------------------------------------------------------------
+-- FILE: mkglyphlist.lua
+-- USAGE: ./mkglyphlist.lua
+-- DESCRIPTION: part of the luaotfload package
+-- REQUIREMENTS: lua, lpeg, luasocket, the lualibs package
+-- AUTHOR: Philipp Gesang (Phg), <phg42.2a@gmail.com>
+-- VERSION: 1.0
+-- CREATED: 04/23/2013 12:42:17 PM CEST
+-----------------------------------------------------------------------
+-- config
+-----------------------------------------------------------------------
+local glyphfile = "./glyphlist.txt"
+local font_age = "./font-age.lua"
+local glyph_source = "http://partners.adobe.com/public/developer/en/opentype/glyphlist.txt"
+
+-----------------------------------------------------------------------
+-- includes
+-----------------------------------------------------------------------
+require"lpeg"
+require"socket"
+
+kpse.set_program_name"luatex"
+dofile(kpse.find_file("lualibs-lua.lua", "lua"))
+dofile(kpse.find_file("lualibs-lpeg.lua", "lua"))
+dofile(kpse.find_file("lualibs-table.lua", "lua")) --- for serialization
+
+local C, Cf, Cg, Ct, P, R =
+ lpeg.C, lpeg.Cf, lpeg.Cg, lpeg.Ct, lpeg.P, lpeg.R
+
+local http = socket.http
+
+-----------------------------------------------------------------------
+-- functionality
+-----------------------------------------------------------------------
+
+local dec_of_hex = function (hex) return tonumber(hex, 16) end
+
+local separator = P";"
+local gartenzaun = P"#"
+local eol = P"\n\r" + P"\r\n" + P"\r" + P"\n"
+local space = P" "
+local alphanum = R("az", "AZ", "09")
+local hexdigit = R("af", "AF", "09")
+local eof_tag = gartenzaun * P"--end"
+local header_line = gartenzaun * (1-eol)^0 * eol
+local codepoint = hexdigit^1
+local glyphname = alphanum^1
+
+local definition = Cg(C(glyphname) * separator * (C(codepoint)/ dec_of_hex))
+ --- With combined glyphs we take only the first
+ --- value as char-def and font-age do, and skip
+ --- the rest.
+ * (space * codepoint)^0
+ * eol
+local definitions = Cf(Ct"" * definition^1, rawset)
+
+local p_glyphs = header_line^0 * definitions * eof_tag
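+--- Each data line of glyphlist.txt has the shape
+--- "glyphname;codepoint[ codepoint ...]", e.g. "Aacute;00C1";
+--- only the first codepoint is kept, mapping the glyph name to its
+--- decimal value in the resulting table.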
+
+local get_glyphs = function (data)
+ local res = lpeg.match(p_glyphs, data)
+ if not res then
+ print("error: could not parse glyph list")
+ os.exit(-1)
+ end
+ return res
+end
+
+local file_header = [==[
+if not modules then modules = { } end modules ["font-age"] = {
+ version = 2.200,
+ comment = "part of the luaotfload package",
+ author = "luaotfload team / mkglyphlist",
+ copyright = "derived from %s",
+ original = "Adobe Glyph List, version 2.0, September 20, 2002",
+ dataonly = true,
+}
+
+if context then
+ logs.report("fatal error","this module is not for context")
+ os.exit(-1)
+end
+
+--[[doc--
+Everything below has been autogenerated. Run mkglyphlist to rebuild
+font-age.lua.
+--doc]]--
+
+]==]
+
+local writedata = function (data)
+ data = table.serialize(data, true)
+ data = string.format(file_header, glyph_source) .. data
+ local fh = io.open(font_age, "wb")
+ if not fh then
+ print(string.format("error: %s not writable", font_age))
+ os.exit(-1)
+ end
+ print(string.format("saving %d bytes to %s", #data, font_age))
+ fh:write(data)
+ fh:close()
+end
+
+
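+--- Read the raw glyph list from disk; if it is missing, attempt to
+--- download it once and retry. The repeated "get_raw" is a forward
+--- declaration so the function can call itself recursively.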
+local get_raw get_raw = function (retry)
+ local fh = io.open(glyphfile, "rb")
+ if fh then
+ local data = fh:read"*all"
+ fh:close()
+ if data then return data end
+ elseif not retry then --- attempt download
+ print"info: retrieving glyph list from"
+ print(glyph_source)
+ local glyphdata = http.request(glyph_source)
+ if glyphdata then
+ local fh = io.open(glyphfile, "wb")
+ if not fh then
+ print"error: glyph file not writable"
+ os.exit(-1)
+ end
+ fh:write(glyphdata)
+ fh:close()
+ return get_raw(true)
+ end
+ print"error: download failed"
+ os.exit(-1)
+ end
+ print("error: could not obtain glyph data from "..glyphfile)
+ os.exit(-1)
+end
+
+local main = function ()
+ if arg[1] then glyphfile = arg[1] end
+ if arg[2] then font_age = arg[2] end
+
+ local data = get_raw()
+ local parsed = get_glyphs(data)
+ writedata(parsed)
+ return 0
+end
+
+
+return main()
diff --git a/mkluatexfontdb.lua b/mkluatexfontdb.lua
deleted file mode 100755
index 4275c84..0000000
--- a/mkluatexfontdb.lua
+++ /dev/null
@@ -1,101 +0,0 @@
-#!/usr/bin/env texlua
---[[
-This file was originally written by Elie Roux and Khaled Hosny and is under CC0
-license (see http://creativecommons.org/publicdomain/zero/1.0/legalcode).
-
-This file is a wrapper for the luaotfload's font names module. It is part of the
-luaotfload bundle, please see the luaotfload documentation for more info.
---]]
-
-kpse.set_program_name("luatex")
-
-function string.quoted(s) return string.format("%q",s) end -- XXX
-
-require("lualibs")
-require("otfl-basics-gen.lua")
-require("otfl-font-nms")
-require("alt_getopt")
-
-local name = 'mkluatexfontdb'
-local version = '1.07' -- same version number as luaotfload
-
-local names = fonts.names
-
-local function help_msg()
- texio.write(string.format([[
-Usage: %s [OPTION]...
-
-Rebuild the LuaTeX font database.
-
-Valid options:
- -f --force force re-indexing all fonts
- -q --quiet don't output anything
- -v --verbose=LEVEL be more verbose (print the searched directories)
- -vv print the loaded fonts
- -vvv print all steps of directory searching
- -V --version print version and exit
- -h --help print this message
-
-The output database file is named otfl-fonts.lua and is placed under:
-
- %s"
-]], name, names.path.dir))
-end
-
-local function version_msg()
- texio.write(string.format(
- "%s version %s, database version %s.\n", name, version, names.version))
-end
-
---[[
-Command-line processing.
-Here we fill cmdargs with the good values, and then analyze it.
---]]
-
-local long_opts = {
- force = "f",
- quiet = "q",
- help = "h",
- verbose = 1 ,
- version = "V",
-}
-
-local short_opts = "fqpvVh"
-
-local force_reload = nil
-
-local function process_cmdline()
- local opts, optind, optarg = alt_getopt.get_ordered_opts (arg, short_opts, long_opts)
- local log_level = 1
- for i,v in next, opts do
- if v == "q" then
- log_level = 0
- elseif v == "v" then
- if log_level > 0 then
- log_level = log_level + 1
- else
- log_level = 2
- end
- elseif v == "V" then
- version_msg()
- os.exit(0)
- elseif v == "h" then
- help_msg()
- os.exit(0)
- elseif v == "f" then
- force_reload = 1
- end
- end
- names.set_log_level(log_level)
-end
-
-local function generate(force)
- local fontnames, saved
- fontnames = names.update(fontnames, force)
- logs.report("fonts in the database", "%i", #fontnames.mappings)
- saved = names.save(fontnames)
- texio.write_nl("")
-end
-
-process_cmdline()
-generate(force_reload)
diff --git a/otfl-data-con.lua b/otfl-data-con.lua
deleted file mode 100644
index ed4f2de..0000000
--- a/otfl-data-con.lua
+++ /dev/null
@@ -1,135 +0,0 @@
-if not modules then modules = { } end modules ['data-con'] = {
- version = 1.100,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local format, lower, gsub = string.format, string.lower, string.gsub
-
-local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end)
-local trace_containers = false trackers.register("resolvers.containers", function(v) trace_containers = v end)
-local trace_storage = false trackers.register("resolvers.storage", function(v) trace_storage = v end)
-
---[[ldx--
-<p>Once we found ourselves defining similar cache constructs
-several times, containers were introduced. Containers are used
-to collect tables in memory and reuse them when possible based
-on (unique) hashes (to be provided by the calling function).</p>
-
-<p>Caching to disk is disabled by default. Version numbers are
-stored in the saved table which makes it possible to change the
-table structures without bothering about the disk cache.</p>
-
-<p>Examples of usage can be found in the font related code.</p>
---ldx]]--
-
-containers = containers or { }
-local containers = containers
-containers.usecache = true
-
-local report_containers = logs.reporter("resolvers","containers")
-
-local function report(container,tag,name)
- if trace_cache or trace_containers then
- report_containers("container: %s, tag: %s, name: %s",container.subcategory,tag,name or 'invalid')
- end
-end
-
-local allocated = { }
-
-local mt = {
- __index = function(t,k)
- if k == "writable" then
- local writable = caches.getwritablepath(t.category,t.subcategory) or { "." }
- t.writable = writable
- return writable
- elseif k == "readables" then
- local readables = caches.getreadablepaths(t.category,t.subcategory) or { "." }
- t.readables = readables
- return readables
- end
- end,
- __storage__ = true
-}
-
-function containers.define(category, subcategory, version, enabled)
- if category and subcategory then
- local c = allocated[category]
- if not c then
- c = { }
- allocated[category] = c
- end
- local s = c[subcategory]
- if not s then
- s = {
- category = category,
- subcategory = subcategory,
- storage = { },
- enabled = enabled,
- version = version or math.pi, -- after all, this is TeX
- trace = false,
- -- writable = caches.getwritablepath and caches.getwritablepath (category,subcategory) or { "." },
- -- readables = caches.getreadablepaths and caches.getreadablepaths(category,subcategory) or { "." },
- }
- setmetatable(s,mt)
- c[subcategory] = s
- end
- return s
- end
-end
-
-function containers.is_usable(container, name)
- return container.enabled and caches and caches.is_writable(container.writable, name)
-end
-
-function containers.is_valid(container, name)
- if name and name ~= "" then
- local storage = container.storage[name]
- return storage and storage.cache_version == container.version
- else
- return false
- end
-end
-
-function containers.read(container,name)
- local storage = container.storage
- local stored = storage[name]
- if not stored and container.enabled and caches and containers.usecache then
- stored = caches.loaddata(container.readables,name)
- if stored and stored.cache_version == container.version then
- report(container,"loaded",name)
- else
- stored = nil
- end
- storage[name] = stored
- elseif stored then
- report(container,"reusing",name)
- end
- return stored
-end
-
-function containers.write(container, name, data)
- if data then
- data.cache_version = container.version
- if container.enabled and caches then
- local unique, shared = data.unique, data.shared
- data.unique, data.shared = nil, nil
- caches.savedata(container.writable, name, data)
- report(container,"saved",name)
- data.unique, data.shared = unique, shared
- end
- report(container,"stored",name)
- container.storage[name] = data
- end
- return data
-end
-
-function containers.content(container,name)
- return container.storage[name]
-end
-
-function containers.cleanname(name)
- return (gsub(lower(name),"[^%w%d]+","-"))
-end
diff --git a/otfl-font-cid.lua b/otfl-font-cid.lua
deleted file mode 100644
index 4a4c4d2..0000000
--- a/otfl-font-cid.lua
+++ /dev/null
@@ -1,165 +0,0 @@
-if not modules then modules = { } end modules ['font-cid'] = {
- version = 1.001,
- comment = "companion to font-otf.lua (cidmaps)",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local format, match, lower = string.format, string.match, string.lower
-local tonumber = tonumber
-local P, S, R, C, V, lpegmatch = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.match
-
-local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
-
-local report_otf = logs.reporter("fonts","otf loading")
-
-local fonts = fonts
-
-local cid = { }
-fonts.cid = cid
-
-local cidmap = { }
-local cidmax = 10
-
--- original string parser: 0.109, lpeg parser: 0.036 seconds for Adobe-CNS1-4.cidmap
---
--- 18964 18964 (leader)
--- 0 /.notdef
--- 1..95 0020
--- 99 3000
-
-local number = C(R("09","af","AF")^1)
-local space = S(" \n\r\t")
-local spaces = space^0
-local period = P(".")
-local periods = period * period
-local name = P("/") * C((1-space)^1)
-
-local unicodes, names = { }, { } -- we could use Carg now
-
-local function do_one(a,b)
- unicodes[tonumber(a)] = tonumber(b,16)
-end
-
-local function do_range(a,b,c)
- c = tonumber(c,16)
- for i=tonumber(a),tonumber(b) do
- unicodes[i] = c
- c = c + 1
- end
-end
-
-local function do_name(a,b)
- names[tonumber(a)] = b
-end
-
-local grammar = P { "start",
- start = number * spaces * number * V("series"),
- series = (spaces * (V("one") + V("range") + V("named")))^1,
- one = (number * spaces * number) / do_one,
- range = (number * periods * number * spaces * number) / do_range,
- named = (number * spaces * name) / do_name
-}
-
-local function loadcidfile(filename)
- local data = io.loaddata(filename)
- if data then
- unicodes, names = { }, { }
- lpegmatch(grammar,data)
- local supplement, registry, ordering = match(filename,"^(.-)%-(.-)%-()%.(.-)$")
- return {
- supplement = supplement,
- registry = registry,
- ordering = ordering,
- filename = filename,
- unicodes = unicodes,
- names = names
- }
- end
-end
-
-cid.loadfile = loadcidfile -- we use the frozen variant
-
-local template = "%s-%s-%s.cidmap"
-
-local function locate(registry,ordering,supplement)
- local filename = format(template,registry,ordering,supplement)
- local hashname = lower(filename)
- local found = cidmap[hashname]
- if not found then
- if trace_loading then
- report_otf("checking cidmap, registry: %s, ordering: %s, supplement: %s, filename: %s",registry,ordering,supplement,filename)
- end
- local fullname = resolvers.findfile(filename,'cid') or ""
- if fullname ~= "" then
- found = loadcidfile(fullname)
- if found then
- if trace_loading then
- report_otf("using cidmap file %s",filename)
- end
- cidmap[hashname] = found
- found.usedname = file.basename(filename)
- end
- end
- end
- return found
-end
-
--- cf Arthur R. we can safely scan upwards since cids are downward compatible
-
-function cid.getmap(specification)
- if not specification then
- report_otf("invalid cidinfo specification (table expected)")
- return
- end
- local registry = specification.registry
- local ordering = specification.ordering
- local supplement = specification.supplement
- -- check for already loaded file
- local filename = format(registry,ordering,supplement)
- local found = cidmap[lower(filename)]
- if found then
- return found
- end
- if trace_loading then
- report_otf("needed cidmap, registry: %s, ordering: %s, supplement: %s",registry,ordering,supplement)
- end
- found = locate(registry,ordering,supplement)
- if not found then
- local supnum = tonumber(supplement)
- local cidnum = nil
- -- next highest (alternatively we could start high)
- if supnum < cidmax then
- for s=supnum+1,cidmax do
- local c = locate(registry,ordering,s)
- if c then
- found, cidnum = c, s
- break
- end
- end
- end
- -- next lowest (least worse fit)
- if not found and supnum > 0 then
- for s=supnum-1,0,-1 do
- local c = locate(registry,ordering,s)
- if c then
- found, cidnum = c, s
- break
- end
- end
- end
- -- prevent further lookups -- somewhat tricky
- registry = lower(registry)
- ordering = lower(ordering)
- if found and cidnum > 0 then
- for s=0,cidnum-1 do
- local filename = format(template,registry,ordering,s)
- if not cidmap[filename] then
- cidmap[filename] = found
- end
- end
- end
- end
- return found
-end
diff --git a/otfl-font-con.lua b/otfl-font-con.lua
deleted file mode 100644
index 9280996..0000000
--- a/otfl-font-con.lua
+++ /dev/null
@@ -1,1332 +0,0 @@
-if not modules then modules = { } end modules ['font-con'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-
--- some names of table entries will be changed (no _)
-
-local utf = unicode.utf8
-
-local next, tostring, rawget = next, tostring, rawget
-local format, match, lower, gsub = string.format, string.match, string.lower, string.gsub
-local utfbyte = utf.byte
-local sort, insert, concat, sortedkeys, serialize, fastcopy = table.sort, table.insert, table.concat, table.sortedkeys, table.serialize, table.fastcopy
-local derivetable = table.derive
-
-local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end)
-local trace_scaling = false trackers.register("fonts.scaling" , function(v) trace_scaling = v end)
-
-local report_defining = logs.reporter("fonts","defining")
-
--- watch out: no negative depths and negative eights permitted in regular fonts
-
---[[ldx--
-<p>Here we only implement a few helper functions.</p>
---ldx]]--
-
-local fonts = fonts
-local constructors = { }
-fonts.constructors = constructors
-local handlers = { }
-fonts.handlers = handlers
-
-local specifiers = fonts.specifiers
-local contextsetups = specifiers.contextsetups
-local contextnumbers = specifiers.contextnumbers
-
-local allocate = utilities.storage.allocate
-local setmetatableindex = table.setmetatableindex
-
--- will be directives
-
-constructors.dontembed = allocate()
-constructors.autocleanup = true
-constructors.namemode = "fullpath" -- will be a function
-
-constructors.version = 1.01
-constructors.cache = containers.define("fonts", "constructors", constructors.version, false)
-
-constructors.privateoffset = 0xF0000 -- 0x10FFFF
-
--- Some experimental helpers (handy for tracing):
---
--- todo: extra:
---
--- extra_space => space.extra
--- space => space.width
--- space_stretch => space.stretch
--- space_shrink => space.shrink
-
--- We do keep the x-height, extra_space, space_shrink and space_stretch
--- around as these are low level official names.
-
-constructors.keys = {
- properties = {
- encodingbytes = "number",
- embedding = "number",
- cidinfo = {
- },
- format = "string",
- fontname = "string",
- fullname = "string",
- filename = "filename",
- psname = "string",
- name = "string",
- virtualized = "boolean",
- hasitalics = "boolean",
- autoitalicamount = "basepoints",
- nostackmath = "boolean",
- noglyphnames = "boolean",
- mode = "string",
- hasmath = "boolean",
- mathitalics = "boolean",
- textitalics = "boolean",
- finalized = "boolean",
- },
- parameters = {
- mathsize = "number",
- scriptpercentage = "float",
- scriptscriptpercentage = "float",
- units = "cardinal",
- designsize = "scaledpoints",
- expansion = {
- stretch = "integerscale", -- might become float
- shrink = "integerscale", -- might become float
- step = "integerscale", -- might become float
- auto = "boolean",
- },
- protrusion = {
- auto = "boolean",
- },
- slantfactor = "float",
- extendfactor = "float",
- factor = "float",
- hfactor = "float",
- vfactor = "float",
- size = "scaledpoints",
- units = "scaledpoints",
- scaledpoints = "scaledpoints",
- slantperpoint = "scaledpoints",
- spacing = {
- width = "scaledpoints",
- stretch = "scaledpoints",
- shrink = "scaledpoints",
- extra = "scaledpoints",
- },
- xheight = "scaledpoints",
- quad = "scaledpoints",
- ascender = "scaledpoints",
- descender = "scaledpoints",
- synonyms = {
- space = "spacing.width",
- spacestretch = "spacing.stretch",
- spaceshrink = "spacing.shrink",
- extraspace = "spacing.extra",
- x_height = "xheight",
- space_stretch = "spacing.stretch",
- space_shrink = "spacing.shrink",
- extra_space = "spacing.extra",
- em = "quad",
- ex = "xheight",
- slant = "slantperpoint",
- },
- },
- description = {
- width = "basepoints",
- height = "basepoints",
- depth = "basepoints",
- boundingbox = { },
- },
- character = {
- width = "scaledpoints",
- height = "scaledpoints",
- depth = "scaledpoints",
- italic = "scaledpoints",
- },
-}
-
--- This might become an interface:
-
-local designsizes = allocate()
-constructors.designsizes = designsizes
-local loadedfonts = allocate()
-constructors.loadedfonts = loadedfonts
-
---[[ldx--
-<p>We need to normalize the scale factor (in scaled points). This has to
-do with the fact that <l n='tex'/> uses a negative multiple of 1000 as
-a signal for a font scaled based on the design size.</p>
---ldx]]--
-
-local factors = {
- pt = 65536.0,
- bp = 65781.8,
-}
-
-function constructors.setfactor(f)
- constructors.factor = factors[f or 'pt'] or factors.pt
-end
-
-constructors.setfactor()
-
-function constructors.scaled(scaledpoints, designsize) -- handles designsize in sp as well
- if scaledpoints < 0 then
- if designsize then
- local factor = constructors.factor
- if designsize > factor then -- or just 1000 / when? mp?
- return (- scaledpoints/1000) * designsize -- sp's
- else
- return (- scaledpoints/1000) * designsize * factor
- end
- else
- return (- scaledpoints/1000) * 10 * constructors.factor -- the local factor above is not in scope here
- end
- else
- return scaledpoints
- end
-end
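To make the negative-size convention concrete, a small sketch with invented numbers (65536 sp = 1pt, so a 10pt design size is 655360 sp):

local designsize = 10 * 65536                      -- 10pt design size, in scaled points
print(constructors.scaled(-1200, designsize))      -- "1.2 times the design size" -> 786432 sp = 12pt
print(constructors.scaled(12 * 65536, designsize)) -- positive sizes are already sp and pass through -> 786432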
-
---[[ldx--
-<p>Beware, the boundingbox is passed as reference so we may not overwrite it
-in the process; numbers are of course copies. Here 65536 equals 1pt. (Due to
-excessive memory usage in CJK fonts, we no longer pass the boundingbox.)</p>
---ldx]]--
-
--- The scaler is only used for otf and afm and virtual fonts. If
--- a virtual font has italic correction make sure to set the
--- hasitalics flag. Some more flags will be added in
--- the future.
-
---[[ldx--
-<p>The reason why the scaler was originally split, is that for a while we experimented
-with a helper function. However, in practice the <l n='api'/> calls are too slow to
-make this profitable and the <l n='lua'/> based variant was just faster. A days
-wasted day but an experience richer.</p>
---ldx]]--
-
--- we can get rid of the tfm instance when we have fast access to the
--- scaled character dimensions at the tex end, e.g. a fontobject.width
--- actually we already have some of that now as virtual keys in glyphs
---
--- flushing the kern and ligature tables from memory saves a lot (only
--- base mode) but it complicates vf building where the new characters
--- demand this data .. solution: functions that access them
-
-function constructors.cleanuptable(tfmdata)
- if constructors.autocleanup and tfmdata.properties.virtualized then
- for k, v in next, tfmdata.characters do
- if v.commands then v.commands = nil end
- -- if v.kerns then v.kerns = nil end
- end
- end
-end
-
--- experimental, sharing kerns (unscaled and scaled) saves memory
--- local sharedkerns, basekerns = constructors.check_base_kerns(tfmdata)
--- loop over descriptions (afm and otf have descriptions, tfm not)
--- there is no need (yet) to assign a value to chr.tounicode
-
--- constructors.prepare_base_kerns(tfmdata) -- optimization
-
--- we have target.name=metricfile and target.fullname=RealName and target.filename=diskfilename
--- when collapsing fonts, luatex looks at both target.name and target.fullname, as ttc files
--- can have multiple subfonts
-
-function constructors.calculatescale(tfmdata,scaledpoints)
- local parameters = tfmdata.parameters
- if scaledpoints < 0 then
- scaledpoints = (- scaledpoints/1000) * (tfmdata.designsize or parameters.designsize) -- already in sp
- end
- return scaledpoints, scaledpoints / (parameters.units or 1000) -- delta
-end
-
-local unscaled = {
- ScriptPercentScaleDown = true,
- ScriptScriptPercentScaleDown = true,
- RadicalDegreeBottomRaisePercent = true
-}
-
-function constructors.assignmathparameters(target,original) -- simple variant, not used in context
- -- when a tfm file is loaded, it has already been scaled
- -- and it never enters the scaler, so this is otf only and
- -- even then we do some extra in the context math plugins
- local mathparameters = original.mathparameters
- if mathparameters and next(mathparameters) then
- local targetparameters = target.parameters
- local targetproperties = target.properties
- local targetmathparameters = { }
- local factor = targetproperties.math_is_scaled and 1 or targetparameters.factor
- for name, value in next, mathparameters do
- if unscaled[name] then
- targetmathparameters[name] = value
- else
- targetmathparameters[name] = value * factor
- end
- end
- if not targetmathparameters.FractionDelimiterSize then
- targetmathparameters.FractionDelimiterSize = 1.01 * targetparameters.size
- end
- if not mathparameters.FractionDelimiterDisplayStyleSize then
- targetmathparameters.FractionDelimiterDisplayStyleSize = 2.40 * targetparameters.size
- end
- target.mathparameters = targetmathparameters
- end
-end
-
-function constructors.beforecopyingcharacters(target,original)
- -- can be used for additional tweaking
-end
-
-function constructors.aftercopyingcharacters(target,original)
- -- can be used for additional tweaking
-end
-
-function constructors.enhanceparameters(parameters)
- local xheight = parameters.x_height
- local quad = parameters.quad
- local space = parameters.space
- local stretch = parameters.space_stretch
- local shrink = parameters.space_shrink
- local extra = parameters.extra_space
- local slant = parameters.slant
- parameters.xheight = xheight
- parameters.spacestretch = stretch
- parameters.spaceshrink = shrink
- parameters.extraspace = extra
- parameters.em = quad
- parameters.ex = xheight
- parameters.slantperpoint = slant
- parameters.spacing = {
- width = space,
- stretch = stretch,
- shrink = shrink,
- extra = extra,
- }
-end
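A minimal sketch of what the copying amounts to (the parameter values are invented):

local p = {
    x_height = 282168, quad = 655360, space = 218453,
    space_stretch = 109226, space_shrink = 72818, extra_space = 72818, slant = 0,
}
constructors.enhanceparameters(p)
-- now p.ex == p.x_height, p.em == p.quad, p.slantperpoint == p.slant, and
-- p.spacing holds { width = p.space, stretch = 109226, shrink = 72818, extra = 72818 }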
-
-function constructors.scale(tfmdata,specification)
- local target = { } -- the new table
- --
- if tonumber(specification) then
- specification = { size = specification }
- end
- --
- local scaledpoints = specification.size
- local relativeid = specification.relativeid
- --
- local properties = tfmdata.properties or { }
- local goodies = tfmdata.goodies or { }
- local resources = tfmdata.resources or { }
- local descriptions = tfmdata.descriptions or { } -- bad news if empty
- local characters = tfmdata.characters or { } -- bad news if empty
- local changed = tfmdata.changed or { } -- for base mode
- local shared = tfmdata.shared or { }
- local parameters = tfmdata.parameters or { }
- local mathparameters = tfmdata.mathparameters or { }
- --
- local targetcharacters = { }
- local targetdescriptions = derivetable(descriptions)
- local targetparameters = derivetable(parameters)
- local targetproperties = derivetable(properties)
- local targetgoodies = goodies -- we need to loop so no metatable
- target.characters = targetcharacters
- target.descriptions = targetdescriptions
- target.parameters = targetparameters
- -- target.mathparameters = targetmathparameters -- happens elsewhere
- target.properties = targetproperties
- target.goodies = targetgoodies
- target.shared = shared
- target.resources = resources
- target.unscaled = tfmdata -- the original unscaled one
- --
- -- specification.mathsize : 1=text 2=script 3=scriptscript
- -- specification.textsize : natural (text)size
- -- parameters.mathsize : 1=text 2=script 3=scriptscript >1000 enforced size (feature value other than yes)
- --
- local mathsize = tonumber(specification.mathsize) or 0
- local textsize = tonumber(specification.textsize) or scaledpoints
- local forcedsize = tonumber(parameters.mathsize ) or 0
- local extrafactor = tonumber(specification.factor ) or 1
- if (mathsize == 2 or forcedsize == 2) and parameters.scriptpercentage then
- scaledpoints = parameters.scriptpercentage * textsize / 100
- elseif (mathsize == 3 or forcedsize == 3) and parameters.scriptscriptpercentage then
- scaledpoints = parameters.scriptscriptpercentage * textsize / 100
- elseif forcedsize > 1000 then -- safeguard
- scaledpoints = forcedsize
- end
- --
- local tounicode = resources.tounicode
- local defaultwidth = resources.defaultwidth or 0
- local defaultheight = resources.defaultheight or 0
- local defaultdepth = resources.defaultdepth or 0
- local units = parameters.units or 1000
- --
- if target.fonts then
- target.fonts = fastcopy(target.fonts) -- maybe we virtualize more afterwards
- end
- --
- -- boundary keys are no longer needed as we now have a string 'right_boundary'
- -- that can be used in relevant tables (kerns and ligatures) ... not that I ever
- -- used them
- --
- -- boundarychar_label = 0, -- not needed
- -- boundarychar = 65536, -- there is now a string 'right_boundary'
- -- false_boundarychar = 65536, -- produces invalid tfm in luatex
- --
- targetproperties.language = properties.language or "dflt" -- inherited
- targetproperties.script = properties.script or "dflt" -- inherited
- targetproperties.mode = properties.mode or "base" -- inherited
- --
- local askedscaledpoints = scaledpoints
- local scaledpoints, delta = constructors.calculatescale(tfmdata,scaledpoints) -- no shortcut, can be redefined
- --
- local hdelta = delta
- local vdelta = delta
- --
- target.designsize = parameters.designsize -- not really needed so it might become obsolete
- target.units_per_em = units -- just a trigger for the backend (does luatex use this? if not it will go)
- --
- local direction = properties.direction or tfmdata.direction or 0 -- pointless, as we don't use omf fonts at all
- target.direction = direction
- properties.direction = direction
- --
- target.size = scaledpoints
- --
- target.encodingbytes = properties.encodingbytes or 1
- target.embedding = properties.embedding or "subset"
- target.tounicode = 1
- target.cidinfo = properties.cidinfo
- target.format = properties.format
- --
- local fontname = properties.fontname or tfmdata.fontname -- for the moment we fall back on
- local fullname = properties.fullname or tfmdata.fullname -- names in the tfmdata although
- local filename = properties.filename or tfmdata.filename -- that is not the right place to
- local psname = properties.psname or tfmdata.psname -- pass them
- local name = properties.name or tfmdata.name
- --
- if not psname or psname == "" then
- -- name used in pdf file as well as for selecting subfont in ttc/dfont
- psname = fontname or (fullname and fonts.names.cleanname(fullname))
- end
- target.fontname = fontname
- target.fullname = fullname
- target.filename = filename
- target.psname = psname
- target.name = name
- --
- properties.fontname = fontname
- properties.fullname = fullname
- properties.filename = filename
- properties.psname = psname
- properties.name = name
- -- expansion (hz)
- local expansion = parameters.expansion
- if expansion then
- target.stretch = expansion.stretch
- target.shrink = expansion.shrink
- target.step = expansion.step
- target.auto_expand = expansion.auto
- end
- -- protrusion
- local protrusion = parameters.protrusion
- if protrusion then
- target.auto_protrude = protrusion.auto
- end
- -- widening
- local extendfactor = parameters.extendfactor or 0
- if extendfactor ~= 0 and extendfactor ~= 1 then
- hdelta = hdelta * extendfactor
- target.extend = extendfactor * 1000 -- extent ?
- else
- target.extend = 1000 -- extent ?
- end
- -- slanting
- local slantfactor = parameters.slantfactor or 0
- if slantfactor ~= 0 then
- target.slant = slantfactor * 1000
- else
- target.slant = 0
- end
- --
- targetparameters.factor = delta
- targetparameters.hfactor = hdelta
- targetparameters.vfactor = vdelta
- targetparameters.size = scaledpoints
- targetparameters.units = units
- targetparameters.scaledpoints = askedscaledpoints
- --
- local isvirtual = properties.virtualized or tfmdata.type == "virtual"
- local hasquality = target.auto_expand or target.auto_protrude
- local hasitalics = properties.hasitalics
- local autoitalicamount = properties.autoitalicamount
- local stackmath = not properties.nostackmath
- local nonames = properties.noglyphnames
- local nodemode = properties.mode == "node"
- --
- if changed and not next(changed) then
- changed = false
- end
- --
- target.type = isvirtual and "virtual" or "real"
- --
- target.postprocessors = tfmdata.postprocessors
- --
- local targetslant = (parameters.slant or parameters[1] or 0)
- local targetspace = (parameters.space or parameters[2] or 0)*hdelta
- local targetspace_stretch = (parameters.space_stretch or parameters[3] or 0)*hdelta
- local targetspace_shrink = (parameters.space_shrink or parameters[4] or 0)*hdelta
- local targetx_height = (parameters.x_height or parameters[5] or 0)*vdelta
- local targetquad = (parameters.quad or parameters[6] or 0)*hdelta
- local targetextra_space = (parameters.extra_space or parameters[7] or 0)*hdelta
- --
- targetparameters.slant = targetslant -- slantperpoint
- targetparameters.space = targetspace
- targetparameters.space_stretch = targetspace_stretch
- targetparameters.space_shrink = targetspace_shrink
- targetparameters.x_height = targetx_height
- targetparameters.quad = targetquad
- targetparameters.extra_space = targetextra_space
- --
- local ascender = parameters.ascender
- if ascender then
- targetparameters.ascender = delta * ascender
- end
- local descender = parameters.descender
- if descender then
- targetparameters.descender = delta * descender
- end
- --
- constructors.enhanceparameters(targetparameters) -- official copies for us
- --
- local protrusionfactor = (targetquad ~= 0 and 1000/targetquad) or 0
- local scaledwidth = defaultwidth * hdelta
- local scaledheight = defaultheight * vdelta
- local scaleddepth = defaultdepth * vdelta
- --
- if trace_defining then
- report_defining("scaling by (%s,%s): name '%s', fullname: '%s', filename: '%s'",
- hdelta,vdelta,name or "noname",fullname or "nofullname",filename or "nofilename")
- end
- --
- local hasmath = (properties.hasmath or next(mathparameters)) and true
- if hasmath then
- if trace_defining then
- report_defining("math enabled for: name '%s', fullname: '%s', filename: '%s'",
- name or "noname",fullname or "nofullname",filename or "nofilename")
- end
- constructors.assignmathparameters(target,tfmdata) -- does scaling and whatever is needed
- properties.hasmath = true
- target.nomath = false
- target.MathConstants = target.mathparameters
- else
- if trace_defining then
- report_defining("math disabled for: name '%s', fullname: '%s', filename: '%s'",
- name or "noname",fullname or "nofullname",filename or "nofilename")
- end
- properties.hasmath = false
- target.nomath = true
- target.mathparameters = nil -- nop
- end
- --
- local italickey = "italic"
- --
- -- some context specific trickery (this will move to a plugin)
- --
- if hasmath then
- if properties.mathitalics then
- italickey = "italic_correction"
- if trace_defining then
- report_defining("math italics disabled for: name '%s', fullname: '%s', filename: '%s'",
- name or "noname",fullname or "nofullname",filename or "nofilename")
- end
- end
- autoitalicamount = false -- new
- else
- if properties.textitalics then
- italickey = "italic_correction"
- if trace_defining then
- report_defining("text italics disabled for: name '%s', fullname: '%s', filename: '%s'",
- name or "noname",fullname or "nofullname",filename or "nofilename")
- end
- if properties.delaytextitalics then
- autoitalicamount = false
- end
- end
- end
- --
- -- end of context specific trickery
- --
- constructors.beforecopyingcharacters(target,tfmdata)
- --
- local sharedkerns = { }
- --
- -- we can have a dumb mode (basemode without math etc) that skips most
- --
- for unicode, character in next, characters do
- local chr, description, index, touni
- if changed then
- -- basemode hack (we try to catch missing tounicodes, e.g. needed for ssty in math cambria)
- local c = changed[unicode]
- if c then
- description = descriptions[c] or descriptions[unicode] or character
- character = characters[c] or character
- index = description.index or c
- if tounicode then
- touni = tounicode[index] -- nb: index!
- if not touni then -- goodie
- local d = descriptions[unicode] or characters[unicode]
- local i = d.index or unicode
- touni = tounicode[i] -- nb: index!
- end
- end
- else
- description = descriptions[unicode] or character
- index = description.index or unicode
- if tounicode then
- touni = tounicode[index] -- nb: index!
- end
- end
- else
- description = descriptions[unicode] or character
- index = description.index or unicode
- if tounicode then
- touni = tounicode[index] -- nb: index!
- end
- end
- local width = description.width
- local height = description.height
- local depth = description.depth
- if width then width = hdelta*width else width = scaledwidth end
- if height then height = vdelta*height else height = scaledheight end
- -- if depth then depth = vdelta*depth else depth = scaleddepth end
- if depth and depth ~= 0 then
- depth = delta*depth
- if nonames then
- chr = {
- index = index,
- height = height,
- depth = depth,
- width = width,
- }
- else
- chr = {
- name = description.name,
- index = index,
- height = height,
- depth = depth,
- width = width,
- }
- end
- else
- -- this saves a little bit of time and memory, esp for big cjk fonts
- if nonames then
- chr = {
- index = index,
- height = height,
- width = width,
- }
- else
- chr = {
- name = description.name,
- index = index,
- height = height,
- width = width,
- }
- end
- end
- if touni then
- chr.tounicode = touni
- end
- -- if trace_scaling then
- -- report_defining("t=%s, u=%s, i=%s, n=%s c=%s",k,chr.tounicode or "",index or 0,description.name or '-',description.class or '-')
- -- end
- if hasquality then
- -- we could move these calculations elsewhere (saves calculations)
- local ve = character.expansion_factor
- if ve then
- chr.expansion_factor = ve*1000 -- expansionfactor, hm, can happen elsewhere
- end
- local vl = character.left_protruding
- if vl then
- chr.left_protruding = protrusionfactor*width*vl
- end
- local vr = character.right_protruding
- if vr then
- chr.right_protruding = protrusionfactor*width*vr
- end
- end
- --
- if autoitalicamount then
- local vi = description.italic
- if not vi then
- local vi = description.boundingbox[3] - description.width + autoitalicamount
- if vi > 0 then -- < 0 indicates no overshoot or a very small auto italic
- chr[italickey] = vi*hdelta
- end
- elseif vi ~= 0 then
- chr[italickey] = vi*hdelta
- end
- elseif hasitalics then
- local vi = description.italic
- if vi and vi ~= 0 then
- chr[italickey] = vi*hdelta
- end
- end
- -- to be tested
- if hasmath then
- -- todo, just operate on descriptions.math
- local vn = character.next
- if vn then
- chr.next = vn
- -- if character.vert_variants or character.horiz_variants then
- -- report_defining("glyph U+%05X has combination of next, vert_variants and horiz_variants",index)
- -- end
- else
- local vv = character.vert_variants
- if vv then
- local t = { }
- for i=1,#vv do
- local vvi = vv[i]
- t[i] = {
- ["start"] = (vvi["start"] or 0)*vdelta,
- ["end"] = (vvi["end"] or 0)*vdelta,
- ["advance"] = (vvi["advance"] or 0)*vdelta,
- ["extender"] = vvi["extender"],
- ["glyph"] = vvi["glyph"],
- }
- end
- chr.vert_variants = t
- else
- local hv = character.horiz_variants
- if hv then
- local t = { }
- for i=1,#hv do
- local hvi = hv[i]
- t[i] = {
- ["start"] = (hvi["start"] or 0)*hdelta,
- ["end"] = (hvi["end"] or 0)*hdelta,
- ["advance"] = (hvi["advance"] or 0)*hdelta,
- ["extender"] = hvi["extender"],
- ["glyph"] = hvi["glyph"],
- }
- end
- chr.horiz_variants = t
- end
- end
- end
- local va = character.top_accent
- if va then
- chr.top_accent = vdelta*va
- end
- if stackmath then
- local mk = character.mathkerns -- not in math ?
- if mk then
- local kerns = { }
- local v = mk.top_right if v then local k = { } for i=1,#v do local vi = v[i]
- k[i] = { height = vdelta*vi.height, kern = vdelta*vi.kern }
- end kerns.top_right = k end
- local v = mk.top_left if v then local k = { } for i=1,#v do local vi = v[i]
- k[i] = { height = vdelta*vi.height, kern = vdelta*vi.kern }
- end kerns.top_left = k end
- local v = mk.bottom_left if v then local k = { } for i=1,#v do local vi = v[i]
- k[i] = { height = vdelta*vi.height, kern = vdelta*vi.kern }
- end kerns.bottom_left = k end
- local v = mk.bottom_right if v then local k = { } for i=1,#v do local vi = v[i]
- k[i] = { height = vdelta*vi.height, kern = vdelta*vi.kern }
- end kerns.bottom_right = k end
- chr.mathkern = kerns -- singular -> should be patched in luatex !
- end
- end
- end
- if not nodemode then
- local vk = character.kerns
- if vk then
- local s = sharedkerns[vk]
- if not s then
- s = { }
- for k,v in next, vk do s[k] = v*hdelta end
- sharedkerns[vk] = s
- end
- chr.kerns = s
- end
- local vl = character.ligatures
- if vl then
- if true then
- chr.ligatures = vl -- shared
- else
- local tt = { }
- for i,l in next, vl do
- tt[i] = l
- end
- chr.ligatures = tt
- end
- end
- end
- if isvirtual then
- local vc = character.commands
- if vc then
- -- we assume non scaled commands here
- -- tricky .. we need to scale pseudo math glyphs too
- -- which is why we deal with rules too
- local ok = false
- for i=1,#vc do
- local key = vc[i][1]
- if key == "right" or key == "down" then
- ok = true
- break
- end
- end
- if ok then
- local tt = { }
- for i=1,#vc do
- local ivc = vc[i]
- local key = ivc[1]
- if key == "right" then
- tt[i] = { key, ivc[2]*hdelta }
- elseif key == "down" then
- tt[i] = { key, ivc[2]*vdelta }
- elseif key == "rule" then
- tt[i] = { key, ivc[2]*vdelta, ivc[3]*hdelta }
- else -- not comment
- tt[i] = ivc -- shared since in cache and untouched
- end
- end
- chr.commands = tt
- else
- chr.commands = vc
- end
- chr.index = nil
- end
- end
- targetcharacters[unicode] = chr
- end
- --
- constructors.aftercopyingcharacters(target,tfmdata)
- --
- return target
-end
-
-function constructors.finalize(tfmdata)
- if tfmdata.properties and tfmdata.properties.finalized then
- return
- end
- --
- if not tfmdata.characters then
- return nil
- end
- --
- if not tfmdata.goodies then
- tfmdata.goodies = { } -- context specific
- end
- --
- local parameters = tfmdata.parameters
- if not parameters then
- return nil
- end
- --
- if not parameters.expansion then
- parameters.expansion = {
- stretch = tfmdata.stretch or 0,
- shrink = tfmdata.shrink or 0,
- step = tfmdata.step or 0,
- auto = tfmdata.auto_expand or false,
- }
- end
- --
- if not parameters.protrusion then
- parameters.protrusion = {
- auto = tfmdata.auto_protrude or false
- }
- end
- --
- if not parameters.size then
- parameters.size = tfmdata.size
- end
- --
- if not parameters.extendfactor then
- parameters.extendfactor = tfmdata.extend or 0
- end
- --
- if not parameters.slantfactor then
- parameters.slantfactor = tfmdata.slant or 0
- end
- --
- if not parameters.designsize then
- parameters.designsize = tfmdata.designsize or 655360
- end
- --
- if not parameters.units then
- parameters.units = tfmdata.units_per_em or 1000
- end
- --
- if not tfmdata.descriptions then
- local descriptions = { } -- yes or no
- setmetatableindex(descriptions, function(t,k) local v = { } t[k] = v return v end)
- tfmdata.descriptions = descriptions
- end
- --
- local properties = tfmdata.properties
- if not properties then
- properties = { }
- tfmdata.properties = properties
- end
- --
- if not properties.virtualized then
- properties.virtualized = tfmdata.type == "virtual"
- end
- --
- if not tfmdata.properties then
- tfmdata.properties = {
- fontname = tfmdata.fontname,
- filename = tfmdata.filename,
- fullname = tfmdata.fullname,
- name = tfmdata.name,
- psname = tfmdata.psname,
- --
- encodingbytes = tfmdata.encodingbytes or 1,
- embedding = tfmdata.embedding or "subset",
- tounicode = tfmdata.tounicode or 1,
- cidinfo = tfmdata.cidinfo or nil,
- format = tfmdata.format or "type1",
- direction = tfmdata.direction or 0,
- }
- end
- if not tfmdata.resources then
- tfmdata.resources = { }
- end
- if not tfmdata.shared then
- tfmdata.shared = { }
- end
- --
- -- tfmdata.fonts
- -- tfmdata.unscaled
- --
- if not properties.hasmath then
- properties.hasmath = not tfmdata.nomath
- end
- --
- tfmdata.MathConstants = nil
- tfmdata.postprocessors = nil
- --
- tfmdata.fontname = nil
- tfmdata.filename = nil
- tfmdata.fullname = nil
- tfmdata.name = nil -- most tricky part
- tfmdata.psname = nil
- --
- tfmdata.encodingbytes = nil
- tfmdata.embedding = nil
- tfmdata.tounicode = nil
- tfmdata.cidinfo = nil
- tfmdata.format = nil
- tfmdata.direction = nil
- tfmdata.type = nil
- tfmdata.nomath = nil
- tfmdata.designsize = nil
- --
- tfmdata.size = nil
- tfmdata.stretch = nil
- tfmdata.shrink = nil
- tfmdata.step = nil
- tfmdata.auto_expand = nil
- tfmdata.auto_protrude = nil
- tfmdata.extend = nil
- tfmdata.slant = nil
- tfmdata.units_per_em = nil
- --
- properties.finalized = true
- --
- return tfmdata
-end
-
---[[ldx--
-<p>A unique hash value is generated by:</p>
---ldx]]--
-
-local hashmethods = { }
-constructors.hashmethods = hashmethods
-
-function constructors.hashfeatures(specification) -- will be overloaded
- local features = specification.features
- if features then
- local t, tn = { }, 0
- for category, list in next, features do
- if next(list) then
- local hasher = hashmethods[category]
- if hasher then
- local hash = hasher(list)
- if hash then
- tn = tn + 1
- t[tn] = category .. ":" .. hash
- end
- end
- end
- end
- if tn > 0 then
- return concat(t," & ")
- end
- end
- return "unknown"
-end
-
-hashmethods.normal = function(list)
- local s = { }
- local n = 0
- for k, v in next, list do
- if k ~= "number" and k ~= "features" then -- I need to figure this out, features
- n = n + 1
- s[n] = k
- end
- end
- if n > 0 then
- sort(s)
- for i=1,n do
- local k = s[i]
- s[i] = k .. '=' .. tostring(list[k])
- end
- return concat(s,"+")
- end
-end
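For instance, with an invented feature set (hashmethods.normal sorts the keys and joins key=value pairs, hashfeatures prefixes the category):

local specification = {
    features = { normal = { liga = true, kern = true, script = "latn" } },
}
print(constructors.hashfeatures(specification))
--> normal:kern=true+liga=true+script=latn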
-
---[[ldx--
-<p>In principle we can share tfm tables when we are in node mode for a font, but then
-we need to define a font switch as an id/attr switch which is no fun, so in that
-case users can best use dynamic features ... so, we will not use that speedup. Okay,
-when we get rid of base mode we can optimize even further by sharing, but then we
-lose our testcases for <l n='luatex'/>.</p>
---ldx]]--
-
-function constructors.hashinstance(specification,force)
- local hash, size, fallbacks = specification.hash, specification.size, specification.fallbacks
- if force or not hash then
- hash = constructors.hashfeatures(specification)
- specification.hash = hash
- end
- if size < 1000 and designsizes[hash] then
- size = math.round(constructors.scaled(size,designsizes[hash]))
- specification.size = size
- end
- -- local mathsize = specification.mathsize or 0
- -- if mathsize > 0 then
- -- local textsize = specification.textsize
- -- if fallbacks then
- -- return hash .. ' @ ' .. tostring(size) .. ' [ ' .. tostring(mathsize) .. ' : ' .. tostring(textsize) .. ' ] @ ' .. fallbacks
- -- else
- -- return hash .. ' @ ' .. tostring(size) .. ' [ ' .. tostring(mathsize) .. ' : ' .. tostring(textsize) .. ' ]'
- -- end
- -- else
- if fallbacks then
- return hash .. ' @ ' .. tostring(size) .. ' @ ' .. fallbacks
- else
- return hash .. ' @ ' .. tostring(size)
- end
- -- end
-end
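Continuing the specification sketched above, hashinstance appends the size (and the fallbacks, when present) to the feature hash:

specification.size = 10 * 65536
print(constructors.hashinstance(specification))
--> normal:kern=true+liga=true+script=latn @ 655360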
-
-function constructors.setname(tfmdata,specification) -- todo: get specification from tfmdata
- if constructors.namemode == "specification" then
- -- not to be used in context !
- local specname = specification.specification
- if specname then
- tfmdata.properties.name = specname
- if trace_defining then
- report_otf("overloaded fontname: '%s'",specname)
- end
- end
- end
-end
-
-function constructors.checkedfilename(data)
- local foundfilename = data.foundfilename
- if not foundfilename then
- local askedfilename = data.filename or ""
- if askedfilename ~= "" then
- askedfilename = resolvers.resolve(askedfilename) -- no shortcut
- foundfilename = resolvers.findbinfile(askedfilename,"") or ""
- if foundfilename == "" then
- report_defining("source file '%s' is not found",askedfilename)
- foundfilename = resolvers.findbinfile(file.basename(askedfilename),"") or ""
- if foundfilename ~= "" then
- report_defining("using source file '%s' (cache mismatch)",foundfilename)
- end
- end
- end
- data.foundfilename = foundfilename
- end
- return foundfilename
-end
-
-local formats = allocate()
-fonts.formats = formats
-
-setmetatableindex(formats, function(t,k)
- local l = lower(k)
- if rawget(t,k) then
- t[k] = l
- return l
- end
- return rawget(t,file.extname(l))
-end)
-
-local locations = { }
-
-local function setindeed(mode,target,group,name,action,position)
- local t = target[mode]
- if not t then
- report_defining("fatal error in setting feature '%s', group '%s', mode '%s'",name or "?",group or "?",mode)
- os.exit()
- elseif position then
- -- todo: remove existing
- insert(t, position, { name = name, action = action })
- else
- for i=1,#t do
- local ti = t[i]
- if ti.name == name then
- ti.action = action
- return
- end
- end
- insert(t, { name = name, action = action })
- end
-end
-
-local function set(group,name,target,source)
- target = target[group]
- if not target then
- report_defining("fatal target error in setting feature '%s', group '%s'",name or "?",group or "?")
- os.exit()
- end
- local source = source[group]
- if not source then
- report_defining("fatal source error in setting feature '%s', group '%s'",name or "?",group or "?")
- os.exit()
- end
- local node = source.node
- local base = source.base
- local position = source.position
- if node then
- setindeed("node",target,group,name,node,position)
- end
- if base then
- setindeed("base",target,group,name,base,position)
- end
-end
-
-local function register(where,specification)
- local name = specification.name
- if name and name ~= "" then
- local default = specification.default
- local description = specification.description
- local initializers = specification.initializers
- local processors = specification.processors
- local manipulators = specification.manipulators
- local modechecker = specification.modechecker
- if default then
- where.defaults[name] = default
- end
- if description and description ~= "" then
- where.descriptions[name] = description
- end
- if initializers then
- set('initializers',name,where,specification)
- end
- if processors then
- set('processors', name,where,specification)
- end
- if manipulators then
- set('manipulators',name,where,specification)
- end
- if modechecker then
- where.modechecker = modechecker
- end
- end
-end
-
-constructors.registerfeature = register
-
-function constructors.getfeatureaction(what,where,mode,name)
- what = handlers[what].features
- if what then
- where = what[where]
- if where then
- mode = where[mode]
- if mode then
- for i=1,#mode do
- local m = mode[i]
- if m.name == name then
- return m.action
- end
- end
- end
- end
- end
-end
-
-function constructors.newfeatures(what)
- local features = handlers[what].features
- if not features then
- local tables = handlers[what].tables -- can be preloaded
- features = allocate {
- defaults = { },
- descriptions = tables and tables.features or { },
- initializers = { base = { }, node = { } },
- processors = { base = { }, node = { } },
- manipulators = { base = { }, node = { } },
- }
- features.register = function(specification) return register(features,specification) end
- handlers[what].features = features -- will also become hidden
- end
- return features
-end
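A sketch of how a handler-specific feature would be registered through this interface, assuming the otf handler has been set up (font-otf does that); the feature name and its initializers are invented for illustration:

local otffeatures = constructors.newfeatures("otf")
otffeatures.register {
    name         = "demoslant",   -- hypothetical feature
    description  = "demo: slant the glyphs a little",
    initializers = {
        base = function(tfmdata,value) tfmdata.parameters.slantfactor = 0.2 end,
        node = function(tfmdata,value) tfmdata.parameters.slantfactor = 0.2 end,
    },
}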
-
---[[ldx--
-<p>We need to check for default features. For this we provide
-a helper function.</p>
---ldx]]--
-
-function constructors.checkedfeatures(what,features)
- local defaults = handlers[what].features.defaults
- if features and next(features) then
- features = fastcopy(features) -- can be inherited (mt) but then no loops possible
- for key, value in next, defaults do
- if features[key] == nil then
- features[key] = value
- end
- end
- return features
- else
- return fastcopy(defaults) -- we can change features in place
- end
-end
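For example, assuming (purely for illustration) that the otf defaults table contains mode = "node" and liga = true:

local checked = constructors.checkedfeatures("otf", { liga = false, smcp = true })
-- checked.smcp == true and checked.liga == false (user values win),
-- checked.mode == "node" (filled in from the defaults)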
-
--- before scaling
-
-function constructors.initializefeatures(what,tfmdata,features,trace,report)
- if features and next(features) then
- local properties = tfmdata.properties or { } -- brrr
- local whathandler = handlers[what]
- local whatfeatures = whathandler.features
- local whatinitializers = whatfeatures.initializers
- local whatmodechecker = whatfeatures.modechecker
- -- properties.mode can be enforced (for instance in font-otd)
- local mode = properties.mode or (whatmodechecker and whatmodechecker(tfmdata,features,features.mode)) or features.mode or "base"
- properties.mode = mode -- also status
- features.mode = mode -- both properties.mode or features.mode can be changed
- --
- local done = { }
- while true do
- local redo = false
- local initializers = whatfeatures.initializers[mode]
- if initializers then
- for i=1,#initializers do
- local step = initializers[i]
- local feature = step.name
--- we could intercept mode here .. needs a rewrite of this whole loop then but it's cleaner that way
- local value = features[feature]
- if not value then
- -- disabled
- elseif done[feature] then
- -- already done
- else
- local action = step.action
- if trace then
- report("initializing feature %s to %s for mode %s for font %s",feature,
- tostring(value),mode or 'unknown', tfmdata.properties.fullname or 'unknown')
- end
- action(tfmdata,value,features) -- can set mode (e.g. goodies) so it can trigger a restart
- if mode ~= properties.mode or mode ~= features.mode then
- if whatmodechecker then
- properties.mode = whatmodechecker(tfmdata,features,properties.mode) -- force checking
- features.mode = properties.mode
- end
- if mode ~= properties.mode then
- mode = properties.mode
- redo = true
- end
- end
- done[feature] = true
- end
- if redo then
- break
- end
- end
- if not redo then
- break
- end
- else
- break
- end
- end
- properties.mode = mode -- to be sure
- return true
- else
- return false
- end
-end
-
--- while typesetting
-
-function constructors.collectprocessors(what,tfmdata,features,trace,report)
- local processes, nofprocesses = { }, 0
- if features and next(features) then
- local properties = tfmdata.properties
- local whathandler = handlers[what]
- local whatfeatures = whathandler.features
- local whatprocessors = whatfeatures.processors
- local processors = whatprocessors[properties.mode]
- if processors then
- for i=1,#processors do
- local step = processors[i]
- local feature = step.name
- if features[feature] then
- local action = step.action
- if trace then
- report("installing feature processor %s for mode %s for font %s",feature,
- properties.mode or 'unknown', tfmdata.properties.fullname or 'unknown')
- end
- if action then
- nofprocesses = nofprocesses + 1
- processes[nofprocesses] = action
- end
- end
- end
- elseif trace then
- report("no feature processors for mode %s for font %s",
- properties.mode or 'unknown', tfmdata.properties.fullname or 'unknown')
- end
- end
- return processes
-end
-
--- after scaling
-
-function constructors.applymanipulators(what,tfmdata,features,trace,report)
- if features and next(features) then
- local properties = tfmdata.properties
- local whathandler = handlers[what]
- local whatfeatures = whathandler.features
- local whatmanipulators = whatfeatures.manipulators
- local manipulators = whatmanipulators[properties.mode]
- if manipulators then
- for i=1,#manipulators do
- local step = manipulators[i]
- local feature = step.name
- local value = features[feature]
- if value then
- local action = step.action
- if trace then
- report("applying feature manipulator %s for mode %s for font %s",feature,
- properties.mode or 'unknown', tfmdata.properties.fullname or 'unknown')
- end
- if action then
- action(tfmdata,feature,value)
- end
- end
- end
- end
- end
-end
diff --git a/otfl-font-def.lua b/otfl-font-def.lua
deleted file mode 100644
index 96de480..0000000
--- a/otfl-font-def.lua
+++ /dev/null
@@ -1,440 +0,0 @@
-if not modules then modules = { } end modules ['font-def'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local concat = table.concat
-local format, gmatch, match, find, lower, gsub = string.format, string.gmatch, string.match, string.find, string.lower, string.gsub
-local tostring, next = tostring, next
-local lpegmatch = lpeg.match
-
-local allocate = utilities.storage.allocate
-
-local trace_defining = false trackers .register("fonts.defining", function(v) trace_defining = v end)
-local directive_embedall = false directives.register("fonts.embedall", function(v) directive_embedall = v end)
-
-trackers.register("fonts.loading", "fonts.defining", "otf.loading", "afm.loading", "tfm.loading")
-trackers.register("fonts.all", "fonts.*", "otf.*", "afm.*", "tfm.*")
-
-local report_defining = logs.reporter("fonts","defining")
-
---[[ldx--
-<p>Here we deal with defining fonts. We do so by intercepting the
-default loader that only handles <l n='tfm'/>.</p>
---ldx]]--
-
-local fonts = fonts
-local fontdata = fonts.hashes.identifiers
-local readers = fonts.readers
-local definers = fonts.definers
-local specifiers = fonts.specifiers
-local constructors = fonts.constructors
-
-readers.sequence = allocate { 'otf', 'ttf', 'afm', 'tfm', 'lua' } -- dfont ttc
-
-local variants = allocate()
-specifiers.variants = variants
-
-definers.methods = definers.methods or { }
-
-local internalized = allocate() -- internal tex numbers (private)
-
-
-local loadedfonts = constructors.loadedfonts
-local designsizes = constructors.designsizes
-
---[[ldx--
-<p>We hardly gain anything when we cache the final (pre scaled)
-<l n='tfm'/> table. But it can be handy for debugging, so we no
-longer carry this code along. Also, we now have quite some references
-to other tables so we would end up with lots of catches.</p>
---ldx]]--
-
---[[ldx--
-<p>We can prefix a font specification by <type>name:</type> or
-<type>file:</type>. The first case will result in a lookup in the
-synonym table.</p>
-
-<typing>
-[ name: | file: ] identifier [ separator [ specification ] ]
-</typing>
-
-<p>The following function splits the font specification into components
-and prepares a table that will move along as we proceed.</p>
---ldx]]--
-
--- beware, we discard additional specs
---
--- method:name method:name(sub) method:name(sub)*spec method:name*spec
--- name name(sub) name(sub)*spec name*spec
--- name@spec*oeps
-
-local splitter, splitspecifiers = nil, ""
-
-local P, C, S, Cc = lpeg.P, lpeg.C, lpeg.S, lpeg.Cc
-
-local left = P("(")
-local right = P(")")
-local colon = P(":")
-local space = P(" ")
-
-definers.defaultlookup = "file"
-
-local prefixpattern = P(false)
-
-local function addspecifier(symbol)
- splitspecifiers = splitspecifiers .. symbol
- local method = S(splitspecifiers)
- local lookup = C(prefixpattern) * colon
- local sub = left * C(P(1-left-right-method)^1) * right
- local specification = C(method) * C(P(1)^1)
- local name = C((1-sub-specification)^1)
- splitter = P((lookup + Cc("")) * name * (sub + Cc("")) * (specification + Cc("")))
-end
-
-local function addlookup(str,default)
- prefixpattern = prefixpattern + P(str)
-end
-
-definers.addlookup = addlookup
-
-addlookup("file")
-addlookup("name")
-addlookup("spec")
-
-local function getspecification(str)
- return lpegmatch(splitter,str)
-end
-
-definers.getspecification = getspecification
-
-function definers.registersplit(symbol,action,verbosename)
- addspecifier(symbol)
- variants[symbol] = action
- if verbosename then
- variants[verbosename] = action
- end
-end
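Once a specifier such as ":" has been registered this way (font-ltx does so further down), the splitter takes a request apart like this (the font name is just an example):

local lookup, name, sub, method, detail =
    definers.getspecification("file:lmroman10-regular(1):+liga")
-- lookup == "file", name == "lmroman10-regular", sub == "1", method == ":", detail == "+liga"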
-
-function definers.makespecification(specification,lookup,name,sub,method,detail,size)
- size = size or 655360
- if trace_defining then
- report_defining("%s -> lookup: %s, name: %s, sub: %s, method: %s, detail: %s",
- specification, (lookup ~= "" and lookup) or "[file]", (name ~= "" and name) or "-",
- (sub ~= "" and sub) or "-", (method ~= "" and method) or "-", (detail ~= "" and detail) or "-")
- end
- if not lookup or lookup == "" then
- lookup = definers.defaultlookup
- end
- local t = {
- lookup = lookup, -- forced type
- specification = specification, -- full specification
- size = size, -- size in scaled points or -1000*n
- name = name, -- font or filename
- sub = sub, -- subfont (eg in ttc)
- method = method, -- specification method
- detail = detail, -- specification
- resolved = "", -- resolved font name
- forced = "", -- forced loader
- features = { }, -- preprocessed features
- }
- return t
-end
-
-function definers.analyze(specification, size)
- -- can be optimized with locals
- local lookup, name, sub, method, detail = getspecification(specification or "")
- return definers.makespecification(specification, lookup, name, sub, method, detail, size)
-end
-
---[[ldx--
-<p>We can resolve the filename using the next function:</p>
---ldx]]--
-
-definers.resolvers = definers.resolvers or { }
-local resolvers = definers.resolvers
-
--- todo: reporter
-
-function resolvers.file(specification)
- local suffix = file.suffix(specification.name)
- if fonts.formats[suffix] then
- specification.forced = suffix
- specification.name = file.removesuffix(specification.name)
- end
-end
-
-function resolvers.name(specification)
- local resolve = fonts.names.resolve
- if resolve then
- local resolved, sub = resolve(specification.name,specification.sub,specification) -- we pass specification for overloaded versions
- if resolved then
- specification.resolved = resolved
- specification.sub = sub
- local suffix = file.suffix(resolved)
- if fonts.formats[suffix] then
- specification.forced = suffix
- specification.name = file.removesuffix(resolved)
- else
- specification.name = resolved
- end
- end
- else
- resolvers.file(specification)
- end
-end
-
-function resolvers.spec(specification)
- local resolvespec = fonts.names.resolvespec
- if resolvespec then
- local resolved, sub = resolvespec(specification.name,specification.sub,specification) -- we pass specification for overloaded versions
- if resolved then
- specification.resolved = resolved
- specification.sub = sub
- specification.forced = file.extname(resolved)
- specification.name = file.removesuffix(resolved)
- end
- else
- resolvers.name(specification)
- end
-end
-
-function definers.resolve(specification)
- if not specification.resolved or specification.resolved == "" then -- resolved itself not per se in mapping hash
- local r = resolvers[specification.lookup]
- if r then
- r(specification)
- end
- end
- if specification.forced == "" then
- specification.forced = nil
- else
- specification.forced = specification.forced
- end
- specification.hash = lower(specification.name .. ' @ ' .. constructors.hashfeatures(specification))
- if specification.sub and specification.sub ~= "" then
- specification.hash = specification.sub .. ' @ ' .. specification.hash
- end
- return specification
-end
-
---[[ldx--
-<p>The main read function either uses a forced reader (as determined by
-a lookup) or tries to resolve the name using the list of readers.</p>
-
-<p>We need to cache when possible. We do cache raw tfm data (from <l
-n='tfm'/>, <l n='afm'/> or <l n='otf'/>). After that we can cache based
-on specification (name) and size, that is, <l n='tex'/> only needs a number
-for an already loaded font. However, it may make sense to cache fonts
-before they're scaled as well (store <l n='tfm'/>'s with applied methods
-and features). However, there may be a relation between the size and
-features (esp in virtual fonts) so let's not do that now.</p>
-
-<p>Watch out, here we do load a font, but we don't prepare the
-specification yet.</p>
---ldx]]--
-
--- very experimental:
-
-function definers.applypostprocessors(tfmdata)
- local postprocessors = tfmdata.postprocessors
- if postprocessors then
- for i=1,#postprocessors do
- local extrahash = postprocessors[i](tfmdata) -- after scaling etc
- if type(extrahash) == "string" and extrahash ~= "" then
- -- e.g. a reencoding needs this
- extrahash = gsub(lower(extrahash),"[^a-z]","-")
- tfmdata.properties.fullname = format("%s-%s",tfmdata.properties.fullname,extrahash)
- end
- end
- end
- return tfmdata
-end
-
--- function definers.applypostprocessors(tfmdata)
--- return tfmdata
--- end
-
-local function checkembedding(tfmdata)
- local properties = tfmdata.properties
- local embedding
- if directive_embedall then
- embedding = "full"
- elseif properties and properties.filename and constructors.dontembed[properties.filename] then
- embedding = "no"
- else
- embedding = "subset"
- end
- if properties then
- properties.embedding = embedding
- else
- tfmdata.properties = { embedding = embedding }
- end
- tfmdata.embedding = embedding
-end
-
-function definers.loadfont(specification)
- local hash = constructors.hashinstance(specification)
- local tfmdata = loadedfonts[hash] -- hashes by size !
- if not tfmdata then
- local forced = specification.forced or ""
- if forced ~= "" then
- local reader = readers[lower(forced)]
- tfmdata = reader and reader(specification)
- if not tfmdata then
- report_defining("forced type %s of %s not found",forced,specification.name)
- end
- else
- local sequence = readers.sequence -- can be overloaded so only a shortcut here
- for s=1,#sequence do
- local reader = sequence[s]
- if readers[reader] then -- we skip not loaded readers
- if trace_defining then
- report_defining("trying (reader sequence driven) type %s for %s with file %s",reader,specification.name,specification.filename or "unknown")
- end
- tfmdata = readers[reader](specification)
- if tfmdata then
- break
- else
- specification.filename = nil
- end
- end
- end
- end
- if tfmdata then
- tfmdata = definers.applypostprocessors(tfmdata)
- checkembedding(tfmdata) -- todo: general postprocessor
- loadedfonts[hash] = tfmdata
- designsizes[specification.hash] = tfmdata.parameters.designsize
- end
- end
- if not tfmdata then
- report_defining("font with asked name '%s' is not found using lookup '%s'",specification.name,specification.lookup)
- end
- return tfmdata
-end
-
---[[ldx--
-<p>For virtual fonts we need a slightly different approach:</p>
---ldx]]--
-
-function constructors.readanddefine(name,size) -- no id -- maybe a dummy first
- local specification = definers.analyze(name,size)
- local method = specification.method
- if method and variants[method] then
- specification = variants[method](specification)
- end
- specification = definers.resolve(specification)
- local hash = constructors.hashinstance(specification)
- local id = definers.registered(hash)
- if not id then
- local tfmdata = definers.loadfont(specification)
- if tfmdata then
- tfmdata.properties.hash = hash
- id = font.define(tfmdata)
- definers.register(tfmdata,id)
- else
- id = 0 -- signal
- end
- end
- return fontdata[id], id
-end
-
---[[ldx--
-<p>So far the specifiers. Now comes the real definer. Here we cache
-based on id's. Here we also intercept the virtual font handler. Since
-it evolved stepwise I may rewrite this bit (combine code).</p>
-
-<p>In the previously defined reader (the one resulting in a <l n='tfm'/>
-table) we cached the (scaled) instances. Here we cache them again, but
-this time based on id. We could combine this in one cache but this does
-not gain much. By the way, passing id's back in the callback was
-introduced later in the development.</p>
---ldx]]--
-
-local lastdefined = nil -- we don't want this one to end up in s-tra-02
-local internalized = { }
-
-function definers.current() -- or maybe current
- return lastdefined
-end
-
-function definers.registered(hash)
- local id = internalized[hash]
- return id, id and fontdata[id]
-end
-
-function definers.register(tfmdata,id)
- if tfmdata and id then
- local hash = tfmdata.properties.hash
- if not internalized[hash] then
- internalized[hash] = id
- if trace_defining then
- report_defining("registering font, id: %s, hash: %s",id or "?",hash or "?")
- end
- fontdata[id] = tfmdata
- end
- end
-end
-
-function definers.read(specification,size,id) -- id can be optional, name can already be table
- statistics.starttiming(fonts)
- if type(specification) == "string" then
- specification = definers.analyze(specification,size)
- end
- local method = specification.method
- if method and variants[method] then
- specification = variants[method](specification)
- end
- specification = definers.resolve(specification)
- local hash = constructors.hashinstance(specification)
- local tfmdata = definers.registered(hash) -- id
- if tfmdata then
- if trace_defining then
- report_defining("already hashed: %s",hash)
- end
- else
- tfmdata = definers.loadfont(specification) -- can be overloaded
- if tfmdata then
- if trace_defining then
- report_defining("loaded and hashed: %s",hash)
- end
- --~ constructors.checkvirtualid(tfmdata) -- interferes
- tfmdata.properties.hash = hash
- if id then
- definers.register(tfmdata,id)
- end
- else
- if trace_defining then
- report_defining("not loaded and hashed: %s",hash)
- end
- end
- end
- lastdefined = tfmdata or id -- todo ! ! ! ! !
- if not tfmdata then -- or id?
- report_defining( "unknown font %s, loading aborted",specification.name)
- elseif trace_defining and type(tfmdata) == "table" then
- local properties = tfmdata.properties or { }
- local parameters = tfmdata.parameters or { }
- report_defining("using %s font with id %s, name:%s size:%s bytes:%s encoding:%s fullname:%s filename:%s",
- properties.format or "unknown",
- id or "?",
- properties.name or "?",
- parameters.size or "default",
- properties.encodingbytes or "?",
- properties.encodingname or "unicode",
- properties.fullname or "?",
- file.basename(properties.filename or "?"))
- end
- statistics.stoptiming(fonts)
- return tfmdata
-end
-
---[[ldx--
-<p>We overload the <l n='tfm'/> reader.</p>
---ldx]]--
-
-callbacks.register('define_font', definers.read, "definition of fonts (tfmdata preparation)")
diff --git a/otfl-font-ini.lua b/otfl-font-ini.lua
deleted file mode 100644
index 8eeba0c..0000000
--- a/otfl-font-ini.lua
+++ /dev/null
@@ -1,38 +0,0 @@
-if not modules then modules = { } end modules ['font-ini'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- basemethods -> can also be in list
--- presetcontext -> defaults
--- hashfeatures -> ctx version
-
---[[ldx--
-<p>Not much is happening here.</p>
---ldx]]--
-
-local lower = string.lower
-local allocate, mark = utilities.storage.allocate, utilities.storage.mark
-
-local report_defining = logs.reporter("fonts","defining")
-
-fontloader.totable = fontloader.to_table
-
-fonts = fonts or { } -- already defined in context
-local fonts = fonts
-
--- some of these might move to where they are used first:
-
-fonts.hashes = { identifiers = allocate() }
-fonts.analyzers = { } -- not needed here
-fonts.readers = { }
-fonts.tables = { }
-fonts.definers = { methods = { } }
-fonts.specifiers = fonts.specifiers or { } -- in format !
-fonts.loggers = { register = function() end }
-fonts.helpers = { }
-
-fonts.tracers = { } -- for the moment till we have move to moduledata
diff --git a/otfl-font-ltx.lua b/otfl-font-ltx.lua
deleted file mode 100644
index 2025db5..0000000
--- a/otfl-font-ltx.lua
+++ /dev/null
@@ -1,195 +0,0 @@
-if not modules then modules = { } end modules ['font-ltx'] = {
- version = 1.001,
- comment = "companion to luatex-*.tex",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local fonts = fonts
-
--- A bit of tuning for definitions.
-
-fonts.constructors.namemode = "specification" -- somehow latex needs this (changed name!) => will change into an overload
-
--- tricky: we sort of bypass the parser and directly feed all into
--- the sub parser
-
-function fonts.definers.getspecification(str)
- return "", str, "", ":", str
-end
-
--- the generic name parser (different from context!)
-
-local list = { }
-
-local function isstyle(s)
- local style = string.lower(s):split("/")
- for _,v in next, style do
- if v == "b" then
- list.style = "bold"
- elseif v == "i" then
- list.style = "italic"
- elseif v == "bi" or v == "ib" then
- list.style = "bolditalic"
- elseif v:find("^s=") then
- list.optsize = v:split("=")[2]
- elseif v == "aat" or v == "icu" or v == "gr" then
- logs.report("load font", "unsupported font option: %s", v)
- elseif not v:is_empty() then
- list.style = v:gsub("[^%a%d]", "")
- end
- end
-end
-
-local defaults = {
- dflt = {
- "ccmp", "locl", "rlig", "liga", "clig",
- "kern", "mark", "mkmk", 'itlc',
- },
- arab = {
- "ccmp", "locl", "isol", "fina", "fin2",
- "fin3", "medi", "med2", "init", "rlig",
- "calt", "liga", "cswh", "mset", "curs",
- "kern", "mark", "mkmk",
- },
- deva = {
- "ccmp", "locl", "init", "nukt", "akhn",
- "rphf", "blwf", "half", "pstf", "vatu",
- "pres", "blws", "abvs", "psts", "haln",
- "calt", "blwm", "abvm", "dist", "kern",
- "mark", "mkmk",
- },
- khmr = {
- "ccmp", "locl", "pref", "blwf", "abvf",
- "pstf", "pres", "blws", "abvs", "psts",
- "clig", "calt", "blwm", "abvm", "dist",
- "kern", "mark", "mkmk",
- },
- thai = {
- "ccmp", "locl", "liga", "kern", "mark",
- "mkmk",
- },
- hang = {
- "ccmp", "ljmo", "vjmo", "tjmo",
- },
-}
-
-defaults.beng = defaults.deva
-defaults.guru = defaults.deva
-defaults.gujr = defaults.deva
-defaults.orya = defaults.deva
-defaults.taml = defaults.deva
-defaults.telu = defaults.deva
-defaults.knda = defaults.deva
-defaults.mlym = defaults.deva
-defaults.sinh = defaults.deva
-
-defaults.syrc = defaults.arab
-defaults.mong = defaults.arab
-defaults.nko = defaults.arab
-
-defaults.tibt = defaults.khmr
-
-defaults.lao = defaults.thai
-
-local function set_default_features(script)
- local features
- local script = script or "dflt"
- logs.report("load font", "auto-selecting default features for script: %s", script)
- if defaults[script] then
- features = defaults[script]
- else
- features = defaults["dflt"]
- end
- for _,v in next, features do
- if list[v] ~= false then
- list[v] = true
- end
- end
-end
-
-local function issome () list.lookup = 'name' end
-local function isfile () list.lookup = 'file' end
-local function isname () list.lookup = 'name' end
-local function thename(s) list.name = s end
-local function issub (v) list.sub = v end
-local function istrue (s) list[s] = true end
-local function isfalse(s) list[s] = false end
-local function iskey (k,v) list[k] = v end
-
-local P, S, R, C = lpeg.P, lpeg.S, lpeg.R, lpeg.C
-
-local spaces = P(" ")^0
-local namespec = (1-S("/:("))^0 -- was: (1-S("/: ("))^0
-local filespec = (R("az", "AZ") * P(":"))^-1 * (1-S(":("))^1
-local stylespec = spaces * P("/") * (((1-P(":"))^0)/isstyle) * spaces
-local filename = (P("file:")/isfile * (filespec/thename)) + (P("[") * P(true)/isname * (((1-P("]"))^0)/thename) * P("]"))
-local fontname = (P("name:")/isname * (namespec/thename)) + P(true)/issome * (namespec/thename)
-local sometext = (R("az","AZ","09") + S("+-.,"))^1
-local truevalue = P("+") * spaces * (sometext/istrue)
-local falsevalue = P("-") * spaces * (sometext/isfalse)
-local keyvalue = P("+") + (C(sometext) * spaces * P("=") * spaces * C(sometext))/iskey
-local somevalue = sometext/istrue
-local subvalue = P("(") * (C(P(1-S("()"))^1)/issub) * P(")") -- for Kim
-local option = spaces * (keyvalue + falsevalue + truevalue + somevalue) * spaces
-local options = P(":") * spaces * (P(";")^0 * option)^0
-local pattern = (filename + fontname) * subvalue^0 * stylespec^0 * options^0
-
-local function colonized(specification) -- xetex mode
- list = { }
- lpeg.match(pattern,specification.specification)
- set_default_features(list.script)
- if list.style then
- specification.style = list.style
- list.style = nil
- end
- if list.optsize then
- specification.optsize = list.optsize
- list.optsize = nil
- end
- if list.name then
- if resolvers.findfile(list.name, "tfm") then
- list.lookup = "file"
- list.name = file.addsuffix(list.name, "tfm")
- elseif resolvers.findfile(list.name, "ofm") then
- list.lookup = "file"
- list.name = file.addsuffix(list.name, "ofm")
- end
-
- specification.name = list.name
- list.name = nil
- end
- if list.lookup then
- specification.lookup = list.lookup
- list.lookup = nil
- end
- if list.sub then
- specification.sub = list.sub
- list.sub = nil
- end
- if not list.mode then
- -- if no mode is set, use our default
- list.mode = fonts.mode
- end
- specification.features.normal = fonts.handlers.otf.features.normalize(list)
- return specification
-end
-
-fonts.definers.registersplit(":",colonized,"cryptic")
-fonts.definers.registersplit("", colonized,"more cryptic") -- catches \font\text=[names]
-
-function fonts.definers.applypostprocessors(tfmdata)
- local postprocessors = tfmdata.postprocessors
- if postprocessors then
- for i=1,#postprocessors do
- local extrahash = postprocessors[i](tfmdata) -- after scaling etc
- if type(extrahash) == "string" and extrahash ~= "" then
- -- e.g. a reencoding needs this
- extrahash = string.gsub(lower(extrahash),"[^a-z]","-")
- tfmdata.properties.fullname = format("%s-%s",tfmdata.properties.fullname,extrahash)
- end
- end
- end
- return tfmdata
-end
diff --git a/otfl-font-map.lua b/otfl-font-map.lua
deleted file mode 100644
index 7f5305f..0000000
--- a/otfl-font-map.lua
+++ /dev/null
@@ -1,307 +0,0 @@
-if not modules then modules = { } end modules ['font-map'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local match, format, find, concat, gsub, lower = string.match, string.format, string.find, table.concat, string.gsub, string.lower
-local P, R, S, C, Ct, Cc, lpegmatch = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cc, lpeg.match
-local utfbyte = utf.byte
-
-local trace_loading = false trackers.register("fonts.loading", function(v) trace_loading = v end)
-local trace_mapping = false trackers.register("fonts.mapping", function(v) trace_mapping = v end)
-
-local report_fonts = logs.reporter("fonts","loading") -- not otf only
-
-local fonts = fonts
-local mappings = { }
-fonts.mappings = mappings
-
---[[ldx--
-<p>Eventually this code will disappear because map files are kind
-of obsolete. Some code may move to runtime or auxiliary modules.</p>
-<p>The name-to-unicode related code will stay of course.</p>
---ldx]]--
-
-local function loadlumtable(filename) -- will move to font goodies
- local lumname = file.replacesuffix(file.basename(filename),"lum")
- local lumfile = resolvers.findfile(lumname,"map") or ""
- if lumfile ~= "" and lfs.isfile(lumfile) then
- if trace_loading or trace_mapping then
- report_fonts("enhance: loading %s ",lumfile)
- end
- lumunic = dofile(lumfile)
- return lumunic, lumfile
- end
-end
-
-local hex = R("AF","09")
-local hexfour = (hex*hex*hex*hex) / function(s) return tonumber(s,16) end
-local hexsix = (hex^1) / function(s) return tonumber(s,16) end
-local dec = (R("09")^1) / tonumber
-local period = P(".")
-local unicode = P("uni") * (hexfour * (period + P(-1)) * Cc(false) + Ct(hexfour^1) * Cc(true))
-local ucode = P("u") * (hexsix * (period + P(-1)) * Cc(false) + Ct(hexsix ^1) * Cc(true))
-local index = P("index") * dec * Cc(false)
-
-local parser = unicode + ucode + index
-
-local parsers = { }
-
-local function makenameparser(str)
- if not str or str == "" then
- return parser
- else
- local p = parsers[str]
- if not p then
- p = P(str) * period * dec * Cc(false)
- parsers[str] = p
- end
- return p
- end
-end
-
---~ local parser = mappings.makenameparser("Japan1")
---~ local parser = mappings.makenameparser()
---~ local function test(str)
---~ local b, a = lpegmatch(parser,str)
---~ print((a and table.serialize(b)) or b)
---~ end
---~ test("a.sc")
---~ test("a")
---~ test("uni1234")
---~ test("uni1234.xx")
---~ test("uni12349876")
---~ test("index1234")
---~ test("Japan1.123")
-
-local function tounicode16(unicode)
- if unicode < 0x10000 then
- return format("%04X",unicode)
- else
- return format("%04X%04X",unicode/1024+0xD800,unicode%1024+0xDC00)
- end
-end
-
-local function tounicode16sequence(unicodes)
- local t = { }
- for l=1,#unicodes do
- local unicode = unicodes[l]
- if unicode < 0x10000 then
- t[l] = format("%04X",unicode)
- else
- t[l] = format("%04X%04X",unicode/1024+0xD800,unicode%1024+0xDC00)
- end
- end
- return concat(t)
-end
-
-local function fromunicode16(str)
- if #str == 4 then
- return tonumber(str,16)
- else
- local l, r = match(str,"(....)(....)")
- return (tonumber(l,16)- 0xD800)*0x400 + tonumber(r,16) - 0xDC00
- end
-end
-
---~ This is quite a bit faster, at the cost of some memory, but if we
---~ do this we will also use it elsewhere, so let's not follow this route
---~ now. I might use this method in the plain variant (no caching there),
---~ but then I need a flag that distinguishes between the code branches.
---~
---~ local cache = { }
---~
---~ function mappings.tounicode16(unicode)
---~ local s = cache[unicode]
---~ if not s then
---~ if unicode < 0x10000 then
---~ s = format("%04X",unicode)
---~ else
---~ s = format("%04X%04X",unicode/1024+0xD800,unicode%1024+0xDC00)
---~ end
---~ cache[unicode] = s
---~ end
---~ return s
---~ end
-
-mappings.loadlumtable = loadlumtable
-mappings.makenameparser = makenameparser
-mappings.tounicode16 = tounicode16
-mappings.tounicode16sequence = tounicode16sequence
-mappings.fromunicode16 = fromunicode16
-
-local separator = S("_.")
-local other = C((1 - separator)^1)
-local ligsplitter = Ct(other * (separator * other)^0)
-
---~ print(table.serialize(lpegmatch(ligsplitter,"this")))
---~ print(table.serialize(lpegmatch(ligsplitter,"this.that")))
---~ print(table.serialize(lpegmatch(ligsplitter,"japan1.123")))
---~ print(table.serialize(lpegmatch(ligsplitter,"such_so_more")))
---~ print(table.serialize(lpegmatch(ligsplitter,"such_so_more.that")))
-
-function mappings.addtounicode(data,filename)
- local resources = data.resources
- local properties = data.properties
- local descriptions = data.descriptions
- local unicodes = resources.unicodes
- if not unicodes then
- return
- end
- -- we need to move this code
- unicodes['space'] = unicodes['space'] or 32
- unicodes['hyphen'] = unicodes['hyphen'] or 45
- unicodes['zwj'] = unicodes['zwj'] or 0x200D
- unicodes['zwnj'] = unicodes['zwnj'] or 0x200C
- -- the tounicode mapping is sparse and only needed for alternatives
- local private = fonts.constructors.privateoffset
- local unknown = format("%04X",utfbyte("?"))
- local unicodevector = fonts.encodings.agl.unicodes -- loaded runtime in context
- local tounicode = { }
- local originals = { }
- resources.tounicode = tounicode
- resources.originals = originals
- local lumunic, uparser, oparser
- local cidinfo, cidnames, cidcodes, usedmap
- if false then -- will become an option
- lumunic = loadlumtable(filename)
- lumunic = lumunic and lumunic.tounicode
- end
- --
- cidinfo = properties.cidinfo
- usedmap = cidinfo and fonts.cid.getmap(cidinfo)
- --
- if usedmap then
- oparser = usedmap and makenameparser(cidinfo.ordering)
- cidnames = usedmap.names
- cidcodes = usedmap.unicodes
- end
- uparser = makenameparser()
- local ns, nl = 0, 0
- for unic, glyph in next, descriptions do
- local index = glyph.index
- local name = glyph.name
- if unic == -1 or unic >= private or (unic >= 0xE000 and unic <= 0xF8FF) or unic == 0xFFFE or unic == 0xFFFF then
- local unicode = lumunic and lumunic[name] or unicodevector[name]
- if unicode then
- originals[index] = unicode
- tounicode[index] = tounicode16(unicode)
- ns = ns + 1
- end
- -- cidmap heuristics, beware, there is no guarantee for a match unless
- -- the chain resolves
- if (not unicode) and usedmap then
- local foundindex = lpegmatch(oparser,name)
- if foundindex then
- unicode = cidcodes[foundindex] -- name to number
- if unicode then
- originals[index] = unicode
- tounicode[index] = tounicode16(unicode)
- ns = ns + 1
- else
- local reference = cidnames[foundindex] -- number to name
- if reference then
- local foundindex = lpegmatch(oparser,reference)
- if foundindex then
- unicode = cidcodes[foundindex]
- if unicode then
- originals[index] = unicode
- tounicode[index] = tounicode16(unicode)
- ns = ns + 1
- end
- end
- if not unicode then
- local foundcodes, multiple = lpegmatch(uparser,reference)
- if foundcodes then
- originals[index] = foundcodes
- if multiple then
- tounicode[index] = tounicode16sequence(foundcodes)
- nl = nl + 1
- unicode = true
- else
- tounicode[index] = tounicode16(foundcodes)
- ns = ns + 1
- unicode = foundcodes
- end
- end
- end
- end
- end
- end
- end
- -- a.whatever or a_b_c.whatever or a_b_c (no numbers)
- if not unicode then
- local split = lpegmatch(ligsplitter,name)
- local nplit = split and #split or 0
- if nplit >= 2 then
- local t, n = { }, 0
- for l=1,nplit do
- local base = split[l]
- local u = unicodes[base] or unicodevector[base]
- if not u then
- break
- elseif type(u) == "table" then
- n = n + 1
- t[n] = u[1]
- else
- n = n + 1
- t[n] = u
- end
- end
- if n == 0 then -- done then
- -- nothing
- elseif n == 1 then
- originals[index] = t[1]
- tounicode[index] = tounicode16(t[1])
- else
- originals[index] = t
- tounicode[index] = tounicode16sequence(t)
- end
- nl = nl + 1
- unicode = true
- else
- -- skip: already checked and we don't want privates here
- end
- end
- -- last resort (we might need to catch private here as well)
- if not unicode then
- local foundcodes, multiple = lpegmatch(uparser,name)
- if foundcodes then
- if multiple then
- originals[index] = foundcodes
- tounicode[index] = tounicode16sequence(foundcodes)
- nl = nl + 1
- unicode = true
- else
- originals[index] = foundcodes
- tounicode[index] = tounicode16(foundcodes)
- ns = ns + 1
- unicode = foundcodes
- end
- end
- end
- -- if not unicode then
- -- originals[index] = 0xFFFD
- -- tounicode[index] = "FFFD"
- -- end
- end
- end
- if trace_mapping then
- for unic, glyph in table.sortedhash(descriptions) do
- local name = glyph.name
- local index = glyph.index
- local toun = tounicode[index]
- if toun then
- report_fonts("internal: 0x%05X, name: %s, unicode: U+%05X, tounicode: %s",index,name,unic,toun)
- else
- report_fonts("internal: 0x%05X, name: %s, unicode: U+%05X",index,name,unic)
- end
- end
- end
- if trace_loading and (ns > 0 or nl > 0) then
- report_fonts("enhance: %s tounicode entries added (%s ligatures)",nl+ns, nl)
- end
-end
diff --git a/otfl-font-nms.lua b/otfl-font-nms.lua
deleted file mode 100644
index 8290e17..0000000
--- a/otfl-font-nms.lua
+++ /dev/null
@@ -1,770 +0,0 @@
-if not modules then modules = { } end modules ['font-nms'] = {
- version = 1.002,
- comment = "companion to luaotfload.lua",
- author = "Khaled Hosny and Elie Roux",
- copyright = "Luaotfload Development Team",
- license = "GNU GPL v2"
-}
-
-fonts = fonts or { }
-fonts.names = fonts.names or { }
-
-local names = fonts.names
-local names_dir = "luatex-cache/generic/names"
-names.version = 2.010 -- not the same as in context
-names.data = nil
-names.path = {
- basename = "otfl-names.lua",
- dir = file.join(kpse.expand_var("$TEXMFVAR"), names_dir),
-}
-
-local success = pcall(require, "luatexbase.modutils")
-if success then
- success = pcall(luatexbase.require_module, "lualatex-platform", "2011/03/30")
-end
-local get_installed_fonts
-if success then
- get_installed_fonts = lualatex.platform.get_installed_fonts
-else
- function get_installed_fonts()
- end
-end
-
-local splitpath, expandpath = file.split_path, kpse.expand_path
-local glob, basename = dir.glob, file.basename
-local extname = file.extname
-local upper, lower, format = string.upper, string.lower, string.format
-local gsub, match, rpadd = string.gsub, string.match, string.rpadd
-local gmatch, sub, find = string.gmatch, string.sub, string.find
-local utfgsub = unicode.utf8.gsub
-
-local trace_short = false --tracing adapted to rebuilding of the database inside a document
-local trace_search = false --trackers.register("names.search", function(v) trace_search = v end)
-local trace_loading = false --trackers.register("names.loading", function(v) trace_loading = v end)
-
-local function sanitize(str)
- if str then
- return utfgsub(lower(str), "[^%a%d]", "")
- else
- return str -- nil
- end
-end
-
-local function fontnames_init()
- return {
- mappings = { },
- status = { },
- version = names.version,
- }
-end
-
-local function make_name(path)
- return file.replacesuffix(path, "lua"), file.replacesuffix(path, "luc")
-end
-
-local function load_names()
- local path = file.join(names.path.dir, names.path.basename)
- local luaname, lucname = make_name(path)
- local foundname
- local data
- if file.isreadable(lucname) then
- data = dofile(lucname)
- foundname = lucname
- elseif file.isreadable(luaname) then
- data = dofile(luaname)
- foundname = luaname
- end
- if data then
- logs.info("Font names database loaded", "%s", foundname)
- else
- logs.info([[Font names database not found, generating new one.
- This can take several minutes; please be patient.]])
- data = names.update(fontnames_init())
- names.save(data)
- end
- texio.write_nl("")
- return data
-end
-
-local synonyms = {
- regular = { "normal", "roman", "plain", "book", "medium" },
- bold = { "boldregular", "demi", "demibold" },
- italic = { "regularitalic", "normalitalic", "oblique", "slanted" },
- bolditalic = { "boldoblique", "boldslanted", "demiitalic", "demioblique", "demislanted", "demibolditalic" },
-}
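-
--- Example (sketch): a request for style "bold" also accepts a face whose
--- subfamily reads "Demi Bold", since sanitize() reduces both to "demibold",
--- one of the synonyms listed above.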
-
-local loaded = false
-local reloaded = false
-
-function names.resolve(_,_,specification) -- the 1st two parameters are used by ConTeXt
- local name = sanitize(specification.name)
- local style = sanitize(specification.style) or "regular"
-
- local size
- if specification.optsize then
- size = tonumber(specification.optsize)
- elseif specification.size then
- size = specification.size / 65536
- end
-
-
- if not loaded then
- names.data = names.load()
- loaded = true
- end
-
- local data = names.data
- if type(data) == "table" and data.version == names.version then
- if data.mappings then
- local found = { }
- for _,face in next, data.mappings do
- local family = sanitize(face.names.family)
- local subfamily = sanitize(face.names.subfamily)
- local fullname = sanitize(face.names.fullname)
- local psname = sanitize(face.names.psname)
- local fontname = sanitize(face.fontname)
- local pfullname = sanitize(face.fullname)
- local optsize, dsnsize, maxsize, minsize
- if #face.size > 0 then
- optsize = face.size
- dsnsize = optsize[1] and optsize[1] / 10
- -- can be nil
- maxsize = optsize[2] and optsize[2] / 10 or dsnsize
- minsize = optsize[3] and optsize[3] / 10 or dsnsize
- end
- if name == family then
- if subfamily == style then
- if optsize then
- if dsnsize == size
- or (size > minsize and size <= maxsize) then
- found[1] = face
- break
- else
- found[#found+1] = face
- end
- else
- found[1] = face
- break
- end
- elseif synonyms[style] and
- table.contains(synonyms[style], subfamily) then
- if optsize then
- if dsnsize == size
- or (size > minsize and size <= maxsize) then
- found[1] = face
- break
- else
- found[#found+1] = face
- end
- else
- found[1] = face
- break
- end
- elseif subfamily == "regular" or
- table.contains(synonyms.regular, subfamily) then
- found.fallback = face
- end
- else
- if name == fullname
- or name == pfullname
- or name == fontname
- or name == psname then
- if optsize then
- if dsnsize == size
- or (size > minsize and size <= maxsize) then
- found[1] = face
- break
- else
- found[#found+1] = face
- end
- else
- found[1] = face
- break
- end
- end
- end
- end
- if #found == 1 then
- if kpse.lookup(found[1].filename[1]) then
- logs.report("load font",
- "font family='%s', subfamily='%s' found: %s",
- name, style, found[1].filename[1])
- return found[1].filename[1], found[1].filename[2]
- end
- elseif #found > 1 then
- -- we found matching font(s) but not in the requested optical
- -- sizes, so we loop through the matches to find the one with
- -- least difference from the requested size.
- local closest
- local least = math.huge -- initial value is infinity
- for i,face in next, found do
- local dsnsize = face.size[1]/10
- local difference = math.abs(dsnsize-size)
- if difference < least then
- closest = face
- least = difference
- end
- end
- if kpse.lookup(closest.filename[1]) then
- logs.report("load font",
- "font family='%s', subfamily='%s' found: %s",
- name, style, closest.filename[1])
- return closest.filename[1], closest.filename[2]
- end
- elseif found.fallback then
- return found.fallback.filename[1], found.fallback.filename[2]
- end
- -- no font found so far
- if not reloaded then
- -- try reloading the database
- names.data = names.update(names.data)
- names.save(names.data)
- reloaded = true
- return names.resolve(_,_,specification)
- else
- -- else, fallback to filename
- -- XXX: specification.name is empty with absolute paths, looks
- -- like a bug in the specification parser
- return specification.name, false
- end
- end
- else
- if not reloaded then
- names.data = names.update()
- names.save(names.data)
- reloaded = true
- return names.resolve(_,_,specification)
- else
- return specification.name, false
- end
- end
-end
-
-names.resolvespec = names.resolve
-
-function names.set_log_level(level)
- if level == 2 then
- trace_loading = true
- elseif level >= 3 then
- trace_loading = true
- trace_search = true
- end
-end
-
-local lastislog = 0
-
-local function log(category, fmt, ...)
- lastislog = 1
- if fmt then
- texio.write_nl(format("luaotfload | %s: %s", category, format(fmt, ...)))
- elseif category then
- texio.write_nl(format("luaotfload | %s", category))
- else
- texio.write_nl(format("luaotfload |"))
- end
- io.flush()
-end
-
-logs = logs or { }
-logs.report = logs.report or log
-logs.info = logs.info or log
-
-local function font_fullinfo(filename, subfont, texmf)
- local t = { }
- local f = fontloader.open(filename, subfont)
- if not f then
- if trace_loading then
- logs.report("error", "failed to open %s", filename)
- end
- return
- end
- local m = fontloader.to_table(f)
- fontloader.close(f)
- collectgarbage('collect')
- -- see http://www.microsoft.com/typography/OTSPEC/features_pt.htm#size
- if m.fontstyle_name then
- for _,v in next, m.fontstyle_name do
- if v.lang == 1033 then
- t.fontstyle_name = v.name
- end
- end
- end
- if m.names then
- for _,v in next, m.names do
- if v.lang == "English (US)" then
- t.names = {
- -- see
- -- http://developer.apple.com/textfonts/
- -- TTRefMan/RM06/Chap6name.html
- fullname = v.names.compatfull or v.names.fullname,
- family = v.names.preffamilyname or v.names.family,
- subfamily= t.fontstyle_name or v.names.prefmodifiers or v.names.subfamily,
- psname = v.names.postscriptname
- }
- end
- end
- else
- -- no names table, probably a broken font
- if trace_loading then
- logs.report("broken font rejected", "%s", filename)
- end
- return
- end
- t.fontname = m.fontname
- t.fullname = m.fullname
- t.familyname = m.familyname
- t.filename = { texmf and basename(filename) or filename, subfont }
- t.weight = m.pfminfo.weight
- t.width = m.pfminfo.width
- t.slant = m.italicangle
- -- don't waste the space with zero values
- t.size = {
- m.design_size ~= 0 and m.design_size or nil,
- m.design_range_top ~= 0 and m.design_range_top or nil,
- m.design_range_bottom ~= 0 and m.design_range_bottom or nil,
- }
- return t
-end
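-
--- Shape of the table returned above (all values are illustrative placeholders):
---
---   {
---     fontname   = "FooSerif-Regular",
---     fullname   = "FooSerif-Regular",
---     familyname = "Foo Serif",
---     names      = { fullname = ..., family = ..., subfamily = ..., psname = ... },
---     filename   = { "fooserif-regular.otf", false }, -- false: not a subfont of a collection
---     weight     = 400, width = 5, slant = 0,
---     size       = { }, -- stays empty unless the font declares design sizes
---   }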
-
-local function load_font(filename, fontnames, newfontnames, texmf)
- local newmappings = newfontnames.mappings
- local newstatus = newfontnames.status
- local mappings = fontnames.mappings
- local status = fontnames.status
- local basefile = texmf and basename(filename) or filename
- if filename then
- if names.blacklist[filename] or
- names.blacklist[basename(filename)] then
- if trace_search then
- logs.report("ignoring font", "%s", filename)
- end
- return
- end
- local timestamp, db_timestamp
- db_timestamp = status[basefile] and status[basefile].timestamp
- timestamp = lfs.attributes(filename, "modification")
-
- local index_status = newstatus[basefile] or (not texmf and newstatus[basename(filename)])
- if index_status and index_status.timestamp == timestamp then
- -- already indexed this run
- return
- end
-
- newstatus[basefile] = newstatus[basefile] or { }
- newstatus[basefile].timestamp = timestamp
- newstatus[basefile].index = newstatus[basefile].index or { }
-
- if db_timestamp == timestamp and not newstatus[basefile].index[1] then
- for _,v in next, status[basefile].index do
- local index = #newstatus[basefile].index
- newmappings[#newmappings+1] = mappings[v]
- newstatus[basefile].index[index+1] = #newmappings
- end
- if trace_loading then
- logs.report("font already indexed", "%s", basefile)
- end
- return
- end
- local info = fontloader.info(filename)
- if info then
- if type(info) == "table" and #info > 1 then
- for i in next, info do
- local fullinfo = font_fullinfo(filename, i-1, texmf)
- if not fullinfo then
- return
- end
- local index = newstatus[basefile].index[i]
- if newstatus[basefile].index[i] then
- index = newstatus[basefile].index[i]
- else
- index = #newmappings+1
- end
- newmappings[index] = fullinfo
- newstatus[basefile].index[i] = index
- end
- else
- local fullinfo = font_fullinfo(filename, false, texmf)
- if not fullinfo then
- return
- end
- local index
- if newstatus[basefile].index[1] then
- index = newstatus[basefile].index[1]
- else
- index = #newmappings+1
- end
- newmappings[index] = fullinfo
- newstatus[basefile].index[1] = index
- end
- else
- if trace_loading then
- logs.report("failed to load", "%s", basefile)
- end
- end
- end
-end
-
-local function path_normalize(path)
- --[[
- path normalization:
- - a\b\c -> a/b/c
- - a/../b -> b
- - /cygdrive/a/b -> a:/b
- - resolving symlinks on non-Win32 systems
- - checking symlink targets with kpse.readable_file
- ]]
- if os.type == "windows" or os.type == "msdos" or os.name == "cygwin" then
- path = path:gsub('\\', '/')
- path = path:lower()
- path = path:gsub('^/cygdrive/(%a)/', '%1:/')
- end
- if os.type ~= "windows" and os.type ~= "msdos" then
- local dest = lfs.readlink(path)
- if dest then
- if kpse.readable_file(dest) then
- path = dest
- elseif kpse.readable_file(file.join(file.dirname(path), dest)) then
- path = file.join(file.dirname(path), dest)
- else
- -- broken symlink?
- end
- end
- end
- path = file.collapse_path(path)
- return path
-end
-
-fonts.path_normalize = path_normalize
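-
--- Example (illustrative): on Cygwin, path_normalize("/cygdrive/C/Windows/Fonts/Arial.TTF")
--- yields "c:/windows/fonts/arial.ttf"; on plain Unix the same function instead
--- resolves symlinks to a readable target and collapses the path.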
-
-names.blacklist = { }
-
-local function read_blacklist()
- local files = {
- kpse.lookup("otfl-blacklist.cnf", {all=true, format="tex"})
- }
- local blacklist = names.blacklist
- local whitelist = { }
-
- if files and type(files) == "table" then
- for _,v in next, files do
- for line in io.lines(v) do
- line = line:strip() -- to get rid of lines like " % foo"
- if line:find("^%%") or line:is_empty() then
- -- comment or empty line
- else
- line = line:split("%")[1]
- line = line:strip()
- if string.sub(line,1,1) == "-" then
- whitelist[string.sub(line,2,-1)] = true
- else
- if trace_search then
- logs.report("blacklisted file", "%s", line)
- end
- blacklist[line] = true
- end
- end
- end
- end
- end
- for fontname in next, whitelist do -- keys are the whitelisted names
- blacklist[fontname] = nil
- end
-end
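-
--- Illustrative blacklist syntax as parsed above (file names are placeholders):
---
---   % whole-line comment
---   broken-font.ttf          % blacklist this file
---   -broken-font.ttf         % a leading "-" whitelists it again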
-
-local font_extensions = { "otf", "ttf", "ttc", "dfont" }
-local font_extensions_set = {}
-for key, value in next, font_extensions do
- font_extensions_set[value] = true
-end
-
-local installed_fonts_scanned = false
-
-local function scan_installed_fonts(fontnames, newfontnames)
- -- Try to query and add font list from operating system.
- -- This uses the lualatex-platform module.
- logs.info("Scanning fonts known to operating system...")
- local fonts = get_installed_fonts()
- if fonts and #fonts > 0 then
- installed_fonts_scanned = true
- if trace_search then
- logs.report("operating system fonts found", "%d", #fonts)
- end
- for key, value in next, fonts do
- local file = value.path
- if file then
- local ext = extname(file)
- if ext and font_extensions_set[ext] then
- file = path_normalize(file)
- if trace_loading then
- logs.report("loading font", "%s", file)
- end
- load_font(file, fontnames, newfontnames, false)
- end
- end
- end
- else
- if trace_search then
- logs.report("Could not retrieve list of installed fonts")
- end
- end
-end
-
-local function scan_dir(dirname, fontnames, newfontnames, texmf)
- --[[
- This function scans a directory and populates the list of fonts
- with all the fonts it finds.
- - dirname is the name of the directory to scan
- - names is the font database to fill
- - texmf is a boolean saying if we are scanning a texmf directory
- ]]
- local list, found = { }, { }
- local nbfound = 0
- if trace_search then
- logs.report("scanning", "%s", dirname)
- end
- for _,i in next, font_extensions do
- for _,ext in next, { i, upper(i) } do
- found = glob(format("%s/**.%s$", dirname, ext))
- -- note that glob fails silently on broken symlinks, which happens
- -- sometimes in TeX Live.
- if trace_search then
- logs.report("fonts found", "%s '%s' fonts found", #found, ext)
- end
- nbfound = nbfound + #found
- table.append(list, found)
- end
- end
- if trace_search then
- logs.report("fonts found", "%d fonts found in '%s'", nbfound, dirname)
- end
-
- for _,file in next, list do
- file = path_normalize(file)
- if trace_loading then
- logs.report("loading font", "%s", file)
- end
- load_font(file, fontnames, newfontnames, texmf)
- end
-end
-
-local function scan_texmf_fonts(fontnames, newfontnames)
- --[[
- This function scans all fonts in the texmf tree, through kpathsea
- variables OPENTYPEFONTS and TTFONTS of texmf.cnf
- ]]
- if expandpath("$OSFONTDIR"):is_empty() then
- logs.info("Scanning TEXMF fonts...")
- else
- logs.info("Scanning TEXMF and OS fonts...")
- end
- local fontdirs = expandpath("$OPENTYPEFONTS"):gsub("^%.", "")
- fontdirs = fontdirs .. expandpath("$TTFONTS"):gsub("^%.", "")
- if not fontdirs:is_empty() then
- for _,d in next, splitpath(fontdirs) do
- scan_dir(d, fontnames, newfontnames, true)
- end
- end
-end
-
---[[
- For the OS fonts, there are several options:
- - if OSFONTDIR is set (which is the case under windows by default but
- not on the other OSs), it scans it at the same time as the texmf tree,
- in the scan_texmf_fonts.
- - if not:
- - under Windows and Mac OSX, we take a look at some hardcoded directories
- - under Unix, we read /etc/fonts/fonts.conf and read the directories in it
-
- This means that if you have fonts in fancy directories, you need to set them
- in OSFONTDIR.
-]]
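-
--- For instance (a sketch, directories are placeholders), on Unix one could make
--- extra font trees visible to the scanner with
---
---   export OSFONTDIR="/usr/local/share/fonts:$HOME/fonts"
---
--- after which they are picked up together with the texmf tree in
--- scan_texmf_fonts above.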
-
-local function read_fonts_conf(path, results)
- --[[
- This function parses /etc/fonts/fonts.conf and returns all the dir it finds.
- The code is minimal, please report any error it may generate.
- ]]
- local f = io.open(path)
- if not f then
- if trace_search then
- logs.report("cannot open file", "%s", path)
- end
- return results
- end
- local incomments = false
- for line in f:lines() do
- while line and line ~= "" do
- -- spaghetti code... hmmm...
- if incomments then
- local tmp = find(line, '-->')
- if tmp then
- incomments = false
- line = sub(line, tmp+3)
- else
- line = nil
- end
- else
- local tmp = find(line, '<!--')
- local newline = line
- if tmp then
- -- for the analysis, we take everything that is before the
- -- comment sign
- newline = sub(line, 1, tmp-1)
- -- and we loop again with the comment
- incomments = true
- line = sub(line, tmp+4)
- else
- -- if there is no comment start, the block after that will
- -- end the analysis, we exit the while loop
- line = nil
- end
- for dir in gmatch(newline, '<dir>([^<]+)</dir>') do
- -- now we need to replace ~ by kpse.expand_path('~')
- if sub(dir, 1, 1) == '~' then
- dir = file.join(kpse.expand_path('~'), sub(dir, 2))
- end
- -- we exclude paths with texmf in them, as they should be
- -- found anyway
- if not find(dir, 'texmf') then
- results[#results+1] = dir
- end
- end
- for include in gmatch(newline, '<include[^<]*>([^<]+)</include>') do
- -- include here can be four things: a directory or a file,
- -- in absolute or relative path.
- if sub(include, 1, 1) == '~' then
- include = file.join(kpse.expand_path('~'),sub(include, 2))
- -- First if the path is relative, we make it absolute:
- elseif not lfs.isfile(include) and not lfs.isdir(include) then
- include = file.join(file.dirname(path), include)
- end
- if lfs.isfile(include) then
- -- maybe we should prevent loops here?
- -- we exclude path with texmf in them, as they should
- -- be found otherwise
- read_fonts_conf(include, results)
- elseif lfs.isdir(include) then
- for _,f in next, glob(file.join(include, "*.conf")) do
- read_fonts_conf(f, results)
- end
- end
- end
- end
- end
- end
- f:close()
- return results
-end
-
--- for testing purpose
-names.read_fonts_conf = read_fonts_conf
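-
--- Example of the subset of fonts.conf syntax handled above (paths illustrative):
---
---   <dir>/usr/share/fonts</dir>
---   <dir>~/.fonts</dir>
---   <include ignore_missing="yes">conf.d</include>
---
--- yields both directories (with "~" expanded, texmf paths skipped) and recurses
--- into every *.conf file below conf.d, resolved relative to the including file.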
-
-local function get_os_dirs()
- if os.name == 'macosx' then
- return {
- file.join(kpse.expand_path('~'), "Library/Fonts"),
- "/Library/Fonts",
- "/System/Library/Fonts",
- "/Network/Library/Fonts",
- }
- elseif os.type == "windows" or os.type == "msdos" or os.name == "cygwin" then
- local windir = os.getenv("WINDIR")
- return { file.join(windir, 'Fonts') }
- else
- for _,p in next, {"/usr/local/etc/fonts/fonts.conf", "/etc/fonts/fonts.conf"} do
- if lfs.isfile(p) then
- return read_fonts_conf(p, {})
- end
- end
- end
- return {}
-end
-
-local function scan_os_fonts(fontnames, newfontnames)
- --[[
- This function scans the OS fonts through
- - fontcache for Unix (reads the fonts.conf file and scans the directories)
- - a static set of directories for Windows and MacOSX
- ]]
- logs.info("Scanning OS fonts...")
- if trace_search then
- logs.info("Searching in static system directories...")
- end
- for _,d in next, get_os_dirs() do
- scan_dir(d, fontnames, newfontnames, false)
- end
-end
-
-local function update_names(fontnames, force)
- --[[
- The main function, scans everything
- - fontnames is the final table to return
- - force is whether we rebuild it from scratch or not
- ]]
- logs.info("Updating the font names database")
-
- if force then
- fontnames = fontnames_init()
- else
- if not fontnames then
- fontnames = names.load()
- end
- if fontnames.version ~= names.version then
- fontnames = fontnames_init()
- if trace_search then
- logs.report("No font names database or old one found; "
- .."generating new one")
- end
- end
- end
- local newfontnames = fontnames_init()
- read_blacklist()
- installed_fonts_scanned = false
- scan_installed_fonts(fontnames, newfontnames)
- scan_texmf_fonts(fontnames, newfontnames)
- if not installed_fonts_scanned and expandpath("$OSFONTDIR"):is_empty() then
- scan_os_fonts(fontnames, newfontnames)
- end
- return newfontnames
-end
-
-local function save_names(fontnames)
- local path = names.path.dir
- if not lfs.isdir(path) then
- dir.mkdirs(path)
- end
- path = file.join(path, names.path.basename)
- if file.iswritable(path) then
- local luaname, lucname = make_name(path)
- table.tofile(luaname, fontnames, true)
- caches.compile(fontnames,luaname,lucname)
- logs.info("Font names database saved")
- return path
- else
- logs.info("Failed to save names database")
- return nil
- end
-end
-
-local function scan_external_dir(dir)
- local old_names, new_names
- if loaded then
- old_names = names.data
- else
- old_names = names.load()
- loaded = true
- end
- new_names = table.copy(old_names)
- scan_dir(dir, old_names, new_names)
- names.data = new_names
-end
-
-names.scan = scan_external_dir
-names.load = load_names
-names.update = update_names
-names.save = save_names
-
--- dummy
-function fonts.names.getfilename(askedname,suffix) -- only supported in mkiv
- return ""
-end
diff --git a/otfl-font-ota.lua b/otfl-font-ota.lua
deleted file mode 100644
index c4663e1..0000000
--- a/otfl-font-ota.lua
+++ /dev/null
@@ -1,373 +0,0 @@
-if not modules then modules = { } end modules ['font-ota'] = {
- version = 1.001,
- comment = "companion to font-otf.lua (analysing)",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- this might become scrp-*.lua
-
-local type, tostring, match, format, concat = type, tostring, string.match, string.format, table.concat
-
-if not trackers then trackers = { register = function() end } end
-
-local trace_analyzing = false trackers.register("otf.analyzing", function(v) trace_analyzing = v end)
-
-local fonts, nodes, node = fonts, nodes, node
-
-local allocate = utilities.storage.allocate
-
-local otf = fonts.handlers.otf
-
-local analyzers = fonts.analyzers
-local initializers = allocate()
-local methods = allocate()
-
-analyzers.initializers = initializers
-analyzers.methods = methods
-analyzers.useunicodemarks = false
-
-local nodecodes = nodes.nodecodes
-local glyph_code = nodecodes.glyph
-local disc_code = nodecodes.disc
-
-local set_attribute = node.set_attribute
-local has_attribute = node.has_attribute
-local traverse_id = node.traverse_id
-local traverse_node_list = node.traverse
-
-local fontdata = fonts.hashes.identifiers
-local state = attributes.private('state')
-local categories = characters and characters.categories or { } -- sorry, only in context
-
-local tracers = nodes.tracers
-local colortracers = tracers and tracers.colors
-local setnodecolor = colortracers and colortracers.set or function() end
-local resetnodecolor = colortracers and colortracers.reset or function() end
-
-local otffeatures = fonts.constructors.newfeatures("otf")
-local registerotffeature = otffeatures.register
-
---[[ldx--
-<p>Analyzers run per script and/or language and are needed in order to
-process features right.</p>
---ldx]]--
-
--- todo: analyzers per script/lang, cross font, so we need a font id hash -> script
--- e.g. latin -> hyphenate, arab -> 1/2/3 analyze -- its own namespace
-
-local state = attributes.private('state')
-
-function analyzers.setstate(head,font)
- local useunicodemarks = analyzers.useunicodemarks
- local tfmdata = fontdata[font]
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local first, last, current, n, done = nil, nil, head, 0, false -- maybe make n boolean
- while current do
- local id = current.id
- if id == glyph_code and current.font == font then
- local char = current.char
- local d = descriptions[char]
- if d then
- if d.class == "mark" or (useunicodemarks and categories[char] == "mn") then
- done = true
- set_attribute(current,state,5) -- mark
- elseif n == 0 then
- first, last, n = current, current, 1
- set_attribute(current,state,1) -- init
- else
- last, n = current, n+1
- set_attribute(current,state,2) -- medi
- end
- else -- finish
- if first and first == last then
- set_attribute(last,state,4) -- isol
- elseif last then
- set_attribute(last,state,3) -- fina
- end
- first, last, n = nil, nil, 0
- end
- elseif id == disc_code then
- -- always in the middle
- set_attribute(current,state,2) -- midi
- last = current
- else -- finish
- if first and first == last then
- set_attribute(last,state,4) -- isol
- elseif last then
- set_attribute(last,state,3) -- fina
- end
- first, last, n = nil, nil, 0
- end
- current = current.next
- end
- if first and first == last then
- set_attribute(last,state,4) -- isol
- elseif last then
- set_attribute(last,state,3) -- fina
- end
- return head, done
-end
-
--- in the future we will use language/script attributes instead of the
--- font related value, but then we also need dynamic features which is
--- somewhat slower; and .. we need a chain of them
-
-local function analyzeinitializer(tfmdata,value) -- attr
- local script, language = otf.scriptandlanguage(tfmdata) -- attr
- local action = initializers[script]
- if action then
- if type(action) == "function" then
- return action(tfmdata,value)
- else
- local action = action[language]
- if action then
- return action(tfmdata,value)
- end
- end
- end
-end
-
-local function analyzeprocessor(head,font,attr)
- local tfmdata = fontdata[font]
- local script, language = otf.scriptandlanguage(tfmdata,attr)
- local action = methods[script]
- if action then
- if type(action) == "function" then
- return action(head,font,attr)
- else
- action = action[language]
- if action then
- return action(head,font,attr)
- end
- end
- end
- return head, false
-end
-
-registerotffeature {
- name = "analyze",
- description = "analysis of (for instance) character classes",
- default = true,
- initializers = {
- node = analyzeinitializer,
- },
- processors = {
- position = 1,
- node = analyzeprocessor,
- }
-}
-
--- latin
-
-methods.latn = analyzers.setstate
-
--- this info eventually will go into char-def and we will have a state
--- table for generic then
-
-local zwnj = 0x200C
-local zwj = 0x200D
-
-local isol = {
- [0x0600] = true, [0x0601] = true, [0x0602] = true, [0x0603] = true,
- [0x0608] = true, [0x060B] = true, [0x0621] = true, [0x0674] = true,
- [0x06DD] = true, [zwnj] = true,
-}
-
-local isol_fina = {
- [0x0622] = true, [0x0623] = true, [0x0624] = true, [0x0625] = true,
- [0x0627] = true, [0x0629] = true, [0x062F] = true, [0x0630] = true,
- [0x0631] = true, [0x0632] = true, [0x0648] = true, [0x0671] = true,
- [0x0672] = true, [0x0673] = true, [0x0675] = true, [0x0676] = true,
- [0x0677] = true, [0x0688] = true, [0x0689] = true, [0x068A] = true,
- [0x068B] = true, [0x068C] = true, [0x068D] = true, [0x068E] = true,
- [0x068F] = true, [0x0690] = true, [0x0691] = true, [0x0692] = true,
- [0x0693] = true, [0x0694] = true, [0x0695] = true, [0x0696] = true,
- [0x0697] = true, [0x0698] = true, [0x0699] = true, [0x06C0] = true,
- [0x06C3] = true, [0x06C4] = true, [0x06C5] = true, [0x06C6] = true,
- [0x06C7] = true, [0x06C8] = true, [0x06C9] = true, [0x06CA] = true,
- [0x06CB] = true, [0x06CD] = true, [0x06CF] = true, [0x06D2] = true,
- [0x06D3] = true, [0x06D5] = true, [0x06EE] = true, [0x06EF] = true,
- [0x0759] = true, [0x075A] = true, [0x075B] = true, [0x076B] = true,
- [0x076C] = true, [0x0771] = true, [0x0773] = true, [0x0774] = true,
- [0x0778] = true, [0x0779] = true, [0xFEF5] = true, [0xFEF7] = true,
- [0xFEF9] = true, [0xFEFB] = true,
-
- -- syriac
-
- [0x0710] = true, [0x0715] = true, [0x0716] = true, [0x0717] = true,
- [0x0718] = true, [0x0719] = true, [0x0728] = true, [0x072A] = true,
- [0x072C] = true, [0x071E] = true,
-}
-
-local isol_fina_medi_init = {
- [0x0626] = true, [0x0628] = true, [0x062A] = true, [0x062B] = true,
- [0x062C] = true, [0x062D] = true, [0x062E] = true, [0x0633] = true,
- [0x0634] = true, [0x0635] = true, [0x0636] = true, [0x0637] = true,
- [0x0638] = true, [0x0639] = true, [0x063A] = true, [0x063B] = true,
- [0x063C] = true, [0x063D] = true, [0x063E] = true, [0x063F] = true,
- [0x0640] = true, [0x0641] = true, [0x0642] = true, [0x0643] = true,
- [0x0644] = true, [0x0645] = true, [0x0646] = true, [0x0647] = true,
- [0x0649] = true, [0x064A] = true, [0x066E] = true, [0x066F] = true,
- [0x0678] = true, [0x0679] = true, [0x067A] = true, [0x067B] = true,
- [0x067C] = true, [0x067D] = true, [0x067E] = true, [0x067F] = true,
- [0x0680] = true, [0x0681] = true, [0x0682] = true, [0x0683] = true,
- [0x0684] = true, [0x0685] = true, [0x0686] = true, [0x0687] = true,
- [0x069A] = true, [0x069B] = true, [0x069C] = true, [0x069D] = true,
- [0x069E] = true, [0x069F] = true, [0x06A0] = true, [0x06A1] = true,
- [0x06A2] = true, [0x06A3] = true, [0x06A4] = true, [0x06A5] = true,
- [0x06A6] = true, [0x06A7] = true, [0x06A8] = true, [0x06A9] = true,
- [0x06AA] = true, [0x06AB] = true, [0x06AC] = true, [0x06AD] = true,
- [0x06AE] = true, [0x06AF] = true, [0x06B0] = true, [0x06B1] = true,
- [0x06B2] = true, [0x06B3] = true, [0x06B4] = true, [0x06B5] = true,
- [0x06B6] = true, [0x06B7] = true, [0x06B8] = true, [0x06B9] = true,
- [0x06BA] = true, [0x06BB] = true, [0x06BC] = true, [0x06BD] = true,
- [0x06BE] = true, [0x06BF] = true, [0x06C1] = true, [0x06C2] = true,
- [0x06CC] = true, [0x06CE] = true, [0x06D0] = true, [0x06D1] = true,
- [0x06FA] = true, [0x06FB] = true, [0x06FC] = true, [0x06FF] = true,
- [0x0750] = true, [0x0751] = true, [0x0752] = true, [0x0753] = true,
- [0x0754] = true, [0x0755] = true, [0x0756] = true, [0x0757] = true,
- [0x0758] = true, [0x075C] = true, [0x075D] = true, [0x075E] = true,
- [0x075F] = true, [0x0760] = true, [0x0761] = true, [0x0762] = true,
- [0x0763] = true, [0x0764] = true, [0x0765] = true, [0x0766] = true,
- [0x0767] = true, [0x0768] = true, [0x0769] = true, [0x076A] = true,
- [0x076D] = true, [0x076E] = true, [0x076F] = true, [0x0770] = true,
- [0x0772] = true, [0x0775] = true, [0x0776] = true, [0x0777] = true,
- [0x077A] = true, [0x077B] = true, [0x077C] = true, [0x077D] = true,
- [0x077E] = true, [0x077F] = true, [zwj] = true,
-
- -- syriac
-
- [0x0712] = true, [0x0713] = true, [0x0714] = true, [0x071A] = true,
- [0x071B] = true, [0x071C] = true, [0x071D] = true, [0x071F] = true,
- [0x0720] = true, [0x0721] = true, [0x0722] = true, [0x0723] = true,
- [0x0725] = true, [0x0726] = true, [0x0727] = true, [0x0729] = true,
- [0x072B] = true, [0x0724] = true, [0x0706] = true, [0x0707] = true,
-}
-
-local arab_warned = { }
-
-
--- todo: gref
-
-local function warning(current,what)
- local char = current.char
- if not arab_warned[char] then
- logs.report("analyze","arab: character %s (U+%05X) has no %s class", char, char, what)
- arab_warned[char] = true
- end
-end
-
-function methods.nocolor(head,font,attr)
- for n in traverse_id(glyph_code,head) do
- if not font or n.font == font then
- resetnodecolor(n)
- end
- end
- return head, true
-end
-
-local function finish(first,last)
- if last then
- if first == last then
- local fc = first.char
- if isol_fina_medi_init[fc] or isol_fina[fc] then
- set_attribute(first,state,4) -- isol
- if trace_analyzing then setnodecolor(first,"font:isol") end
- else
- warning(first,"isol")
- set_attribute(first,state,0) -- error
- if trace_analyzing then resetnodecolor(first) end
- end
- else
- local lc = last.char
- if isol_fina_medi_init[lc] or isol_fina[lc] then -- why isol here ?
- -- if laststate == 1 or laststate == 2 or laststate == 4 then
- set_attribute(last,state,3) -- fina
- if trace_analyzing then setnodecolor(last,"font:fina") end
- else
- warning(last,"fina")
- set_attribute(last,state,0) -- error
- if trace_analyzing then resetnodecolor(last) end
- end
- end
- first, last = nil, nil
- elseif first then
- -- first and last are normally either both set or both nil, so we should never come here
- local fc = first.char
- if isol_fina_medi_init[fc] or isol_fina[fc] then
- set_attribute(first,state,4) -- isol
- if trace_analyzing then setnodecolor(first,"font:isol") end
- else
- warning(first,"isol")
- set_attribute(first,state,0) -- error
- if trace_analyzing then resetnodecolor(first) end
- end
- first = nil
- end
- return first, last
-end
-
-function methods.arab(head,font,attr) -- maybe make a special version with no trace
- local useunicodemarks = analyzers.useunicodemarks
- local tfmdata = fontdata[font]
- local marks = tfmdata.resources.marks
- local first, last, current, done = nil, nil, head, false
- while current do
- if current.id == glyph_code and current.subtype<256 and current.font == font and not has_attribute(current,state) then
- done = true
- local char = current.char
- if marks[char] or (useunicodemarks and categories[char] == "mn") then
- set_attribute(current,state,5) -- mark
- if trace_analyzing then setnodecolor(current,"font:mark") end
- elseif isol[char] then -- can be zwj or zwnj too
- first, last = finish(first,last)
- set_attribute(current,state,4) -- isol
- if trace_analyzing then setnodecolor(current,"font:isol") end
- first, last = nil, nil
- elseif not first then
- if isol_fina_medi_init[char] then
- set_attribute(current,state,1) -- init
- if trace_analyzing then setnodecolor(current,"font:init") end
- first, last = first or current, current
- elseif isol_fina[char] then
- set_attribute(current,state,4) -- isol
- if trace_analyzing then setnodecolor(current,"font:isol") end
- first, last = nil, nil
- else -- no arab
- first, last = finish(first,last)
- end
- elseif isol_fina_medi_init[char] then
- first, last = first or current, current
- set_attribute(current,state,2) -- medi
- if trace_analyzing then setnodecolor(current,"font:medi") end
- elseif isol_fina[char] then
- if not has_attribute(last,state,1) then
- -- tricky, we need to check what last may be !
- set_attribute(last,state,2) -- medi
- if trace_analyzing then setnodecolor(last,"font:medi") end
- end
- set_attribute(current,state,3) -- fina
- if trace_analyzing then setnodecolor(current,"font:fina") end
- first, last = nil, nil
- elseif char >= 0x0600 and char <= 0x06FF then
- if trace_analyzing then setnodecolor(current,"font:rest") end
- first, last = finish(first,last)
- else --no
- first, last = finish(first,last)
- end
- else
- first, last = finish(first,last)
- end
- current = current.next
- end
- first, last = finish(first,last)
- return head, done
-end
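-
--- Sketch of the states assigned above (1=init, 2=medi, 3=fina, 4=isol, 5=mark,
--- 0=error): for the three dual-joining letters of كتب (kaf, teh, beh) the kaf
--- gets 1 (init), the teh 2 (medi), and finish() turns the trailing beh into 3 (fina).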
-
-methods.syrc = methods.arab
-
-directives.register("otf.analyze.useunicodemarks",function(v)
- analyzers.useunicodemarks = v
-end)
diff --git a/otfl-font-otb.lua b/otfl-font-otb.lua
deleted file mode 100644
index 44639a8..0000000
--- a/otfl-font-otb.lua
+++ /dev/null
@@ -1,636 +0,0 @@
-if not modules then modules = { } end modules ['font-otb'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-local concat = table.concat
-local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
-local type, next, tonumber, tostring = type, next, tonumber, tostring
-local lpegmatch = lpeg.match
-local utfchar = utf.char
-
-local trace_baseinit = false trackers.register("otf.baseinit", function(v) trace_baseinit = v end)
-local trace_singles = false trackers.register("otf.singles", function(v) trace_singles = v end)
-local trace_multiples = false trackers.register("otf.multiples", function(v) trace_multiples = v end)
-local trace_alternatives = false trackers.register("otf.alternatives", function(v) trace_alternatives = v end)
-local trace_ligatures = false trackers.register("otf.ligatures", function(v) trace_ligatures = v end)
-local trace_kerns = false trackers.register("otf.kerns", function(v) trace_kerns = v end)
-local trace_preparing = false trackers.register("otf.preparing", function(v) trace_preparing = v end)
-
-local report_prepare = logs.reporter("fonts","otf prepare")
-
-local fonts = fonts
-local otf = fonts.handlers.otf
-
-local otffeatures = fonts.constructors.newfeatures("otf")
-local registerotffeature = otffeatures.register
-
-otf.defaultbasealternate = "none" -- first last
-
-local wildcard = "*"
-local default = "dflt"
-
-local function gref(descriptions,n)
- if type(n) == "number" then
- local name = descriptions[n].name
- if name then
- return format("U+%05X (%s)",n,name)
- else
- return format("U+%05X",n)
- end
- elseif n then
- local num, nam = { }, { }
- for i=2,#n do -- first is likely a key
- local ni = n[i]
- num[i] = format("U+%05X",ni)
- nam[i] = descriptions[ni].name or "?"
- end
- return format("%s (%s)",concat(num," "), concat(nam," "))
- else
- return "?"
- end
-end
-
-local function cref(feature,lookupname)
- if lookupname then
- return format("feature %s, lookup %s",feature,lookupname)
- else
- return format("feature %s",feature)
- end
-end
-
-local function report_alternate(feature,lookupname,descriptions,unicode,replacement,value,comment)
- report_prepare("%s: base alternate %s => %s (%s => %s)",cref(feature,lookupname),
- gref(descriptions,unicode),replacement and gref(descriptions,replacement) or "-",
- tostring(value),comment)
-end
-
-local function report_substitution(feature,lookupname,descriptions,unicode,substitution)
- report_prepare("%s: base substitution %s => %s",cref(feature,lookupname),
- gref(descriptions,unicode),gref(descriptions,substitution))
-end
-
-local function report_ligature(feature,lookupname,descriptions,unicode,ligature)
- report_prepare("%s: base ligature %s => %s",cref(feature,lookupname),
- gref(descriptions,ligature),gref(descriptions,unicode))
-end
-
-local basemethods = { }
-local basemethod = "<unset>"
-
-local function applybasemethod(what,...)
- local m = basemethods[basemethod][what]
- if m then
- return m(...)
- end
-end
-
--- We need to make sure that luatex sees the difference between
--- base fonts that have different glyphs in the same slots in fonts
--- that have the same fullname (or filename). LuaTeX will merge fonts
--- eventually (and subset later on). If needed we can use a more
--- verbose name as long as we don't use <()<>[]{}/%> and the length
--- is < 128.
-
-local basehash, basehashes, applied = { }, 1, { }
-
-local function registerbasehash(tfmdata)
- local properties = tfmdata.properties
- local hash = concat(applied," ")
- local base = basehash[hash]
- if not base then
- basehashes = basehashes + 1
- base = basehashes
- basehash[hash] = base
- end
- properties.basehash = base
- properties.fullname = properties.fullname .. "-" .. base
- -- report_prepare("fullname base hash: '%s', featureset '%s'",tfmdata.properties.fullname,hash)
- applied = { }
-end
-
-local function registerbasefeature(feature,value)
- applied[#applied+1] = feature .. "=" .. tostring(value)
-end
-
--- The original basemode ligature builder used the names of components
--- and did some expression juggling to get the chain right. The current
--- variant starts with unicodes but still uses names to make the chain.
--- This is needed because we have to create intermediates when needed
--- but use predefined snippets when available. To some extent the
--- current builder is more stupid, but I don't worry that much about it
--- as ligatures are rather predictable.
---
--- Personally I think that an ff + i == ffi rule as used in for instance
--- latin modern is pretty weird as no sane person will key that in and
--- expect a glyph for that ligature plus the following character. Anyhow,
--- as we need to deal with this, we do, but no guarantees are given.
---
--- latin modern dejavu
---
--- f+f 102 102 102 102
--- f+i 102 105 102 105
--- f+l 102 108 102 108
--- f+f+i 102 102 105
--- f+f+l 102 102 108 102 102 108
--- ff+i 64256 105 64256 105
--- ff+l 64256 108
---
--- As you can see here, latin modern is less complete than dejavu but
--- in practice one will not notice it.
---
--- The while loop is needed because we need to resolve for instance
--- pseudo names like hyphen_hyphen to endash so in practice we end
--- up with a bit too many definitions, but the overhead is negligible.
---
--- Todo: if changed[first] or changed[second] then ... end
-
-local trace = false
-
-local function finalize_ligatures(tfmdata,ligatures)
- local nofligatures = #ligatures
- if nofligatures > 0 then
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local resources = tfmdata.resources
- local unicodes = resources.unicodes
- local private = resources.private
- local alldone = false
- while not alldone do
- local done = 0
- for i=1,nofligatures do
- local ligature = ligatures[i]
- if ligature then
- local unicode, lookupdata = ligature[1], ligature[2]
- if trace then
- print("BUILDING",concat(lookupdata," "),unicode)
- end
- local size = #lookupdata
- local firstcode = lookupdata[1] -- [2]
- local firstdata = characters[firstcode]
- local okay = false
- if firstdata then
- local firstname = "ctx_" .. firstcode
- for i=1,size-1 do -- for i=2,size-1 do
- local firstdata = characters[firstcode]
- if not firstdata then
- firstcode = private
- if trace then
- print(" DEFINING",firstname,firstcode)
- end
- unicodes[firstname] = firstcode
- firstdata = { intermediate = true, ligatures = { } }
- characters[firstcode] = firstdata
- descriptions[firstcode] = { name = firstname }
- private = private + 1
- end
- local target
- local secondcode = lookupdata[i+1]
- local secondname = firstname .. "_" .. secondcode
- if i == size - 1 then
- target = unicode
- if not unicodes[secondname] then
- unicodes[secondname] = unicode -- map final ligature onto intermediates
- end
- okay = true
- else
- target = unicodes[secondname]
- if not target then
- break
- end
- end
- if trace then
- print("CODES",firstname,firstcode,secondname,secondcode,target)
- end
- local firstligs = firstdata.ligatures
- if firstligs then
- firstligs[secondcode] = { char = target }
- else
- firstdata.ligatures = { [secondcode] = { char = target } }
- end
- firstcode = target
- firstname = secondname
- end
- end
- if okay then
- ligatures[i] = false
- done = done + 1
- end
- end
- end
- alldone = done == 0
- end
- if trace then
- for k, v in next, characters do
- if v.ligatures then table.print(v,k) end
- end
- end
- tfmdata.resources.private = private
- end
-end
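-
--- The entries built above use luatex's base-mode ligature format; e.g. an
--- f + i -> fi rule ends up as (codepoints illustrative)
---
---   characters[0x66].ligatures = { [0x69] = { char = 0xFB01 } }
---
--- with intermediate "ctx_<code>" characters created on the fly for longer
--- chains such as f + f + i.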
-
-local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist)
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local resources = tfmdata.resources
- local changed = tfmdata.changed
- local unicodes = resources.unicodes
- local lookuphash = resources.lookuphash
- local lookuptypes = resources.lookuptypes
-
- local ligatures = { }
- local alternate = tonumber(value)
- local defaultalt = otf.defaultbasealternate
-
- local trace_singles = trace_baseinit and trace_singles
- local trace_alternatives = trace_baseinit and trace_alternatives
- local trace_ligatures = trace_baseinit and trace_ligatures
-
- local actions = {
- substitution = function(lookupdata,lookupname,description,unicode)
- if trace_singles then
- report_substitution(feature,lookupname,descriptions,unicode,lookupdata)
- end
- changed[unicode] = lookupdata
- end,
- alternate = function(lookupdata,lookupname,description,unicode)
- local replacement = lookupdata[alternate]
- if replacement then
- changed[unicode] = replacement
- if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal")
- end
- elseif defaultalt == "first" then
- replacement = lookupdata[1]
- changed[unicode] = replacement
- if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
- end
- elseif defaultalt == "last" then
- replacement = lookupdata[#lookupdata]
- changed[unicode] = replacement
- if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
- end
- else
- if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown")
- end
- end
- end,
- ligature = function(lookupdata,lookupname,description,unicode)
- if trace_ligatures then
- report_ligature(feature,lookupname,descriptions,unicode,lookupdata)
- end
- ligatures[#ligatures+1] = { unicode, lookupdata }
- end,
- }
-
- for unicode, character in next, characters do
- local description = descriptions[unicode]
- local lookups = description.slookups
- if lookups then
- for l=1,#lookuplist do
- local lookupname = lookuplist[l]
- local lookupdata = lookups[lookupname]
- if lookupdata then
- local lookuptype = lookuptypes[lookupname]
- local action = actions[lookuptype]
- if action then
- action(lookupdata,lookupname,description,unicode)
- end
- end
- end
- end
- local lookups = description.mlookups
- if lookups then
- for l=1,#lookuplist do
- local lookupname = lookuplist[l]
- local lookuplist = lookups[lookupname]
- if lookuplist then
- local lookuptype = lookuptypes[lookupname]
- local action = actions[lookuptype]
- if action then
- for i=1,#lookuplist do
- action(lookuplist[i],lookupname,description,unicode)
- end
- end
- end
- end
- end
- end
-
- finalize_ligatures(tfmdata,ligatures)
-end
-
-local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist) -- todo what kind of kerns, currently all
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local resources = tfmdata.resources
- local unicodes = resources.unicodes
- local sharedkerns = { }
- local traceindeed = trace_baseinit and trace_kerns
- for unicode, character in next, characters do
- local description = descriptions[unicode]
- local rawkerns = description.kerns -- shared
- if rawkerns then
- local s = sharedkerns[rawkerns]
- if s == false then
- -- skip
- elseif s then
- character.kerns = s
- else
- local newkerns = character.kerns
- local done = false
- for l=1,#lookuplist do
- local lookup = lookuplist[l]
- local kerns = rawkerns[lookup]
- if kerns then
- for otherunicode, value in next, kerns do
- if value == 0 then
- -- maybe no 0 test here
- elseif not newkerns then
- newkerns = { [otherunicode] = value }
- done = true
- if traceindeed then
- report_prepare("%s: base kern %s + %s => %s",cref(feature,lookup),
- gref(descriptions,unicode),gref(descriptions,otherunicode),value)
- end
- elseif not newkerns[otherunicode] then -- first wins
- newkerns[otherunicode] = value
- done = true
- if traceindeed then
- report_prepare("%s: base kern %s + %s => %s",cref(feature,lookup),
- gref(descriptions,unicode),gref(descriptions,otherunicode),value)
- end
- end
- end
- end
- end
- if done then
- sharedkerns[rawkerns] = newkerns
- character.kerns = newkerns -- no empty assignments
- else
- sharedkerns[rawkerns] = false
- end
- end
- end
- end
-end
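
The sharedkerns table in the deleted preparepositionings above keys processed kern tables by the identity of the raw kern table, so glyphs that point at the same raw data end up sharing one processed table. A minimal, self-contained Lua sketch of that trick (not part of the deleted file; it omits the false marker the original stores for "no kerns produced"):

    local sharedkerns = { }

    local function processed(rawkerns)
        -- the raw table itself is the cache key, so identical references share one result
        local s = sharedkerns[rawkerns]
        if not s then
            s = { }
            for other, kern in pairs(rawkerns) do
                s[other] = kern
            end
            sharedkerns[rawkerns] = s
        end
        return s
    end

    local raw = { [0x41] = -30, [0x56] = -80 }
    print(processed(raw) == processed(raw)) --> true: built once, then shared
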
-
-basemethods.independent = {
- preparesubstitutions = preparesubstitutions,
- preparepositionings = preparepositionings,
-}
-
-local function makefake(tfmdata,name,present)
- local resources = tfmdata.resources
- local private = resources.private
- local character = { intermediate = true, ligatures = { } }
- resources.unicodes[name] = private
- tfmdata.characters[private] = character
- tfmdata.descriptions[private] = { name = name }
- resources.private = private + 1
- present[name] = private
- return character
-end
-
-local function make_1(present,tree,name)
- for k, v in next, tree do
- if k == "ligature" then
- present[name] = v
- else
- make_1(present,v,name .. "_" .. k)
- end
- end
-end
-
-local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,done,lookupname)
- for k, v in next, tree do
- if k == "ligature" then
- local character = characters[preceding]
- if not character then
- if trace_baseinit then
- report_prepare("weird ligature in lookup %s: U+%05X (%s), preceding U+%05X (%s)",lookupname,v,utfchar(v),preceding,utfchar(preceding))
- end
- character = makefake(tfmdata,name,present)
- end
- local ligatures = character.ligatures
- if ligatures then
- ligatures[unicode] = { char = v }
- else
- character.ligatures = { [unicode] = { char = v } }
- end
- if done then
- local d = done[lookupname]
- if not d then
- done[lookupname] = { "dummy", v }
- else
- d[#d+1] = v
- end
- end
- else
- local code = present[name] or unicode
- local name = name .. "_" .. k
- make_2(present,tfmdata,characters,v,name,code,k,done,lookupname)
- end
- end
-end
-
-local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist)
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local resources = tfmdata.resources
- local changed = tfmdata.changed
- local lookuphash = resources.lookuphash
- local lookuptypes = resources.lookuptypes
-
- local ligatures = { }
- local alternate = tonumber(value)
- local defaultalt = otf.defaultbasealternate
-
- local trace_singles = trace_baseinit and trace_singles
- local trace_alternatives = trace_baseinit and trace_alternatives
- local trace_ligatures = trace_baseinit and trace_ligatures
-
- for l=1,#lookuplist do
- local lookupname = lookuplist[l]
- local lookupdata = lookuphash[lookupname]
- local lookuptype = lookuptypes[lookupname]
- for unicode, data in next, lookupdata do
- if lookuptype == "substitution" then
- if trace_singles then
- report_substitution(feature,lookupname,descriptions,unicode,data)
- end
- changed[unicode] = data
- elseif lookuptype == "alternate" then
- local replacement = data[alternate]
- if replacement then
- changed[unicode] = replacement
- if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal")
- end
- elseif defaultalt == "first" then
- replacement = data[1]
- changed[unicode] = replacement
- if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
- end
- elseif defaultalt == "last" then
- replacement = data[#data]
- if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
- end
- else
- if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown")
- end
- end
- elseif lookuptype == "ligature" then
- ligatures[#ligatures+1] = { unicode, data, lookupname }
- if trace_ligatures then
- report_ligature(feature,lookupname,descriptions,unicode,data)
- end
- end
- end
- end
-
- local nofligatures = #ligatures
-
- if nofligatures > 0 then
-
- local characters = tfmdata.characters
- local present = { }
- local done = trace_baseinit and trace_ligatures and { }
-
- for i=1,nofligatures do
- local ligature = ligatures[i]
- local unicode, tree = ligature[1], ligature[2]
- make_1(present,tree,"ctx_"..unicode)
- end
-
- for i=1,nofligatures do
- local ligature = ligatures[i]
- local unicode, tree, lookupname = ligature[1], ligature[2], ligature[3]
- make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,lookupname)
- end
-
- end
-
-end
-
-local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist)
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local resources = tfmdata.resources
- local lookuphash = resources.lookuphash
- local traceindeed = trace_baseinit and trace_kerns
-
- -- check out this sharedkerns trickery
-
- for l=1,#lookuplist do
- local lookupname = lookuplist[l]
- local lookupdata = lookuphash[lookupname]
- for unicode, data in next, lookupdata do
- local character = characters[unicode]
- local kerns = character.kerns
- if not kerns then
- kerns = { }
- character.kerns = kerns
- end
- if traceindeed then
- for otherunicode, kern in next, data do
- if not kerns[otherunicode] and kern ~= 0 then
- kerns[otherunicode] = kern
-                        report_prepare("%s: base kern %s + %s => %s",cref(feature,lookupname),
- gref(descriptions,unicode),gref(descriptions,otherunicode),kern)
- end
- end
- else
- for otherunicode, kern in next, data do
- if not kerns[otherunicode] and kern ~= 0 then
- kerns[otherunicode] = kern
- end
- end
- end
- end
- end
-
-end
-
-local function initializehashes(tfmdata)
- nodeinitializers.features(tfmdata)
-end
-
-basemethods.shared = {
- initializehashes = initializehashes,
- preparesubstitutions = preparesubstitutions,
- preparepositionings = preparepositionings,
-}
-
-basemethod = "independent"
-
-local function featuresinitializer(tfmdata,value)
- if true then -- value then
- local t = trace_preparing and os.clock()
- local features = tfmdata.shared.features
- if features then
- applybasemethod("initializehashes",tfmdata)
- local collectlookups = otf.collectlookups
- local rawdata = tfmdata.shared.rawdata
- local properties = tfmdata.properties
- local script = properties.script
- local language = properties.language
- local basesubstitutions = rawdata.resources.features.gsub
- local basepositionings = rawdata.resources.features.gpos
- if basesubstitutions then
- for feature, data in next, basesubstitutions do
- local value = features[feature]
- if value then
- local validlookups, lookuplist = collectlookups(rawdata,feature,script,language)
- if validlookups then
- applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist)
- registerbasefeature(feature,value)
- end
- end
- end
- end
-            if basepositionings then
-                for feature, data in next, basepositionings do
- local value = features[feature]
- if value then
- local validlookups, lookuplist = collectlookups(rawdata,feature,script,language)
- if validlookups then
- applybasemethod("preparepositionings",tfmdata,feature,features[feature],validlookups,lookuplist)
- registerbasefeature(feature,value)
- end
- end
- end
- end
- registerbasehash(tfmdata)
- end
- if trace_preparing then
- report_prepare("preparation time is %0.3f seconds for %s",os.clock()-t,tfmdata.properties.fullname or "?")
- end
- end
-end
-
-registerotffeature {
- name = "features",
- description = "features",
- default = true,
- initializers = {
-        -- position = 1, -- after setscript (temp hack ... we need to force script / language to 1)
- base = featuresinitializer,
- }
-}
-
--- independent : collect lookups independently (takes more runtime ... negligible)
--- shared : shares lookups with node mode (takes more memory unless also a node mode variant is used ... noticeable)
-
-directives.register("fonts.otf.loader.basemethod", function(v)
- if basemethods[v] then
- basemethod = v
- end
-end)
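
The base-mode machinery above is driven by a small dispatch table: basemethods holds an "independent" and a "shared" variant, basemethod names the active one, and applybasemethod (defined earlier in the file) looks the handler up by that name. A reduced, self-contained sketch of the pattern, with placeholder handler bodies rather than the real preparation code:

    local basemethods = {
        independent = { preparesubstitutions = function() return "independent" end },
        shared      = { preparesubstitutions = function() return "shared"      end },
    }
    local basemethod = "independent"

    local function applybasemethod(what, ...)
        local method = basemethods[basemethod][what]
        if method then
            return method(...)
        end
    end

    print(applybasemethod("preparesubstitutions")) --> independent
    basemethod = "shared" -- this is what the directive above switches
    print(applybasemethod("preparesubstitutions")) --> shared
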
diff --git a/otfl-font-otc.lua b/otfl-font-otc.lua
deleted file mode 100644
index ae463e7..0000000
--- a/otfl-font-otc.lua
+++ /dev/null
@@ -1,334 +0,0 @@
-if not modules then modules = { } end modules ['font-otc'] = {
- version = 1.001,
- comment = "companion to font-otf.lua (context)",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local format, insert = string.format, table.insert
-local type, next = type, next
-local lpegmatch = lpeg.match
-
--- we assume that the other otf stuff is loaded already
-
-local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
-local report_otf = logs.reporter("fonts","otf loading")
-
-local fonts = fonts
-local otf = fonts.handlers.otf
-local otffeatures = fonts.constructors.newfeatures("otf")
-local registerotffeature = otffeatures.register
-local setmetatableindex = table.setmetatableindex
-
-- In the userdata interface we can no longer tweak the loaded font as
-- conveniently as before. For instance, instead of pushing extra data
-- into the table using the original structure, we now have to operate on
--- the mkiv representation. And as the fontloader interface is modelled
--- after fontforge we cannot change that one too much either.
-
-local types = {
- substitution = "gsub_single",
- ligature = "gsub_ligature",
- alternate = "gsub_alternate",
-}
-
-setmetatableindex(types, function(t,k) t[k] = k return k end) -- "key"
-
-local everywhere = { ["*"] = { ["*"] = true } } -- or: { ["*"] = { "*" } }
-local noflags = { }
-
-local function addfeature(data,feature,specifications)
- local descriptions = data.descriptions
- local resources = data.resources
- local lookups = resources.lookups
- local gsubfeatures = resources.features.gsub
- if gsubfeatures and gsubfeatures[feature] then
- -- already present
- else
- local sequences = resources.sequences
- local fontfeatures = resources.features
- local unicodes = resources.unicodes
- local lookuptypes = resources.lookuptypes
- local splitter = lpeg.splitter(" ",unicodes)
- local done = 0
- local skip = 0
- if not specifications[1] then
- -- so we accept a one entry specification
- specifications = { specifications }
- end
- -- subtables are tables themselves but we also accept flattened singular subtables
- for s=1,#specifications do
- local specification = specifications[s]
- local valid = specification.valid
- if not valid or valid(data,specification,feature) then
- local initialize = specification.initialize
- if initialize then
- -- when false is returned we initialize only once
- specification.initialize = initialize(specification) and initialize or nil
- end
- local askedfeatures = specification.features or everywhere
- local subtables = specification.subtables or { specification.data } or { }
- local featuretype = types[specification.type or "substitution"]
- local featureflags = specification.flags or noflags
- local added = false
- local featurename = format("ctx_%s_%s",feature,s)
- local st = { }
- for t=1,#subtables do
- local list = subtables[t]
- local full = format("%s_%s",featurename,t)
- st[t] = full
- if featuretype == "gsub_ligature" then
- lookuptypes[full] = "ligature"
- for code, ligature in next, list do
- local unicode = tonumber(code) or unicodes[code]
- local description = descriptions[unicode]
- if description then
- local slookups = description.slookups
- if type(ligature) == "string" then
- ligature = { lpegmatch(splitter,ligature) }
- end
- local present = true
- for i=1,#ligature do
- if not descriptions[ligature[i]] then
- present = false
- break
- end
- end
- if present then
- if slookups then
- slookups[full] = ligature
- else
- description.slookups = { [full] = ligature }
- end
- done, added = done + 1, true
- else
- skip = skip + 1
- end
- end
- end
- elseif featuretype == "gsub_single" then
- lookuptypes[full] = "substitution"
- for code, replacement in next, list do
- local unicode = tonumber(code) or unicodes[code]
- local description = descriptions[unicode]
- if description then
- local slookups = description.slookups
- replacement = tonumber(replacement) or unicodes[replacement]
- if descriptions[replacement] then
- if slookups then
- slookups[full] = replacement
- else
- description.slookups = { [full] = replacement }
- end
- done, added = done + 1, true
- end
- end
- end
- end
- end
- if added then
- -- script = { lang1, lang2, lang3 } or script = { lang1 = true, ... }
- for k, v in next, askedfeatures do
- if v[1] then
- askedfeatures[k] = table.tohash(v)
- end
- end
- sequences[#sequences+1] = {
- chain = 0,
- features = { [feature] = askedfeatures },
- flags = featureflags,
- name = featurename,
- subtables = st,
- type = featuretype,
- }
- -- register in metadata (merge as there can be a few)
- if not gsubfeatures then
- gsubfeatures = { }
- fontfeatures.gsub = gsubfeatures
- end
- local k = gsubfeatures[feature]
- if not k then
- k = { }
- gsubfeatures[feature] = k
- end
- for script, languages in next, askedfeatures do
- local kk = k[script]
- if not kk then
- kk = { }
- k[script] = kk
- end
- for language, value in next, languages do
- kk[language] = value
- end
- end
- end
- end
- end
- if trace_loading then
- report_otf("enhance: registering feature '%s', %s glyphs affected, %s glyphs skipped",feature,done,skip)
- end
- end
-end
-
-otf.enhancers.addfeature = addfeature
-
-local extrafeatures = { }
-
-function otf.addfeature(name,specification)
- extrafeatures[name] = specification
-end
-
-local function enhance(data,filename,raw)
- for feature, specification in next, extrafeatures do
- addfeature(data,feature,specification)
- end
-end
-
-otf.enhancers.register("check extra features",enhance)
-
--- tlig --
-
-local tlig = {
- endash = "hyphen hyphen",
- emdash = "hyphen hyphen hyphen",
- -- quotedblleft = "quoteleft quoteleft",
- -- quotedblright = "quoteright quoteright",
- -- quotedblleft = "grave grave",
- -- quotedblright = "quotesingle quotesingle",
- -- quotedblbase = "comma comma",
-}
-
-local tlig_specification = {
- type = "ligature",
- features = everywhere,
- data = tlig,
- flags = noflags,
-}
-
-otf.addfeature("tlig",tlig_specification)
-
-registerotffeature {
- name = 'tlig',
- description = 'tex ligatures',
-}
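
The registration pattern used for tlig above can serve equally for a user-defined ligature set. A sketch, assuming the surrounding addfeature machinery is loaded; the feature name "zlig" and the glyph mapping are invented purely for illustration:

    local zlig = {
        oe = "o e", -- hypothetical: build an "oe" ligature from "o" followed by "e"
    }

    otf.addfeature("zlig", {
        type     = "ligature",
        features = everywhere,
        data     = zlig,
        flags    = noflags,
    })

    registerotffeature {
        name        = 'zlig',
        description = 'demo ligatures',
    }
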
-
--- trep
-
-local trep = {
- -- [0x0022] = 0x201D,
- [0x0027] = 0x2019,
- -- [0x0060] = 0x2018,
-}
-
-local trep_specification = {
- type = "substitution",
- features = everywhere,
- data = trep,
- flags = noflags,
-}
-
-otf.addfeature("trep",trep_specification)
-
-registerotffeature {
- name = 'trep',
- description = 'tex replacements',
-}
-
--- tcom
-
-if characters.combined then
-
- local tcom = { }
-
- local function initialize()
- characters.initialize()
- for first, seconds in next, characters.combined do
- for second, combination in next, seconds do
- tcom[combination] = { first, second }
- end
- end
- -- return false
- end
-
- local tcom_specification = {
- type = "ligature",
- features = everywhere,
- data = tcom,
- flags = noflags,
- initialize = initialize,
- }
-
- otf.addfeature("tcom",tcom_specification)
-
- registerotffeature {
- name = 'tcom',
- description = 'tex combinations',
- }
-
-end
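
The tcom block above relies on the initialize hook described near the top of addfeature: the hook is called once, and when it returns false (or nil) it is cleared so it never runs again. A reduced sketch of that run-once contract (the data filled in is a placeholder):

    local specification = {
        type = "ligature",
        data = { },
        initialize = function(s)
            s.data["placeholder"] = { "a", "b" } -- lazily computed content
            return false                         -- false/nil: drop the hook, run only once
        end,
    }

    -- the one line addfeature uses to apply that contract:
    specification.initialize = specification.initialize(specification)
                               and specification.initialize or nil
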
-
--- anum
-
-local anum_arabic = {
- [0x0030] = 0x0660,
- [0x0031] = 0x0661,
- [0x0032] = 0x0662,
- [0x0033] = 0x0663,
- [0x0034] = 0x0664,
- [0x0035] = 0x0665,
- [0x0036] = 0x0666,
- [0x0037] = 0x0667,
- [0x0038] = 0x0668,
- [0x0039] = 0x0669,
-}
-
-local anum_persian = {
- [0x0030] = 0x06F0,
- [0x0031] = 0x06F1,
- [0x0032] = 0x06F2,
- [0x0033] = 0x06F3,
- [0x0034] = 0x06F4,
- [0x0035] = 0x06F5,
- [0x0036] = 0x06F6,
- [0x0037] = 0x06F7,
- [0x0038] = 0x06F8,
- [0x0039] = 0x06F9,
-}
-
-local function valid(data)
- local features = data.resources.features
- if features then
- for k, v in next, features do
- for k, v in next, v do
- if v.arab then
- return true
- end
- end
- end
- end
-end
-
-local anum_specification = {
- {
- type = "substitution",
- features = { arab = { URD = true, dflt = true } },
- data = anum_arabic,
- flags = noflags, -- { },
- valid = valid,
- },
- {
- type = "substitution",
- features = { arab = { URD = true } },
- data = anum_persian,
- flags = noflags, -- { },
- valid = valid,
- },
-}
-
-otf.addfeature("anum",anum_specification) -- todo: only when there is already an arab script feature
-
-registerotffeature {
- name = 'anum',
- description = 'arabic digits',
-}
diff --git a/otfl-font-otf.lua b/otfl-font-otf.lua
deleted file mode 100644
index e1339ae..0000000
--- a/otfl-font-otf.lua
+++ /dev/null
@@ -1,2080 +0,0 @@
-if not modules then modules = { } end modules ['font-otf'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- langs -> languages etc.
--- anchor_classes vs kernclasses
--- modification/creationtime in subfont is runtime, thus pointless
--- to_table -> totable
--- ascent descent
-
--- more checking against low level calls of functions
-
-local utf = unicode.utf8
-
-local utfbyte = utf.byte
-local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
-local type, next, tonumber, tostring = type, next, tonumber, tostring
-local abs = math.abs
-local getn = table.getn
-local lpegmatch = lpeg.match
-local reversed, concat, remove = table.reversed, table.concat, table.remove
-local ioflush = io.flush
-local fastcopy, tohash, derivetable = table.fastcopy, table.tohash, table.derive
-
-local allocate = utilities.storage.allocate
-local registertracker = trackers.register
-local registerdirective = directives.register
-local starttiming = statistics.starttiming
-local stoptiming = statistics.stoptiming
-local elapsedtime = statistics.elapsedtime
-local findbinfile = resolvers.findbinfile
-
-local trace_private = false registertracker("otf.private", function(v) trace_private = v end)
-local trace_loading = false registertracker("otf.loading", function(v) trace_loading = v end)
-local trace_features = false registertracker("otf.features", function(v) trace_features = v end)
-local trace_dynamics = false registertracker("otf.dynamics", function(v) trace_dynamics = v end)
-local trace_sequences = false registertracker("otf.sequences", function(v) trace_sequences = v end)
-local trace_markwidth = false registertracker("otf.markwidth", function(v) trace_markwidth = v end)
-local trace_defining = false registertracker("fonts.defining", function(v) trace_defining = v end)
-
-local report_otf = logs.reporter("fonts","otf loading")
-
-local fonts = fonts
-local otf = fonts.handlers.otf
-
-otf.glists = { "gsub", "gpos" }
-
-otf.version = 2.737 -- beware: also sync font-mis.lua
-otf.cache = containers.define("fonts", "otf", otf.version, true)
-
-local fontdata = fonts.hashes.identifiers
-local chardata = characters and characters.data -- not used
-
-local otffeatures = fonts.constructors.newfeatures("otf")
-local registerotffeature = otffeatures.register
-
-local enhancers = allocate()
-otf.enhancers = enhancers
-local patches = { }
-enhancers.patches = patches
-
-local definers = fonts.definers
-local readers = fonts.readers
-local constructors = fonts.constructors
-
-local forceload = false
-local cleanup = 0 -- mk: 0=885M 1=765M 2=735M (regular run 730M)
-local usemetatables = false -- .4 slower on mk but 30 M less mem so we might change the default -- will be directive
-local packdata = true
-local syncspace = true
-local forcenotdef = false
-
-local wildcard = "*"
-local default = "dflt"
-
-local fontloaderfields = fontloader.fields
-local mainfields = nil
-local glyphfields = nil -- not used yet
-
-registerdirective("fonts.otf.loader.cleanup", function(v) cleanup = tonumber(v) or (v and 1) or 0 end)
-registerdirective("fonts.otf.loader.force", function(v) forceload = v end)
-registerdirective("fonts.otf.loader.usemetatables", function(v) usemetatables = v end)
-registerdirective("fonts.otf.loader.pack", function(v) packdata = v end)
-registerdirective("fonts.otf.loader.syncspace", function(v) syncspace = v end)
-registerdirective("fonts.otf.loader.forcenotdef", function(v) forcenotdef = v end)
-
-local function load_featurefile(raw,featurefile)
- if featurefile and featurefile ~= "" then
- if trace_loading then
- report_otf("featurefile: %s", featurefile)
- end
- fontloader.apply_featurefile(raw, featurefile)
- end
-end
-
-local function showfeatureorder(rawdata,filename)
- local sequences = rawdata.resources.sequences
- if sequences and #sequences > 0 then
- if trace_loading then
- report_otf("font %s has %s sequences",filename,#sequences)
- report_otf(" ")
- end
- for nos=1,#sequences do
- local sequence = sequences[nos]
- local typ = sequence.type or "no-type"
- local name = sequence.name or "no-name"
- local subtables = sequence.subtables or { "no-subtables" }
- local features = sequence.features
- if trace_loading then
- report_otf("%3i %-15s %-20s [%s]",nos,name,typ,concat(subtables,","))
- end
- if features then
- for feature, scripts in next, features do
- local tt = { }
- if type(scripts) == "table" then
- for script, languages in next, scripts do
- local ttt = { }
- for language, _ in next, languages do
- ttt[#ttt+1] = language
- end
- tt[#tt+1] = format("[%s: %s]",script,concat(ttt," "))
- end
- if trace_loading then
- report_otf(" %s: %s",feature,concat(tt," "))
- end
- else
- if trace_loading then
- report_otf(" %s: %s",feature,tostring(scripts))
- end
- end
- end
- end
- end
- if trace_loading then
- report_otf("\n")
- end
- elseif trace_loading then
- report_otf("font %s has no sequences",filename)
- end
-end
-
---[[ldx--
-<p>We start with a lot of tables and related functions.</p>
---ldx]]--
-
-local valid_fields = table.tohash {
- -- "anchor_classes",
- "ascent",
- -- "cache_version",
- "cidinfo",
- "copyright",
- -- "creationtime",
- "descent",
- "design_range_bottom",
- "design_range_top",
- "design_size",
- "encodingchanged",
- "extrema_bound",
- "familyname",
- "fontname",
- "fontname",
- "fontstyle_id",
- "fontstyle_name",
- "fullname",
- -- "glyphs",
- "hasvmetrics",
- -- "head_optimized_for_cleartype",
- "horiz_base",
- "issans",
- "isserif",
- "italicangle",
- -- "kerns",
- -- "lookups",
- "macstyle",
- -- "modificationtime",
- "onlybitmaps",
- "origname",
- "os2_version",
- "pfminfo",
- -- "private",
- "serifcheck",
- "sfd_version",
- -- "size",
- "strokedfont",
- "strokewidth",
- -- "subfonts",
- "table_version",
- -- "tables",
- -- "ttf_tab_saved",
- "ttf_tables",
- "uni_interp",
- "uniqueid",
- "units_per_em",
- "upos",
- "use_typo_metrics",
- "uwidth",
- -- "validation_state",
- "version",
- "vert_base",
- "weight",
- "weight_width_slope_only",
- -- "xuid",
-}
-
-local ordered_enhancers = {
- "prepare tables",
- "prepare glyphs",
- "prepare lookups",
-
- "analyze glyphs",
- "analyze math",
-
- "prepare tounicode", -- maybe merge with prepare
-
- "reorganize lookups",
- "reorganize mark classes",
- "reorganize anchor classes",
-
- "reorganize glyph kerns",
- "reorganize glyph lookups",
- "reorganize glyph anchors",
-
- "merge kern classes",
-
- "reorganize features",
- "reorganize subtables",
-
- "check glyphs",
- "check metadata",
- "check extra features", -- after metadata
-
- "add duplicates",
- "check encoding",
-
- "cleanup tables",
-}
-
---[[ldx--
-<p>Here we go.</p>
---ldx]]--
-
-local actions = allocate()
-local before = allocate()
-local after = allocate()
-
-patches.before = before
-patches.after = after
-
-local function enhance(name,data,filename,raw)
- local enhancer = actions[name]
- if enhancer then
- if trace_loading then
- report_otf("enhance: %s (%s)",name,filename)
- ioflush()
- end
- enhancer(data,filename,raw)
- elseif trace_loading then
- -- report_otf("enhance: %s is undefined",name)
- end
-end
-
-function enhancers.apply(data,filename,raw)
- local basename = file.basename(lower(filename))
- if trace_loading then
- report_otf("start enhancing: %s",filename)
- end
- ioflush() -- we want instant messages
- for e=1,#ordered_enhancers do
- local enhancer = ordered_enhancers[e]
- local b = before[enhancer]
- if b then
- for pattern, action in next, b do
- if find(basename,pattern) then
- action(data,filename,raw)
- end
- end
- end
- enhance(enhancer,data,filename,raw)
- local a = after[enhancer]
- if a then
- for pattern, action in next, a do
- if find(basename,pattern) then
- action(data,filename,raw)
- end
- end
- end
- ioflush() -- we want instant messages
- end
- if trace_loading then
- report_otf("stop enhancing")
- end
- ioflush() -- we want instant messages
-end
-
--- patches.register("before","migrate metadata","cambria",function() end)
-
-function patches.register(what,where,pattern,action)
- local pw = patches[what]
- if pw then
- local ww = pw[where]
- if ww then
- ww[pattern] = action
- else
- pw[where] = { [pattern] = action}
- end
- end
-end
-
-function patches.report(fmt,...)
- if trace_loading then
- report_otf("patching: " ..fmt,...)
- end
-end
-
-function enhancers.register(what,action) -- only already registered actions can be overloaded
- actions[what] = action
-end
-
-function otf.load(filename,format,sub,featurefile)
- local name = file.basename(file.removesuffix(filename))
- local attr = lfs.attributes(filename)
- local size = attr and attr.size or 0
- local time = attr and attr.modification or 0
- if featurefile then
- name = name .. "@" .. file.removesuffix(file.basename(featurefile))
- end
- if sub == "" then
- sub = false
- end
- local hash = name
- if sub then
- hash = hash .. "-" .. sub
- end
- hash = containers.cleanname(hash)
- local featurefiles
- if featurefile then
- featurefiles = { }
- for s in gmatch(featurefile,"[^,]+") do
- local name = resolvers.findfile(file.addsuffix(s,'fea'),'fea') or ""
- if name == "" then
- report_otf("loading: no featurefile '%s'",s)
- else
- local attr = lfs.attributes(name)
- featurefiles[#featurefiles+1] = {
- name = name,
- size = attr and attr.size or 0,
- time = attr and attr.modification or 0,
- }
- end
- end
- if #featurefiles == 0 then
- featurefiles = nil
- end
- end
- local data = containers.read(otf.cache,hash)
- local reload = not data or data.size ~= size or data.time ~= time
- if forceload then
- report_otf("loading: forced reload due to hard coded flag")
- reload = true
- end
- if not reload then
- local featuredata = data.featuredata
- if featurefiles then
- if not featuredata or #featuredata ~= #featurefiles then
- reload = true
- else
- for i=1,#featurefiles do
- local fi, fd = featurefiles[i], featuredata[i]
- if fi.name ~= fd.name or fi.size ~= fd.size or fi.time ~= fd.time then
- reload = true
- break
- end
- end
- end
- elseif featuredata then
- reload = true
- end
- if reload then
- report_otf("loading: forced reload due to changed featurefile specification: %s",featurefile or "--")
- end
- end
- if reload then
- report_otf("loading: %s (hash: %s)",filename,hash)
- local fontdata, messages
- if sub then
- fontdata, messages = fontloader.open(filename,sub)
- else
- fontdata, messages = fontloader.open(filename)
- end
- if fontdata then
- mainfields = mainfields or (fontloaderfields and fontloaderfields(fontdata))
- end
- if trace_loading and messages and #messages > 0 then
- if type(messages) == "string" then
- report_otf("warning: %s",messages)
- else
- for m=1,#messages do
- report_otf("warning: %s",tostring(messages[m]))
- end
- end
- else
- report_otf("font loaded okay")
- end
- if fontdata then
- if featurefiles then
- for i=1,#featurefiles do
- load_featurefile(fontdata,featurefiles[i].name)
- end
- end
- local unicodes = {
- -- names to unicodes
- }
- local splitter = lpeg.splitter(" ",unicodes)
- data = {
- size = size,
- time = time,
- format = format,
- featuredata = featurefiles,
- resources = {
- filename = resolvers.unresolve(filename), -- no shortcut
- version = otf.version,
- creator = "context mkiv",
- unicodes = unicodes,
- indices = {
- -- index to unicodes
- },
- duplicates = {
- -- alternative unicodes
- },
- variants = {
- -- alternative unicodes (variants)
- },
- lookuptypes = {
- },
- },
- metadata = {
- -- raw metadata, not to be used
- },
- properties = {
- -- normalized metadata
- },
- descriptions = {
- },
- goodies = {
- },
- helpers = {
- tounicodelist = splitter,
- tounicodetable = lpeg.Ct(splitter),
- },
- }
- starttiming(data)
- report_otf("file size: %s", size)
- enhancers.apply(data,filename,fontdata)
- if packdata then
- if cleanup > 0 then
- collectgarbage("collect")
---~ lua.collectgarbage()
- end
- enhance("pack",data,filename,nil)
- end
- report_otf("saving in cache: %s",filename)
- data = containers.write(otf.cache, hash, data)
- if cleanup > 1 then
- collectgarbage("collect")
---~ lua.collectgarbage()
- end
- stoptiming(data)
- if elapsedtime then -- not in generic
- report_otf("preprocessing and caching took %s seconds",elapsedtime(data))
- end
- fontloader.close(fontdata) -- free memory
- if cleanup > 3 then
- collectgarbage("collect")
---~ lua.collectgarbage()
- end
-            data = containers.read(otf.cache, hash) -- this frees the old table and loads the sparse one
- if cleanup > 2 then
- collectgarbage("collect")
---~ lua.collectgarbage()
- end
- else
- data = nil
- report_otf("loading failed (file read error)")
- end
- end
- if data then
- if trace_defining then
- report_otf("loading from cache: %s",hash)
- end
- enhance("unpack",data,filename,nil,false)
- enhance("add dimensions",data,filename,nil,false)
- if trace_sequences then
- showfeatureorder(data,filename)
- end
- end
- return data
-end
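
otf.load above derives its cache entry from the font's base name plus optional featurefile and subfont qualifiers, and forces a reload when the file size or modification time changes. A rough, self-contained approximation of how that hash is composed (plain string operations stand in for the file.* and containers.cleanname helpers, so treat the details as an assumption):

    local function cachekey(filename, sub, featurefile)
        local name = filename:gsub("^.*/", ""):gsub("%.%w+$", "") -- basename without suffix
        if featurefile and featurefile ~= "" then
            name = name .. "@" .. featurefile:gsub("^.*/", ""):gsub("%.%w+$", "")
        end
        if sub and sub ~= "" then
            name = name .. "-" .. sub
        end
        return name:lower() -- stand-in for containers.cleanname
    end

    print(cachekey("texmf/fonts/lmroman10-regular.otf", false, nil))
    --> lmroman10-regular
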
-
-local mt = {
- __index = function(t,k) -- maybe set it
- if k == "height" then
- local ht = t.boundingbox[4]
- return ht < 0 and 0 or ht
- elseif k == "depth" then
- local dp = -t.boundingbox[2]
- return dp < 0 and 0 or dp
- elseif k == "width" then
- return 0
- elseif k == "name" then -- or maybe uni*
- return forcenotdef and ".notdef"
- end
- end
-}
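
When the usemetatables directive is set, the "add dimensions" pass below attaches the mt metatable above instead of storing height and depth per glyph; __index then derives them from the bounding box on demand. A small self-contained sketch with a reduced copy of that metatable and made-up bounding box values:

    local mt = {
        __index = function(t, k)
            if k == "height" then
                local ht = t.boundingbox[4]
                return ht < 0 and 0 or ht
            elseif k == "depth" then
                local dp = -t.boundingbox[2]
                return dp < 0 and 0 or dp
            elseif k == "width" then
                return 0
            end
        end
    }

    local d = setmetatable({ boundingbox = { 0, -200, 500, 700 } }, mt)
    print(d.height, d.depth, d.width) --> 700   200   0
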
-
-actions["prepare tables"] = function(data,filename,raw)
- data.properties.hasitalics = false
-end
-
-actions["add dimensions"] = function(data,filename)
- -- todo: forget about the width if it's the defaultwidth (saves mem)
- -- we could also build the marks hash here (instead of storing it)
- if data then
- local descriptions = data.descriptions
- local resources = data.resources
- local defaultwidth = resources.defaultwidth or 0
- local defaultheight = resources.defaultheight or 0
- local defaultdepth = resources.defaultdepth or 0
- if usemetatables then
- for _, d in next, descriptions do
- local wd = d.width
- if not wd then
- d.width = defaultwidth
- elseif trace_markwidth and wd ~= 0 and d.class == "mark" then
- report_otf("mark with width %s (%s) in %s",wd,d.name or "<noname>",file.basename(filename))
- -- d.width = -wd
- end
- setmetatable(d,mt)
- end
- else
- for _, d in next, descriptions do
- local bb, wd = d.boundingbox, d.width
- if not wd then
- d.width = defaultwidth
- elseif trace_markwidth and wd ~= 0 and d.class == "mark" then
- report_otf("mark with width %s (%s) in %s",wd,d.name or "<noname>",file.basename(filename))
- -- d.width = -wd
- end
- -- if forcenotdef and not d.name then
- -- d.name = ".notdef"
- -- end
- if bb then
- local ht, dp = bb[4], -bb[2]
- if ht == 0 or ht < 0 then
- -- not set
- else
- d.height = ht
- end
- if dp == 0 or dp < 0 then
- -- not set
- else
- d.depth = dp
- end
- end
- end
- end
- end
-end
-
-local function somecopy(old) -- fast one
- if old then
- local new = { }
- if type(old) == "table" then
- for k, v in next, old do
- if k == "glyphs" then
- -- skip
- elseif type(v) == "table" then
- new[k] = somecopy(v)
- else
- new[k] = v
- end
- end
- else
- for i=1,#mainfields do
- local k = mainfields[i]
- local v = old[k]
- if k == "glyphs" then
- -- skip
- elseif type(v) == "table" then
- new[k] = somecopy(v)
- else
- new[k] = v
- end
- end
- end
- return new
- else
- return { }
- end
-end
-
--- not setting hasitalics and class (when nil) during
--- table construction can save some mem
-
-actions["prepare glyphs"] = function(data,filename,raw)
- local rawglyphs = raw.glyphs
- local rawsubfonts = raw.subfonts
- local rawcidinfo = raw.cidinfo
- local criterium = constructors.privateoffset
- local private = criterium
- local resources = data.resources
- local metadata = data.metadata
- local properties = data.properties
- local descriptions = data.descriptions
- local unicodes = resources.unicodes -- name to unicode
- local indices = resources.indices -- index to unicode
- local duplicates = resources.duplicates
- local variants = resources.variants
-
- if rawsubfonts then
-
- metadata.subfonts = { }
- properties.cidinfo = rawcidinfo
-
- if rawcidinfo.registry then
- local cidmap = fonts.cid.getmap(rawcidinfo)
- if cidmap then
- rawcidinfo.usedname = cidmap.usedname
- local nofnames, nofunicodes = 0, 0
- local cidunicodes, cidnames = cidmap.unicodes, cidmap.names
- for cidindex=1,#rawsubfonts do
- local subfont = rawsubfonts[cidindex]
- local cidglyphs = subfont.glyphs
- metadata.subfonts[cidindex] = somecopy(subfont)
- for index=0,subfont.glyphcnt-1 do -- we could take the previous glyphcnt instead of 0
- local glyph = cidglyphs[index]
- if glyph then
- local unicode = glyph.unicode
- local name = glyph.name or cidnames[index]
- if not unicode or unicode == -1 or unicode >= criterium then
- unicode = cidunicodes[index]
- end
- if not unicode or unicode == -1 or unicode >= criterium then
- if not name then
- name = format("u%06X",private)
- end
- unicode = private
- unicodes[name] = private
- if trace_private then
- report_otf("enhance: glyph %s at index 0x%04X is moved to private unicode slot U+%05X",name,index,private)
- end
- private = private + 1
- nofnames = nofnames + 1
- else
- if not name then
- name = format("u%06X",unicode)
- end
- unicodes[name] = unicode
- nofunicodes = nofunicodes + 1
- end
- indices[index] = unicode -- each index is unique (at least now)
-
- local description = {
- -- width = glyph.width,
- boundingbox = glyph.boundingbox,
- name = glyph.name or name or "unknown", -- uniXXXX
- cidindex = cidindex,
- index = index,
- glyph = glyph,
- }
-
- descriptions[unicode] = description
- else
- -- report_otf("potential problem: glyph 0x%04X is used but empty",index)
- end
- end
- end
- if trace_loading then
- report_otf("cid font remapped, %s unicode points, %s symbolic names, %s glyphs",nofunicodes, nofnames, nofunicodes+nofnames)
- end
- elseif trace_loading then
- report_otf("unable to remap cid font, missing cid file for %s",filename)
- end
- elseif trace_loading then
- report_otf("font %s has no glyphs",filename)
- end
-
- else
-
- for index=0,raw.glyphcnt-1 do -- not raw.glyphmax-1 (as that will crash)
- local glyph = rawglyphs[index]
- if glyph then
- local unicode = glyph.unicode
- local name = glyph.name
- if not unicode or unicode == -1 or unicode >= criterium then
- unicode = private
- unicodes[name] = private
- if trace_private then
- report_otf("enhance: glyph %s at index 0x%04X is moved to private unicode slot U+%05X",name,index,private)
- end
- private = private + 1
- else
- unicodes[name] = unicode
- end
- indices[index] = unicode
- if not name then
- name = format("u%06X",unicode)
- end
- descriptions[unicode] = {
- -- width = glyph.width,
- boundingbox = glyph.boundingbox,
- name = name,
- index = index,
- glyph = glyph,
- }
- local altuni = glyph.altuni
- if altuni then
- local d
- for i=1,#altuni do
- local a = altuni[i]
- local u = a.unicode
- local v = a.variant
- if v then
- local vv = variants[v]
- if vv then
- vv[u] = unicode
- else -- xits-math has some:
- vv = { [u] = unicode }
- variants[v] = vv
- end
- elseif d then
- d[#d+1] = u
- else
- d = { u }
- end
- end
- if d then
- duplicates[unicode] = d
- end
- end
- else
- report_otf("potential problem: glyph 0x%04X is used but empty",index)
- end
- end
-
- end
-
- resources.private = private
-
-end
-
--- the next one is still messy but will get better when we have
--- flattened map/enc tables in the font loader
-
-actions["check encoding"] = function(data,filename,raw)
- local descriptions = data.descriptions
- local resources = data.resources
- local properties = data.properties
- local unicodes = resources.unicodes -- name to unicode
- local indices = resources.indices -- index to unicodes
-
- -- begin of messy (not needed when cidmap)
-
- local mapdata = raw.map or { }
- local unicodetoindex = mapdata and mapdata.map or { }
- -- local encname = lower(data.enc_name or raw.enc_name or mapdata.enc_name or "")
- local encname = lower(data.enc_name or mapdata.enc_name or "")
- local criterium = 0xFFFF -- for instance cambria has a lot of mess up there
-
- -- end of messy
-
- if find(encname,"unicode") then -- unicodebmp, unicodefull, ...
- if trace_loading then
- report_otf("checking embedded unicode map '%s'",encname)
- end
- for unicode, index in next, unicodetoindex do -- altuni already covers this
- if unicode <= criterium and not descriptions[unicode] then
- local parent = indices[index] -- why nil?
- if parent then
- report_otf("weird, unicode U+%05X points to U+%05X with index 0x%04X",unicode,parent,index)
- else
- report_otf("weird, unicode U+%05X points to nowhere with index 0x%04X",unicode,index)
- end
- end
- end
- elseif properties.cidinfo then
- report_otf("warning: no unicode map, used cidmap '%s'",properties.cidinfo.usedname or "?")
- else
- report_otf("warning: non unicode map '%s', only using glyph unicode data",encname or "whatever")
- end
-
- if mapdata then
- mapdata.map = { } -- clear some memory
- end
-end
-
--- for the moment we assume that a font with lookups will not use
--- altuni so we stick to kerns only
-
-actions["add duplicates"] = function(data,filename,raw)
- local descriptions = data.descriptions
- local resources = data.resources
- local properties = data.properties
- local unicodes = resources.unicodes -- name to unicode
- local indices = resources.indices -- index to unicodes
- local duplicates = resources.duplicates
-
- for unicode, d in next, duplicates do
- for i=1,#d do
- local u = d[i]
- if not descriptions[u] then
- local description = descriptions[unicode]
- local duplicate = table.copy(description) -- else packing problem
- duplicate.comment = format("copy of U+%05X", unicode)
- descriptions[u] = duplicate
- local n = 0
- for _, description in next, descriptions do
-                    local kerns = description.kerns
-                    if kerns then
- for _, k in next, kerns do
- local ku = k[unicode]
- if ku then
- k[u] = ku
- n = n + 1
- end
- end
- end
- -- todo: lookups etc
- end
- if trace_loading then
- report_otf("duplicating U+%05X to U+%05X with index 0x%04X (%s kerns)",unicode,u,description.index,n)
- end
- end
- end
- end
-end
-
--- class : nil base mark ligature component (maybe we don't need it in description)
--- boundingbox: split into ht/dp takes more memory (larger tables and less sharing)
-
-actions["analyze glyphs"] = function(data,filename,raw) -- maybe integrate this in the previous
- local descriptions = data.descriptions
- local resources = data.resources
- local metadata = data.metadata
- local properties = data.properties
- local hasitalics = false
- local widths = { }
- local marks = { } -- always present (saves checking)
- for unicode, description in next, descriptions do
- local glyph = description.glyph
- local italic = glyph.italic_correction
- if not italic then
- -- skip
- elseif italic == 0 then
- -- skip
- else
- description.italic = italic
- hasitalics = true
- end
- local width = glyph.width
- widths[width] = (widths[width] or 0) + 1
- local class = glyph.class
- if class then
- if class == "mark" then
- marks[unicode] = true
- end
- description.class = class
- end
- end
- -- flag italic
- properties.hasitalics = hasitalics
- -- flag marks
- resources.marks = marks
- -- share most common width for cjk fonts
- local wd, most = 0, 1
- for k,v in next, widths do
- if v > most then
- wd, most = k, v
- end
- end
- if most > 1000 then -- maybe 500
- if trace_loading then
- report_otf("most common width: %s (%s times), sharing (cjk font)",wd,most)
- end
- for unicode, description in next, descriptions do
- if description.width == wd then
- -- description.width = nil
- else
- description.width = description.glyph.width
- end
- end
- resources.defaultwidth = wd
- else
- for unicode, description in next, descriptions do
- description.width = description.glyph.width
- end
- end
-end
-
-actions["reorganize mark classes"] = function(data,filename,raw)
- local mark_classes = raw.mark_classes
- if mark_classes then
- local resources = data.resources
- local unicodes = resources.unicodes
- local markclasses = { }
- resources.markclasses = markclasses -- reversed
- for name, class in next, mark_classes do
- local t = { }
- for s in gmatch(class,"[^ ]+") do
- t[unicodes[s]] = true
- end
- markclasses[name] = t
- end
- end
-end
-
-actions["reorganize features"] = function(data,filename,raw) -- combine with other
- local features = { }
- data.resources.features = features
- for k, what in next, otf.glists do
- local dw = raw[what]
- if dw then
- local f = { }
- features[what] = f
- for i=1,#dw do
- local d= dw[i]
- local dfeatures = d.features
- if dfeatures then
- for i=1,#dfeatures do
- local df = dfeatures[i]
- local tag = strip(lower(df.tag))
- local ft = f[tag]
- if not ft then
- ft = { }
- f[tag] = ft
- end
- local dscripts = df.scripts
- for i=1,#dscripts do
- local d = dscripts[i]
- local languages = d.langs
- local script = strip(lower(d.script))
- local fts = ft[script] if not fts then fts = {} ft[script] = fts end
- for i=1,#languages do
- fts[strip(lower(languages[i]))] = true
- end
- end
- end
- end
- end
- end
- end
-end
-
-actions["reorganize anchor classes"] = function(data,filename,raw)
- local resources = data.resources
- local anchor_to_lookup = { }
- local lookup_to_anchor = { }
- resources.anchor_to_lookup = anchor_to_lookup
- resources.lookup_to_anchor = lookup_to_anchor
- local classes = raw.anchor_classes -- anchor classes not in final table
- if classes then
- for c=1,#classes do
- local class = classes[c]
- local anchor = class.name
- local lookups = class.lookup
- if type(lookups) ~= "table" then
- lookups = { lookups }
- end
- local a = anchor_to_lookup[anchor]
- if not a then
- a = { }
- anchor_to_lookup[anchor] = a
- end
- for l=1,#lookups do
- local lookup = lookups[l]
- local l = lookup_to_anchor[lookup]
- if l then
- l[anchor] = true
- else
- l = { [anchor] = true }
- lookup_to_anchor[lookup] = l
- end
- a[lookup] = true
- end
- end
- end
-end
-
-actions["prepare tounicode"] = function(data,filename,raw)
- fonts.mappings.addtounicode(data,filename)
-end
-
-local g_directions = {
- gsub_contextchain = 1,
- gpos_contextchain = 1,
- -- gsub_context = 1,
- -- gpos_context = 1,
- gsub_reversecontextchain = -1,
- gpos_reversecontextchain = -1,
-}
-
--- Research by Khaled Hosny has demonstrated that the font loader merges
--- regular and AAT features and that these can interfere (especially because
--- we dropped checking for valid features elsewhere). So we just check for
--- the special flag and drop the feature if such a tag is found.
-
-local function supported(features)
- for i=1,#features do
- if features[i].ismac then
- return false
- end
- end
- return true
-end
-
-actions["reorganize subtables"] = function(data,filename,raw)
- local resources = data.resources
- local sequences = { }
- local lookups = { }
- local chainedfeatures = { }
- resources.sequences = sequences
- resources.lookups = lookups
- for _, what in next, otf.glists do
- local dw = raw[what]
- if dw then
- for k=1,#dw do
- local gk = dw[k]
- local features = gk.features
--- if features and supported(features) then
- if not features or supported(features) then -- not always features !
- local typ = gk.type
- local chain = g_directions[typ] or 0
- local subtables = gk.subtables
- if subtables then
- local t = { }
- for s=1,#subtables do
- t[s] = subtables[s].name
- end
- subtables = t
- end
- local flags, markclass = gk.flags, nil
- if flags then
- local t = { -- forcing false packs nicer
- (flags.ignorecombiningmarks and "mark") or false,
- (flags.ignoreligatures and "ligature") or false,
- (flags.ignorebaseglyphs and "base") or false,
- flags.r2l or false,
- }
- markclass = flags.mark_class
- if markclass then
- markclass = resources.markclasses[markclass]
- end
- flags = t
- end
- --
- local name = gk.name
- --
- if features then
- -- scripts, tag, ismac
- local f = { }
- for i=1,#features do
- local df = features[i]
- local tag = strip(lower(df.tag))
- local ft = f[tag] if not ft then ft = {} f[tag] = ft end
- local dscripts = df.scripts
- for i=1,#dscripts do
- local d = dscripts[i]
- local languages = d.langs
- local script = strip(lower(d.script))
- local fts = ft[script] if not fts then fts = {} ft[script] = fts end
- for i=1,#languages do
- fts[strip(lower(languages[i]))] = true
- end
- end
- end
- sequences[#sequences+1] = {
- type = typ,
- chain = chain,
- flags = flags,
- name = name,
- subtables = subtables,
- markclass = markclass,
- features = f,
- }
- else
- lookups[name] = {
- type = typ,
- chain = chain,
- flags = flags,
- subtables = subtables,
- markclass = markclass,
- }
- end
- end
- end
- end
- end
-end
-
--- test this:
---
--- for _, what in next, otf.glists do
--- raw[what] = nil
--- end
-
-actions["prepare lookups"] = function(data,filename,raw)
- local lookups = raw.lookups
- if lookups then
- data.lookups = lookups
- end
-end
-
--- The reverse handler does a bit of redundant splitting but it's seldom
--- seen so we don't bother too much. We could store the replacement
--- in the current list (value instead of true) but it makes other code
--- uglier. Maybe some day.
-
-local function t_uncover(splitter,cache,covers)
- local result = { }
- for n=1,#covers do
- local cover = covers[n]
- local uncovered = cache[cover]
- if not uncovered then
- uncovered = lpegmatch(splitter,cover)
- cache[cover] = uncovered
- end
- result[n] = uncovered
- end
- return result
-end
-
-local function t_hashed(t,cache)
- if t then
- local ht = { }
- for i=1,#t do
- local ti = t[i]
- local tih = cache[ti]
- if not tih then
- tih = { }
- for i=1,#ti do
- tih[ti[i]] = true
- end
- cache[ti] = tih
- end
- ht[i] = tih
- end
- return ht
- else
- return nil
- end
-end
-
-local function s_uncover(splitter,cache,cover)
- if cover == "" then
- return nil
- else
- local uncovered = cache[cover]
- if not uncovered then
- uncovered = lpegmatch(splitter,cover)
- for i=1,#uncovered do
- uncovered[i] = { [uncovered[i]] = true }
- end
- cache[cover] = uncovered
- end
- return uncovered
- end
-end
-
-local s_hashed = t_hashed
-
-local function r_uncover(splitter,cache,cover,replacements)
- if cover == "" then
- return nil
- else
- -- we always have current as { } even in the case of one
- local uncovered = cover[1]
- local replaced = cache[replacements]
- if not replaced then
- replaced = lpegmatch(splitter,replacements)
- cache[replacements] = replaced
- end
- local nu, nr = #uncovered, #replaced
- local r = { }
- if nu == nr then
- for i=1,nu do
- r[uncovered[i]] = replaced[i]
- end
- end
- return r
- end
-end
-
-actions["reorganize lookups"] = function(data,filename,raw)
- -- we prefer the before lookups in a normal order
- if data.lookups then
- local splitter = data.helpers.tounicodetable
- local cache, h_cache = { }, { }
- for _, lookup in next, data.lookups do
- local rules = lookup.rules
- if rules then
- local format = lookup.format
- if format == "class" then
- local before_class = lookup.before_class
- if before_class then
- before_class = t_uncover(splitter,cache,reversed(before_class))
- end
- local current_class = lookup.current_class
- if current_class then
- current_class = t_uncover(splitter,cache,current_class)
- end
- local after_class = lookup.after_class
- if after_class then
- after_class = t_uncover(splitter,cache,after_class)
- end
- for i=1,#rules do
- local rule = rules[i]
- local class = rule.class
- local before = class.before
- if before then
- for i=1,#before do
- before[i] = before_class[before[i]] or { }
- end
- rule.before = t_hashed(before,h_cache)
- end
- local current = class.current
- local lookups = rule.lookups
- if current then
- for i=1,#current do
- current[i] = current_class[current[i]] or { }
- if lookups and not lookups[i] then
- lookups[i] = false -- e.g. we can have two lookups and one replacement
- end
- end
- rule.current = t_hashed(current,h_cache)
- end
- local after = class.after
- if after then
- for i=1,#after do
- after[i] = after_class[after[i]] or { }
- end
- rule.after = t_hashed(after,h_cache)
- end
- rule.class = nil
- end
- lookup.before_class = nil
- lookup.current_class = nil
- lookup.after_class = nil
- lookup.format = "coverage"
- elseif format == "coverage" then
- for i=1,#rules do
- local rule = rules[i]
- local coverage = rule.coverage
- if coverage then
- local before = coverage.before
- if before then
- before = t_uncover(splitter,cache,reversed(before))
- rule.before = t_hashed(before,h_cache)
- end
- local current = coverage.current
- if current then
- current = t_uncover(splitter,cache,current)
- rule.current = t_hashed(current,h_cache)
- end
- local after = coverage.after
- if after then
- after = t_uncover(splitter,cache,after)
- rule.after = t_hashed(after,h_cache)
- end
- rule.coverage = nil
- end
- end
- elseif format == "reversecoverage" then -- special case, single substitution only
- for i=1,#rules do
- local rule = rules[i]
- local reversecoverage = rule.reversecoverage
- if reversecoverage then
- local before = reversecoverage.before
- if before then
- before = t_uncover(splitter,cache,reversed(before))
- rule.before = t_hashed(before,h_cache)
- end
- local current = reversecoverage.current
- if current then
- current = t_uncover(splitter,cache,current)
- rule.current = t_hashed(current,h_cache)
- end
- local after = reversecoverage.after
- if after then
- after = t_uncover(splitter,cache,after)
- rule.after = t_hashed(after,h_cache)
- end
- local replacements = reversecoverage.replacements
- if replacements then
- rule.replacements = r_uncover(splitter,cache,current,replacements)
- end
- rule.reversecoverage = nil
- end
- end
- elseif format == "glyphs" then
- for i=1,#rules do
- local rule = rules[i]
- local glyphs = rule.glyphs
- if glyphs then
- local fore = glyphs.fore
- if fore then
- fore = s_uncover(splitter,cache,fore)
- rule.before = s_hashed(fore,h_cache)
- end
- local back = glyphs.back
- if back then
- back = s_uncover(splitter,cache,back)
- rule.after = s_hashed(back,h_cache)
- end
- local names = glyphs.names
- if names then
- names = s_uncover(splitter,cache,names)
- rule.current = s_hashed(names,h_cache)
- end
- rule.glyphs = nil
- end
- end
- end
- end
- end
- end
-end
-
-local function check_variants(unicode,the_variants,splitter,unicodes)
- local variants = the_variants.variants
- if variants then -- use splitter
- local glyphs = lpegmatch(splitter,variants)
- local done = { [unicode] = true }
- local n = 0
- for i=1,#glyphs do
- local g = glyphs[i]
- if done[g] then
- report_otf("skipping cyclic reference U+%05X in math variant U+%05X",g,unicode)
- else
- if n == 0 then
- n = 1
- variants = { g }
- else
- n = n + 1
- variants[n] = g
- end
- done[g] = true
- end
- end
- if n == 0 then
- variants = nil
- end
- end
- local parts = the_variants.parts
- if parts then
- local p = #parts
- if p > 0 then
- for i=1,p do
- local pi = parts[i]
- pi.glyph = unicodes[pi.component] or 0
- pi.component = nil
- end
- else
- parts = nil
- end
- end
- local italic_correction = the_variants.italic_correction
- if italic_correction and italic_correction == 0 then
- italic_correction = nil
- end
- return variants, parts, italic_correction
-end
-
-actions["analyze math"] = function(data,filename,raw)
- if raw.math then
- data.metadata.math = raw.math
- local unicodes = data.resources.unicodes
- local splitter = data.helpers.tounicodetable
- for unicode, description in next, data.descriptions do
- local glyph = description.glyph
- local mathkerns = glyph.mathkern -- singular
- local horiz_variants = glyph.horiz_variants
- local vert_variants = glyph.vert_variants
- local top_accent = glyph.top_accent
- if mathkerns or horiz_variants or vert_variants or top_accent then
- local math = { }
- if top_accent then
- math.top_accent = top_accent
- end
- if mathkerns then
- for k, v in next, mathkerns do
- if not next(v) then
- mathkerns[k] = nil
- else
- for k, v in next, v do
- if v == 0 then
- k[v] = nil -- height / kern can be zero
- end
- end
- end
- end
- math.kerns = mathkerns
- end
- if horiz_variants then
- math.horiz_variants, math.horiz_parts, math.horiz_italic_correction = check_variants(unicode,horiz_variants,splitter,unicodes)
- end
- if vert_variants then
- math.vert_variants, math.vert_parts, math.vert_italic_correction = check_variants(unicode,vert_variants,splitter,unicodes)
- end
- local italic_correction = description.italic
- if italic_correction and italic_correction ~= 0 then
- math.italic_correction = italic_correction
- end
- description.math = math
- end
- end
- end
-end
-
-actions["reorganize glyph kerns"] = function(data,filename,raw)
- local descriptions = data.descriptions
- local resources = data.resources
- local unicodes = resources.unicodes
- for unicode, description in next, descriptions do
- local kerns = description.glyph.kerns
- if kerns then
- local newkerns = { }
- for k, kern in next, kerns do
- local name = kern.char
- local offset = kern.off
- local lookup = kern.lookup
- if name and offset and lookup then
- local unicode = unicodes[name]
- if unicode then
- if type(lookup) == "table" then
- for l=1,#lookup do
- local lookup = lookup[l]
- local lookupkerns = newkerns[lookup]
- if lookupkerns then
- lookupkerns[unicode] = offset
- else
- newkerns[lookup] = { [unicode] = offset }
- end
- end
- else
- local lookupkerns = newkerns[lookup]
- if lookupkerns then
- lookupkerns[unicode] = offset
- else
- newkerns[lookup] = { [unicode] = offset }
- end
- end
- elseif trace_loading then
- report_otf("problems with unicode %s of kern %s of glyph U+%05X",name,k,unicode)
- end
- end
- end
- description.kerns = newkerns
- end
- end
-end
-
-actions["merge kern classes"] = function(data,filename,raw)
- local gposlist = raw.gpos
- if gposlist then
- local descriptions = data.descriptions
- local resources = data.resources
- local unicodes = resources.unicodes
- local splitter = data.helpers.tounicodetable
- for gp=1,#gposlist do
- local gpos = gposlist[gp]
- local subtables = gpos.subtables
- if subtables then
- for s=1,#subtables do
- local subtable = subtables[s]
- local kernclass = subtable.kernclass -- name is inconsistent with anchor_classes
- if kernclass then -- the next one is quite slow
- local split = { } -- saves time
- for k=1,#kernclass do
- local kcl = kernclass[k]
- local firsts = kcl.firsts
- local seconds = kcl.seconds
- local offsets = kcl.offsets
- local lookups = kcl.lookup -- singular
- if type(lookups) ~= "table" then
- lookups = { lookups }
- end
- -- we can check the max in the loop
- -- local maxseconds = getn(seconds)
- for n, s in next, firsts do
- split[s] = split[s] or lpegmatch(splitter,s)
- end
- local maxseconds = 0
- for n, s in next, seconds do
- if n > maxseconds then
- maxseconds = n
- end
- split[s] = split[s] or lpegmatch(splitter,s)
- end
- for l=1,#lookups do
- local lookup = lookups[l]
- for fk=1,#firsts do -- maxfirsts ?
- local fv = firsts[fk]
- local splt = split[fv]
- if splt then
- local extrakerns = { }
- local baseoffset = (fk-1) * maxseconds
- -- for sk=2,maxseconds do
- -- local sv = seconds[sk]
- for sk, sv in next, seconds do
- local splt = split[sv]
- if splt then -- redundant test
- local offset = offsets[baseoffset + sk]
- if offset then
- for i=1,#splt do
- extrakerns[splt[i]] = offset
- end
- end
- end
- end
- for i=1,#splt do
- local first_unicode = splt[i]
- local description = descriptions[first_unicode]
- if description then
- local kerns = description.kerns
- if not kerns then
- kerns = { } -- unicode indexed !
- description.kerns = kerns
- end
- local lookupkerns = kerns[lookup]
- if not lookupkerns then
- lookupkerns = { }
- kerns[lookup] = lookupkerns
- end
- for second_unicode, kern in next, extrakerns do
- lookupkerns[second_unicode] = kern
- end
- elseif trace_loading then
- report_otf("no glyph data for U+%05X", first_unicode)
- end
- end
- end
- end
- end
- end
- subtable.kernclass = { }
- end
- end
- end
- end
- end
-end
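-- Editorial sketch, not part of the original file: the offsets array of a kern
-- class is a flattened firsts x seconds matrix, so the kern between first class
-- fk and second class sk sits at offsets[(fk-1)*maxseconds + sk]; that is what
-- the baseoffset computation in "merge kern classes" above exploits (ignoring
-- the index holes the comments further down warn about). Sample data invented.
local kcl = {
  firsts  = { "A V", "T" },     -- two classes of first glyphs
  seconds = { "o e", "y" },     -- two classes of second glyphs
  offsets = { -80, -40,         -- first class 1 against second classes 1..2
              -60, -20 },       -- first class 2 against second classes 1..2
}
local maxseconds = #kcl.seconds
local function classkern(fk,sk)
  return kcl.offsets[(fk-1)*maxseconds + sk]
end
print(classkern(2,1)) --> -60, i.e. "T" followed by "o" or "e"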
-
-actions["check glyphs"] = function(data,filename,raw)
- for unicode, description in next, data.descriptions do
- description.glyph = nil
- end
-end
-
--- future versions will remove _
-
-actions["check metadata"] = function(data,filename,raw)
- local metadata = data.metadata
- for _, k in next, mainfields do
- if valid_fields[k] then
- local v = raw[k]
- if not metadata[k] then
- metadata[k] = v
- end
- end
- end
- -- metadata.pfminfo = raw.pfminfo -- not already done?
- local ttftables = metadata.ttf_tables
- if ttftables then
- for i=1,#ttftables do
- ttftables[i].data = "deleted"
- end
- end
-end
-
-actions["cleanup tables"] = function(data,filename,raw)
- data.resources.indices = nil -- not needed
- data.helpers = nil
-end
-
--- kern: ttf has a table with kerns
---
--- Weird, as maxfirst and maxseconds can have holes: first seems to be indexed, but
--- seconds can start at 2 .. this needs to be fixed, as getn as well as # are sort of
--- unpredictable; alternatively we could force a [1] if not set (maybe I will do that
--- anyway).
-
--- we can share { } as it is never set
-
---- ligatures have an extra specification.char entry that we don't use
-
-actions["reorganize glyph lookups"] = function(data,filename,raw)
- local resources = data.resources
- local unicodes = resources.unicodes
- local descriptions = data.descriptions
- local splitter = data.helpers.tounicodelist
-
- local lookuptypes = resources.lookuptypes
-
- for unicode, description in next, descriptions do
- local lookups = description.glyph.lookups
- if lookups then
- for tag, lookuplist in next, lookups do
- for l=1,#lookuplist do
- local lookup = lookuplist[l]
- local specification = lookup.specification
- local lookuptype = lookup.type
- local lt = lookuptypes[tag]
- if not lt then
- lookuptypes[tag] = lookuptype
- elseif lt ~= lookuptype then
- report_otf("conflicting lookuptypes: %s => %s and %s",tag,lt,lookuptype)
- end
- if lookuptype == "ligature" then
- lookuplist[l] = { lpegmatch(splitter,specification.components) }
- elseif lookuptype == "alternate" then
- lookuplist[l] = { lpegmatch(splitter,specification.components) }
- elseif lookuptype == "substitution" then
- lookuplist[l] = unicodes[specification.variant]
- elseif lookuptype == "multiple" then
- lookuplist[l] = { lpegmatch(splitter,specification.components) }
- elseif lookuptype == "position" then
- lookuplist[l] = {
- specification.x or 0,
- specification.y or 0,
- specification.h or 0,
- specification.v or 0
- }
- elseif lookuptype == "pair" then
- local one = specification.offsets[1]
- local two = specification.offsets[2]
- local paired = unicodes[specification.paired]
- if one then
- if two then
- lookuplist[l] = { paired, { one.x or 0, one.y or 0, one.h or 0, one.v or 0 }, { two.x or 0, two.y or 0, two.h or 0, two.v or 0 } }
- else
- lookuplist[l] = { paired, { one.x or 0, one.y or 0, one.h or 0, one.v or 0 } }
- end
- else
- if two then
- lookuplist[l] = { paired, { }, { two.x or 0, two.y or 0, two.h or 0, two.v or 0} } -- maybe nil instead of { }
- else
- lookuplist[l] = { paired }
- end
- end
- end
- end
- end
- local slookups, mlookups
- for tag, lookuplist in next, lookups do
- if #lookuplist == 1 then
- if slookups then
- slookups[tag] = lookuplist[1]
- else
- slookups = { [tag] = lookuplist[1] }
- end
- else
- if mlookups then
- mlookups[tag] = lookuplist
- else
- mlookups = { [tag] = lookuplist }
- end
- end
- end
- if slookups then
- description.slookups = slookups
- end
- if mlookups then
- description.mlookups = mlookups
- end
- end
- end
-
-end
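-- Editorial sketch, not part of the original file: after "reorganize glyph
-- lookups" a glyph's raw lookup lists are reduced to plain values and split
-- into slookups (one entry per tag) and mlookups (a list of entries per tag).
-- The tag names and code points below are made up.
local description = {
  slookups = {
    -- a substitution lookup maps directly to the replacement code point
    ["ss01_latn_dflt"] = 0xE015,
  },
  mlookups = {
    -- a ligature lookup keeps one component list per entry
    ["liga_latn_dflt"] = {
      { 0x0066, 0x0069 },         -- f + i
      { 0x0066, 0x0066, 0x0069 }, -- f + f + i
    },
  },
}
print(#description.mlookups["liga_latn_dflt"][2]) --> 3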
-
-actions["reorganize glyph anchors"] = function(data,filename,raw) -- when we replace inplace we safe entries
- local descriptions = data.descriptions
- for unicode, description in next, descriptions do
- local anchors = description.glyph.anchors
- if anchors then
- for class, data in next, anchors do
- if class == "baselig" then
- for tag, specification in next, data do
- for i=1,#specification do
- local si = specification[i]
- specification[i] = { si.x or 0, si.y or 0 }
- end
- end
- else
- for tag, specification in next, data do
- data[tag] = { specification.x or 0, specification.y or 0 }
- end
- end
- end
- description.anchors = anchors
- end
- end
-end
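-- Editorial sketch, not part of the original file: "reorganize glyph anchors"
-- flattens anchor specifications into bare { x, y } pairs; baselig anchors keep
-- one pair per ligature component. Anchor names and coordinates are invented.
local raw_anchors = { -- shape before the pass
  basechar = { ["Anchor-1"] = { x = 120, y = 540 } },
  baselig  = { ["Anchor-2"] = { { x = 80, y = 510 }, { x = 310, y = 505 } } },
}
local anchors = { -- shape after the pass
  basechar = { ["Anchor-1"] = { 120, 540 } },
  baselig  = { ["Anchor-2"] = { { 80, 510 }, { 310, 505 } } },
}
print(anchors.baselig["Anchor-2"][2][1]) --> 310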
-
--- modes: node, base, none
-
-function otf.setfeatures(tfmdata,features)
- local okay = constructors.initializefeatures("otf",tfmdata,features,trace_features,report_otf)
- if okay then
- return constructors.collectprocessors("otf",tfmdata,features,trace_features,report_otf)
- else
- return { } -- will become false
- end
-end
-
--- the first version made a top/mid/not extensible table, now we just
--- pass on the variants data and deal with it in the tfm scaler (there
--- is no longer an extensible table anyway)
---
--- we cannot share descriptions as virtual fonts might extend them (ok,
--- we could use a cache with a hash)
---
--- we already assign an empty table to characters as we can add for
--- instance protruding info and loop over characters; one is not supposed
--- to change descriptions and if one does so one should make a copy!
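-- Editorial sketch, not part of the original file: copytotfm below seeds the
-- characters table with one empty subtable per code point and, for math
-- glyphs, links the size variants through the .next field so the scaler can
-- walk the chain. The code points below are invented.
local descriptions = { [0x222B] = { math = { vert_variants = { 0xE100, 0xE101 } } } }
local characters   = { [0xE100] = { }, [0xE101] = { } }
for unicode in next, descriptions do
  characters[unicode] = { }
end
local c = characters[0x222B]
for _, un in ipairs(descriptions[0x222B].math.vert_variants) do
  c.next = un          -- current glyph points to the next larger variant
  c = characters[un]   -- c ends up as the last glyph in the chain
end
print(characters[0x222B].next == 0xE100, characters[0xE100].next == 0xE101) --> true  true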
-
-local function copytotfm(data,cache_id)
- if data then
- local metadata = data.metadata
- local resources = data.resources
- local properties = derivetable(data.properties)
- local descriptions = derivetable(data.descriptions)
- local goodies = derivetable(data.goodies)
- local characters = { }
- local parameters = { }
- local mathparameters = { }
- --
- local pfminfo = metadata.pfminfo or { }
- local resources = data.resources
- local unicodes = resources.unicodes
- -- local mode = data.mode or "base"
- local spaceunits = 500
- local spacer = "space"
- local designsize = metadata.designsize or metadata.design_size or 100
- local mathspecs = metadata.math
- --
- if designsize == 0 then
- designsize = 100
- end
- if mathspecs then
- for name, value in next, mathspecs do
- mathparameters[name] = value
- end
- end
- for unicode, _ in next, data.descriptions do -- use parent table
- characters[unicode] = { }
- end
- if mathspecs then
- -- we could move this to the scaler but not that much is saved
- -- and this is cleaner
- for unicode, character in next, characters do
- local d = descriptions[unicode]
- local m = d.math
- if m then
- -- watch out: luatex uses horiz_variants for the parts
- local variants = m.horiz_variants
- local parts = m.horiz_parts
- -- local done = { [unicode] = true }
- if variants then
- local c = character
- for i=1,#variants do
- local un = variants[i]
- -- if done[un] then
- -- -- report_otf("skipping cyclic reference U+%05X in math variant U+%05X",un,unicode)
- -- else
- c.next = un
- c = characters[un]
- -- done[un] = true
- -- end
- end -- c is now last in chain
- c.horiz_variants = parts
- elseif parts then
- character.horiz_variants = parts
- end
- local variants = m.vert_variants
- local parts = m.vert_parts
- -- local done = { [unicode] = true }
- if variants then
- local c = character
- for i=1,#variants do
- local un = variants[i]
- -- if done[un] then
- -- -- report_otf("skipping cyclic reference U+%05X in math variant U+%05X",un,unicode)
- -- else
- c.next = un
- c = characters[un]
- -- done[un] = true
- -- end
- end -- c is now last in chain
- c.vert_variants = parts
- elseif parts then
- character.vert_variants = parts
- end
- local italic_correction = m.vert_italic_correction
- if italic_correction then
- character.vert_italic_correction = italic_correction -- was c.
- end
- local top_accent = m.top_accent
- if top_accent then
- character.top_accent = top_accent
- end
- local kerns = m.kerns
- if kerns then
- character.mathkerns = kerns
- end
- end
- end
- end
- -- end math
- local monospaced = metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose.proportion == "Monospaced")
- local charwidth = pfminfo.avgwidth -- or unset
- local italicangle = metadata.italicangle
- local charxheight = pfminfo.os2_xheight and pfminfo.os2_xheight > 0 and pfminfo.os2_xheight
- properties.monospaced = monospaced
- parameters.italicangle = italicangle
- parameters.charwidth = charwidth
- parameters.charxheight = charxheight
- --
- local space = 0x0020 -- unicodes['space'], unicodes['emdash']
- local emdash = 0x2014 -- unicodes['space'], unicodes['emdash']
- if monospaced then
- if descriptions[space] then
- spaceunits, spacer = descriptions[space].width, "space"
- end
- if not spaceunits and descriptions[emdash] then
- spaceunits, spacer = descriptions[emdash].width, "emdash"
- end
- if not spaceunits and charwidth then
- spaceunits, spacer = charwidth, "charwidth"
- end
- else
- if descriptions[space] then
- spaceunits, spacer = descriptions[space].width, "space"
- end
- if not spaceunits and descriptions[emdash] then
- spaceunits, spacer = descriptions[emdash].width/2, "emdash/2"
- end
- if not spaceunits and charwidth then
- spaceunits, spacer = charwidth, "charwidth"
- end
- end
- spaceunits = tonumber(spaceunits) or 500 -- brrr
- -- we need a runtime lookup because of running from cdrom or zip, brrr (shouldn't we use the basename then?)
- local filename = constructors.checkedfilename(resources)
- local fontname = metadata.fontname
- local fullname = metadata.fullname or fontname
- local units = metadata.units_per_em or 1000
- --
- if units == 0 then -- catch bugs in fonts
- units = 1000
- metadata.units_per_em = 1000
- end
- --
- parameters.slant = 0
- parameters.space = spaceunits -- 3.333 (cmr10)
- parameters.space_stretch = units/2 -- 500 -- 1.666 (cmr10)
- parameters.space_shrink = 1*units/3 -- 333 -- 1.111 (cmr10)
- parameters.x_height = 2*units/5 -- 400
- parameters.quad = units -- 1000
- if spaceunits < 2*units/5 then
- -- todo: warning
- end
- if italicangle then
- parameters.italicangle = italicangle
- parameters.italicfactor = math.cos(math.rad(90+italicangle))
- parameters.slant = - math.round(math.tan(italicangle*math.pi/180))
- end
- if monospaced then
- parameters.space_stretch = 0
- parameters.space_shrink = 0
- elseif syncspace then --
- parameters.space_stretch = spaceunits/2
- parameters.space_shrink = spaceunits/3
- end
- parameters.extra_space = parameters.space_shrink -- 1.111 (cmr10)
- if charxheight then
- parameters.x_height = charxheight
- else
- local x = 0x78 -- unicodes['x']
- if x then
- local x = descriptions[x]
- if x then
- parameters.x_height = x.height
- end
- end
- end
- --
- parameters.designsize = (designsize/10)*65536
- parameters.ascender = abs(metadata.ascent or 0)
- parameters.descender = abs(metadata.descent or 0)
- parameters.units = units
- --
- properties.space = spacer
- properties.encodingbytes = 2
- properties.format = data.format or fonts.formats[filename] or "opentype"
- properties.noglyphnames = true
- properties.filename = filename
- properties.fontname = fontname
- properties.fullname = fullname
- properties.psname = fontname or fullname
- properties.name = filename or fullname
- --
- -- properties.name = specification.name
- -- properties.sub = specification.sub
- return {
- characters = characters,
- descriptions = descriptions,
- parameters = parameters,
- mathparameters = mathparameters,
- resources = resources,
- properties = properties,
- goodies = goodies,
- }
- end
-end
-
-local function otftotfm(specification)
- local cache_id = specification.hash
- local tfmdata = containers.read(constructors.cache,cache_id)
- if not tfmdata then
- local name = specification.name
- local sub = specification.sub
- local filename = specification.filename
- local format = specification.format
- local features = specification.features.normal
- local rawdata = otf.load(filename,format,sub,features and features.featurefile)
- if rawdata and next(rawdata) then
- rawdata.lookuphash = { }
- tfmdata = copytotfm(rawdata,cache_id)
- if tfmdata and next(tfmdata) then
- -- at this moment no characters are assigned yet, only empty slots
- local features = constructors.checkedfeatures("otf",features)
- local shared = tfmdata.shared
- if not shared then
- shared = { }
- tfmdata.shared = shared
- end
- shared.rawdata = rawdata
- -- shared.features = features -- default
- shared.dynamics = { }
- -- shared.processes = { }
- tfmdata.changed = { }
- shared.features = features
- shared.processes = otf.setfeatures(tfmdata,features)
- end
- end
- containers.write(constructors.cache,cache_id,tfmdata)
- end
- return tfmdata
-end
-
-local function read_from_otf(specification)
- local tfmdata = otftotfm(specification)
- if tfmdata then
- -- this late ? .. needs checking
- tfmdata.properties.name = specification.name
- tfmdata.properties.sub = specification.sub
- --
- tfmdata = constructors.scale(tfmdata,specification)
- local allfeatures = tfmdata.shared.features or specification.features.normal
- constructors.applymanipulators("otf",tfmdata,allfeatures,trace_features,report_otf)
- constructors.setname(tfmdata,specification) -- only otf?
- fonts.loggers.register(tfmdata,file.extname(specification.filename),specification)
- end
- return tfmdata
-end
-
-local function checkmathsize(tfmdata,mathsize)
- local mathdata = tfmdata.shared.rawdata.metadata.math
- local mathsize = tonumber(mathsize)
- if mathdata then -- we cannot use mathparameters as luatex will complain
- local parameters = tfmdata.parameters
- parameters.scriptpercentage = mathdata.ScriptPercentScaleDown
- parameters.scriptscriptpercentage = mathdata.ScriptScriptPercentScaleDown
- parameters.mathsize = mathsize
- end
-end
-
-registerotffeature {
- name = "mathsize",
- description = "apply mathsize as specified in the font",
- initializers = {
- base = checkmathsize,
- node = checkmathsize,
- }
-}
-
--- helpers
-
-function otf.collectlookups(rawdata,kind,script,language)
- local sequences = rawdata.resources.sequences
- if sequences then
- local featuremap, featurelist = { }, { }
- for s=1,#sequences do
- local sequence = sequences[s]
- local features = sequence.features
- features = features and features[kind]
- features = features and (features[script] or features[default] or features[wildcard])
- features = features and (features[language] or features[default] or features[wildcard])
- if features then
- local subtables = sequence.subtables
- if subtables then
- for s=1,#subtables do
- local ss = subtables[s]
- if not featuremap[ss] then -- check the subtable name, not the numeric index
- featuremap[ss] = true
- featurelist[#featurelist+1] = ss
- end
- end
- end
- end
- end
- if #featurelist > 0 then
- return featuremap, featurelist
- end
- end
- return nil, nil
-end
-
--- readers
-
-local function check_otf(forced,specification,suffix,what)
- local name = specification.name
- if forced then
- name = file.addsuffix(name,suffix,true)
- end
- local fullname = findbinfile(name,suffix) or ""
- if fullname == "" then
- fullname = fonts.names.getfilename(name,suffix) or ""
- end
- if fullname ~= "" then
- specification.filename = fullname
- specification.format = what
- return read_from_otf(specification)
- end
-end
-
-local function opentypereader(specification,suffix,what)
- local forced = specification.forced or ""
- if forced == "otf" then
- return check_otf(true,specification,forced,"opentype")
- elseif forced == "ttf" or forced == "ttc" or forced == "dfont" then
- return check_otf(true,specification,forced,"truetype")
- else
- return check_otf(false,specification,suffix,what)
- end
-end
-
-readers.opentype = opentypereader
-
-local formats = fonts.formats
-
-formats.otf = "opentype"
-formats.ttf = "truetype"
-formats.ttc = "truetype"
-formats.dfont = "truetype"
-
-function readers.otf (specification) return opentypereader(specification,"otf",formats.otf ) end
-function readers.ttf (specification) return opentypereader(specification,"ttf",formats.ttf ) end
-function readers.ttc (specification) return opentypereader(specification,"ttf",formats.ttc ) end
-function readers.dfont(specification) return opentypereader(specification,"ttf",formats.dfont) end
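-- Editorial sketch, not part of the original file: all four readers above end
-- up in opentypereader, which consults only a few fields of the specification
-- table; check_otf then fills in filename and format before handing over to
-- read_from_otf. The values below are invented.
local specification = {
  name     = "Iwona-Regular",                -- request name
  forced   = "otf",                          -- selects the "opentype" branch
  sub      = false,                          -- subfont, only relevant for ttc/dfont
  hash     = "iwona-regular @ otf",          -- cache key used by otftotfm
  features = { normal = { mode = "node", liga = true, kern = true } },
}
print(specification.forced == "otf") --> true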
-
--- this will be overloaded
-
-function otf.scriptandlanguage(tfmdata,attr)
- local properties = tfmdata.properties
- return properties.script or "dflt", properties.language or "dflt"
-end
diff --git a/otfl-font-oti.lua b/otfl-font-oti.lua
deleted file mode 100644
index d6853db..0000000
--- a/otfl-font-oti.lua
+++ /dev/null
@@ -1,92 +0,0 @@
-if not modules then modules = { } end modules ['font-oti'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local lower = string.lower
-
-local allocate = utilities.storage.allocate
-
-local fonts = fonts
-local otf = { }
-fonts.handlers.otf = otf
-
-local otffeatures = fonts.constructors.newfeatures("otf")
-local registerotffeature = otffeatures.register
-
-registerotffeature {
- name = "features",
- description = "initialization of feature handler",
- default = true,
-}
-
--- these are later hooked into node and base initializers
-
-local otftables = otf.tables -- not always defined
-
-local function setmode(tfmdata,value)
- if value then
- tfmdata.properties.mode = lower(value)
- end
-end
-
-local function setlanguage(tfmdata,value)
- if value then
- local cleanvalue = lower(value)
- local languages = otftables and otftables.languages
- local properties = tfmdata.properties
- if not languages then
- properties.language = cleanvalue
- elseif languages[value] then
- properties.language = cleanvalue
- else
- properties.language = "dflt"
- end
- end
-end
-
-local function setscript(tfmdata,value)
- if value then
- local cleanvalue = lower(value)
- local scripts = otftables and otftables.scripts
- local properties = tfmdata.properties
- if not scripts then
- properties.script = cleanvalue
- elseif scripts[value] then
- properties.script = cleanvalue
- else
- properties.script = "dflt"
- end
- end
-end
-
-registerotffeature {
- name = "mode",
- description = "mode",
- initializers = {
- base = setmode,
- node = setmode,
- }
-}
-
-registerotffeature {
- name = "language",
- description = "language",
- initializers = {
- base = setlanguage,
- node = setlanguage,
- }
-}
-
-registerotffeature {
- name = "script",
- description = "script",
- initializers = {
- base = setscript,
- node = setscript,
- }
-}
-
diff --git a/otfl-font-otn.lua b/otfl-font-otn.lua
deleted file mode 100644
index 8e67597..0000000
--- a/otfl-font-otn.lua
+++ /dev/null
@@ -1,2712 +0,0 @@
-if not modules then modules = { } end modules ['font-otn'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- this is still somewhat preliminary and it will get better in due time;
--- much functionality could only be implemented thanks to the husayni font
--- of Idris Samawi Hamid, to whom we dedicate this module.
-
--- in retrospect it always looks easy but believe it or not, it took a lot
--- of work to get proper open type support done: buggy fonts, fuzzy specs,
--- specially made test fonts, many skype sessions between taco, idris and me,
--- torture tests etc etc ... unfortunately the code does not show how much
--- time it took ...
-
--- todo:
---
--- kerning is probably not yet ok for latin around disc nodes
--- extension infrastructure (for usage out of context)
--- sorting features according to vendors/renderers
--- alternative loop quitters
--- check cursive and r2l
--- find out where ignore-mark-classes went
--- default features (per language, script)
--- handle positions (we need example fonts)
--- handle gpos_single (we might want an extra width field in glyph nodes because adding kerns might interfere)
--- mark (to mark) code is still not what it should be (too messy but we need some more extreme husayni tests)
-
---[[ldx--
-<p>This module is a bit more split up than I'd like but since we also want to test
-with plain <l n='tex'/> it has to be so. This module is part of <l n='context'/>
-and discussion about improvements and functionality mostly happens on the
-<l n='context'/> mailing list.</p>
-
-<p>The specification of OpenType is kind of vague. Apart from the lack of a proper
-free specification there's also the problem that Microsoft and Adobe
-may have their own interpretation of how and in what order to apply features.
-In general the Microsoft website has more detailed specifications and is a
-better reference. There is also some information in the FontForge help files.</p>
-
-<p>Because so much is possible, fonts might contain bugs and/or be made to
-work with certain renderers. These may evolve over time which may have the side
-effect that suddenly fonts behave differently.</p>
-
-<p>After a lot of experiments (mostly by Taco, me and Idris) we're now at yet another
-implementation. Of course all errors are mine and of course the code can be
-improved. There are quite some optimizations going on here and processing speed
-is currently acceptable. Not all functions are implemented yet, often because I
-lack the fonts for testing. Many scripts are not yet supported either, but I will
-look into them as soon as <l n='context'/> users ask for it.</p>
-
-<p>Because there are different interpretations possible, I will extend the code
-with more (configurable) variants. I can also add hooks for users so that they can
-write their own extensions.</p>
-
-<p>Glyphs are indexed not by unicode but in their own way. This is because there is no
-relationship with unicode at all, apart from the fact that a font might cover certain
-ranges of characters. One character can have multiple shapes. However, at the
-<l n='tex'/> end we use unicode, so all extra glyphs are mapped into a private
-space. This is needed because we need to access them and <l n='tex'/> has to include
-them in the output eventually.</p>
-
-<p>The raw table as it comes from <l n='fontforge'/> gets reorganized to fit our needs.
-In <l n='context'/> that table is packed (similar tables are shared) and cached on disk
-so that successive runs can use the optimized table (after loading the table is
-unpacked). The flattening code used later is a prelude to an even more compact table
-format (and as such it keeps evolving).</p>
-
-<p>This module is sparsely documented because it is a moving target. The table format
-of the reader changes and we experiment a lot with different methods for supporting
-features.</p>
-
-<p>As with the <l n='afm'/> code, we may decide to store more information in the
-<l n='otf'/> table.</p>
-
-<p>Incrementing the version number will force a re-cache. We jump the number by one
-when there's a fix in the <l n='fontforge'/> library or <l n='lua'/> code that
-results in different tables.</p>
---ldx]]--
-
--- action handler chainproc chainmore comment
---
--- gsub_single ok ok ok
--- gsub_multiple ok ok not implemented yet
--- gsub_alternate ok ok not implemented yet
--- gsub_ligature ok ok ok
--- gsub_context ok --
--- gsub_contextchain ok --
--- gsub_reversecontextchain ok --
--- chainsub -- ok
--- reversesub -- ok
--- gpos_mark2base ok ok
--- gpos_mark2ligature ok ok
--- gpos_mark2mark ok ok
--- gpos_cursive ok untested
--- gpos_single ok ok
--- gpos_pair ok ok
--- gpos_context ok --
--- gpos_contextchain ok --
---
--- todo: contextpos and contextsub and class stuff
---
--- actions:
---
--- handler : actions triggered by lookup
--- chainproc : actions triggered by contextual lookup
--- chainmore : multiple substitutions triggered by contextual lookup (e.g. fij -> f + ij)
---
--- remark: the 'not implemented yet' variants will be done when we have fonts that use them
--- remark: we need to check what to do with discretionaries
-
--- We used to have independent hashes for lookups but as the tags are unique
--- we now use only one hash. If needed we can have multiple again but in that
--- case I will probably prefix (i.e. rename) the lookups in the cached font file.
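-- Editorial sketch, not part of the original file: the handlers, chainprocs
-- and chainmores defined below all share one calling convention: they receive
-- the current glyph node plus the lookup data and return the (possibly new)
-- head of the match and a done flag. The stub below only mimics that shape,
-- with a plain table standing in for a node.
local handlers = { }
function handlers.gsub_single(start,kind,lookupname,replacement)
  start.char = replacement   -- replace in place
  return start, true         -- head of the match, done flag
end
local fakenode = { char = 0x0061 }
local newstart, done = handlers.gsub_single(fakenode,"smcp","s_s_1",0xE061)
print(done, string.format("U+%05X",newstart.char)) --> true  U+E061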
-
-local concat, insert, remove = table.concat, table.insert, table.remove
-local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
-local type, next, tonumber, tostring = type, next, tonumber, tostring
-local lpegmatch = lpeg.match
-local random = math.random
-
-local logs, trackers, nodes, attributes = logs, trackers, nodes, attributes
-
-local registertracker = trackers.register
-
-local fonts = fonts
-local otf = fonts.handlers.otf
-
-local trace_lookups = false registertracker("otf.lookups", function(v) trace_lookups = v end)
-local trace_singles = false registertracker("otf.singles", function(v) trace_singles = v end)
-local trace_multiples = false registertracker("otf.multiples", function(v) trace_multiples = v end)
-local trace_alternatives = false registertracker("otf.alternatives", function(v) trace_alternatives = v end)
-local trace_ligatures = false registertracker("otf.ligatures", function(v) trace_ligatures = v end)
-local trace_contexts = false registertracker("otf.contexts", function(v) trace_contexts = v end)
-local trace_marks = false registertracker("otf.marks", function(v) trace_marks = v end)
-local trace_kerns = false registertracker("otf.kerns", function(v) trace_kerns = v end)
-local trace_cursive = false registertracker("otf.cursive", function(v) trace_cursive = v end)
-local trace_preparing = false registertracker("otf.preparing", function(v) trace_preparing = v end)
-local trace_bugs = false registertracker("otf.bugs", function(v) trace_bugs = v end)
-local trace_details = false registertracker("otf.details", function(v) trace_details = v end)
-local trace_applied = false registertracker("otf.applied", function(v) trace_applied = v end)
-local trace_steps = false registertracker("otf.steps", function(v) trace_steps = v end)
-local trace_skips = false registertracker("otf.skips", function(v) trace_skips = v end)
-local trace_directions = false registertracker("otf.directions", function(v) trace_directions = v end)
-
-local report_direct = logs.reporter("fonts","otf direct")
-local report_subchain = logs.reporter("fonts","otf subchain")
-local report_chain = logs.reporter("fonts","otf chain")
-local report_process = logs.reporter("fonts","otf process")
-local report_prepare = logs.reporter("fonts","otf prepare")
-
-registertracker("otf.verbose_chain", function(v) otf.setcontextchain(v and "verbose") end)
-registertracker("otf.normal_chain", function(v) otf.setcontextchain(v and "normal") end)
-
-registertracker("otf.replacements", "otf.singles,otf.multiples,otf.alternatives,otf.ligatures")
-registertracker("otf.positions","otf.marks,otf.kerns,otf.cursive")
-registertracker("otf.actions","otf.replacements,otf.positions")
-registertracker("otf.injections","nodes.injections")
-
-registertracker("*otf.sample","otf.steps,otf.actions,otf.analyzing")
-
-local insert_node_after = node.insert_after
-local delete_node = nodes.delete
-local copy_node = node.copy
-local find_node_tail = node.tail or node.slide
-local set_attribute = node.set_attribute
-local has_attribute = node.has_attribute
-local flush_node_list = node.flush_list
-
-local setmetatableindex = table.setmetatableindex
-
-local zwnj = 0x200C
-local zwj = 0x200D
-local wildcard = "*"
-local default = "dflt"
-
-local nodecodes = nodes.nodecodes
-local whatcodes = nodes.whatcodes
-local glyphcodes = nodes.glyphcodes
-
-local glyph_code = nodecodes.glyph
-local glue_code = nodecodes.glue
-local disc_code = nodecodes.disc
-local whatsit_code = nodecodes.whatsit
-
-local dir_code = whatcodes.dir
-local localpar_code = whatcodes.localpar
-
-local ligature_code = glyphcodes.ligature
-
-local privateattribute = attributes.private
-
--- Something is messed up: we have two mark / ligature indices, one at the injection
--- end and one here ... this is based on KE's patches but there is something fishy
--- there as I'm pretty sure that for husayni we need some connection (as it's much
--- more complex than an average font) but I need proper examples of all cases, not
--- of only some.
-
-local state = privateattribute('state')
-local markbase = privateattribute('markbase')
-local markmark = privateattribute('markmark')
-local markdone = privateattribute('markdone') -- assigned at the injection end
-local cursbase = privateattribute('cursbase')
-local curscurs = privateattribute('curscurs')
-local cursdone = privateattribute('cursdone')
-local kernpair = privateattribute('kernpair')
-local ligacomp = privateattribute('ligacomp') -- assigned here (ideally it should be combined)
-
-local injections = nodes.injections
-local setmark = injections.setmark
-local setcursive = injections.setcursive
-local setkern = injections.setkern
-local setpair = injections.setpair
-
-local markonce = true
-local cursonce = true
-local kernonce = true
-
-local fonthashes = fonts.hashes
-local fontdata = fonthashes.identifiers
-
-local otffeatures = fonts.constructors.newfeatures("otf")
-local registerotffeature = otffeatures.register
-
-local onetimemessage = fonts.loggers.onetimemessage
-
-otf.defaultnodealternate = "none" -- first last
-
--- we share some vars here, after all, we have no nested lookups and
--- less code
-
-local tfmdata = false
-local characters = false
-local descriptions = false
-local resources = false
-local marks = false
-local currentfont = false
-local lookuptable = false
-local anchorlookups = false
-local lookuptypes = false
-local handlers = { }
-local rlmode = 0
-local featurevalue = false
-
--- we cannot optimize with "start = first_glyph(head)" because then we don't
--- know which rlmode we're in, which messes up cursive handling later on
---
--- head is always a whatsit so we can safely assume that head is not changed
-
--- we use this for special testing and documentation
-
-local checkstep = (nodes and nodes.tracers and nodes.tracers.steppers.check) or function() end
-local registerstep = (nodes and nodes.tracers and nodes.tracers.steppers.register) or function() end
-local registermessage = (nodes and nodes.tracers and nodes.tracers.steppers.message) or function() end
-
-local function logprocess(...)
- if trace_steps then
- registermessage(...)
- end
- report_direct(...)
-end
-
-local function logwarning(...)
- report_direct(...)
-end
-
-local function gref(n)
- if type(n) == "number" then
- local description = descriptions[n]
- local name = description and description.name
- if name then
- return format("U+%05X (%s)",n,name)
- else
- return format("U+%05X",n)
- end
- elseif not n then
- return "<error in tracing>"
- else
- local num, nam = { }, { }
- for i=1,#n do
- local ni = n[i]
- if tonumber(ni) then -- later we will start at 2
- local di = descriptions[ni]
- num[i] = format("U+%05X",ni)
- nam[i] = di and di.name or "?"
- end
- end
- return format("%s (%s)",concat(num," "), concat(nam," "))
- end
-end
-
-local function cref(kind,chainname,chainlookupname,lookupname,index)
- if index then
- return format("feature %s, chain %s, sub %s, lookup %s, index %s",kind,chainname,chainlookupname,lookupname,index)
- elseif lookupname then
- return format("feature %s, chain %s, sub %s, lookup %s",kind,chainname or "?",chainlookupname or "?",lookupname)
- elseif chainlookupname then
- return format("feature %s, chain %s, sub %s",kind,chainname or "?",chainlookupname)
- elseif chainname then
- return format("feature %s, chain %s",kind,chainname)
- else
- return format("feature %s",kind)
- end
-end
-
-local function pref(kind,lookupname)
- return format("feature %s, lookup %s",kind,lookupname)
-end
-
--- we can assume that languages that use marks are not hyphenated
--- we can also assume that at most one discretionary is present
-
-local function markstoligature(kind,lookupname,start,stop,char)
- local n = copy_node(start)
- local keep = start
- local current
- current, start = insert_node_after(start,start,n)
- local snext = stop.next
- current.next = snext
- if snext then
- snext.prev = current
- end
- start.prev, stop.next = nil, nil
- current.char, current.subtype, current.components = char, ligature_code, start
- return keep
-end
-
-local function toligature(kind,lookupname,start,stop,char,markflag,discfound) -- brr head
- if start == stop then
- start.char = char
- return start
- elseif discfound then
- -- print("start->stop",nodes.tosequence(start,stop))
- local components = start.components
- if components then
- flush_node_list(components)
- start.components = nil
- end
- local lignode = copy_node(start)
- lignode.font = start.font
- lignode.char = char
- lignode.subtype = ligature_code
- local next = stop.next
- local prev = start.prev
- stop.next = nil
- start.prev = nil
- lignode.components = start
- -- print("lignode",nodes.tosequence(lignode))
- -- print("components",nodes.tosequence(lignode.components))
- prev.next = lignode
- if next then
- next.prev = lignode
- end
- lignode.next = next
- lignode.prev = prev
- -- print("start->end",nodes.tosequence(start))
- return lignode
- else
- -- start is the ligature
- local deletemarks = markflag ~= "mark"
- local n = copy_node(start)
- local current
- current, start = insert_node_after(start,start,n)
- local snext = stop.next
- current.next = snext
- if snext then
- snext.prev = current
- end
- start.prev = nil
- stop.next = nil
- current.char = char
- current.subtype = ligature_code
- current.components = start
- local head = current
- -- this is messy ... we should get rid of the components eventually
- local i = 0 -- is index of base
- while start do
- if not marks[start.char] then
- i = i + 1
- elseif not deletemarks then -- quite fishy
- set_attribute(start,ligacomp,i)
- if trace_marks then
- logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(start.char),i)
- end
- head, current = insert_node_after(head,current,copy_node(start))
- end
- start = start.next
- end
- start = current.next
- while start and start.id == glyph_code do
- if marks[start.char] then
- set_attribute(start,ligacomp,i)
- if trace_marks then
- logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(start.char),i)
- end
- else
- break
- end
- start = start.next
- end
- --
- -- we do need components in funny kerning mode but maybe I can better reconstruct them
- -- then, as we do have the font components info available; removing components makes the
- -- previous code much simpler
- --
- -- flush_node_list(head.components)
- return head
- end
-end
-
-function handlers.gsub_single(start,kind,lookupname,replacement)
- if trace_singles then
- logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(start.char),gref(replacement))
- end
- start.char = replacement
- return start, true
-end
-
-local function get_alternative_glyph(start,alternatives,value)
- -- needs checking: (global value, brrr)
- local choice = nil
- local n = #alternatives
- local char = start.char
- --
- if value == "random" then
- local r = random(1,n)
- value, choice = format("random, choice %s",r), alternatives[r]
- elseif value == "first" then
- value, choice = format("first, choice %s",1), alternatives[1]
- elseif value == "last" then
- value, choice = format("last, choice %s",n), alternatives[n]
- else
- value = tonumber(value)
- if type(value) ~= "number" then
- value, choice = "default, choice 1", alternatives[1]
- elseif value > n then
- local defaultalt = otf.defaultnodealternate
- if defaultalt == "first" then
- value, choice = format("no %s variants, taking %s",value,n), alternatives[n]
- elseif defaultalt == "last" then
- value, choice = format("no %s variants, taking %s",value,1), alternatives[1]
- else
- value, choice = format("no %s variants, ignoring",value), false
- end
- elseif value == 0 then
- value, choice = format("choice %s (no change)",value), char
- elseif value < 1 then
- value, choice = format("no %s variants, taking %s",value,1), alternatives[1]
- else
- value, choice = format("choice %s",value), alternatives[value]
- end
- end
- return choice
-end
-
-local function multiple_glyphs(start,multiple) -- marks ?
- local nofmultiples = #multiple
- if nofmultiples > 0 then
- start.char = multiple[1]
- if nofmultiples > 1 then
- local sn = start.next
- for k=2,nofmultiples do -- todo: use insert_node
- local n = copy_node(start)
- n.char = multiple[k]
- n.next = sn
- n.prev = start
- if sn then
- sn.prev = n
- end
- start.next = n
- start = n
- end
- end
- return start, true
- else
- if trace_multiples then
- logprocess("no multiple for %s",gref(start.char))
- end
- return start, false
- end
-end
-
-function handlers.gsub_alternate(start,kind,lookupname,alternative,sequence)
- local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue
- local choice = get_alternative_glyph(start,alternative,value)
- if choice then
- if trace_alternatives then
- logprocess("%s: replacing %s by alternative %s (%s)",pref(kind,lookupname),gref(char),gref(choice),choice)
- end
- start.char = choice
- else
- if trace_alternatives then
- logwarning("%s: no variant %s for %s",pref(kind,lookupname),tostring(value),gref(char))
- end
- end
- return start, true
-end
-
-function handlers.gsub_multiple(start,kind,lookupname,multiple)
- if trace_multiples then
- logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(start.char),gref(multiple))
- end
- return multiple_glyphs(start,multiple)
-end
-
-function handlers.gsub_ligature(start,kind,lookupname,ligature,sequence)
- local s, stop, discfound = start.next, nil, false
- local startchar = start.char
- if marks[startchar] then
- while s do
- local id = s.id
- if id == glyph_code and s.subtype<256 and s.font == currentfont then
- local lg = ligature[s.char]
- if lg then
- stop = s
- ligature = lg
- s = s.next
- else
- break
- end
- else
- break
- end
- end
- if stop then
- local lig = ligature.ligature
- if lig then
- if trace_ligatures then
- local stopchar = stop.char
- start = markstoligature(kind,lookupname,start,stop,lig)
- logprocess("%s: replacing %s upto %s by ligature %s",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
- else
- start = markstoligature(kind,lookupname,start,stop,lig)
- end
- return start, true
- else
- -- ok, goto next lookup
- end
- end
- else
- local skipmark = sequence.flags[1]
- while s do
- local id = s.id
- if id == glyph_code and s.subtype<256 then
- if s.font == currentfont then
- local char = s.char
- if skipmark and marks[char] then
- s = s.next
- else
- local lg = ligature[char]
- if lg then
- stop = s
- ligature = lg
- s = s.next
- else
- break
- end
- end
- else
- break
- end
- elseif id == disc_code then
- discfound = true
- s = s.next
- else
- break
- end
- end
- if stop then
- local lig = ligature.ligature
- if lig then
- if trace_ligatures then
- local stopchar = stop.char
- start = toligature(kind,lookupname,start,stop,lig,skipmark,discfound)
- logprocess("%s: replacing %s upto %s by ligature %s",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
- else
- start = toligature(kind,lookupname,start,stop,lig,skipmark,discfound)
- end
- return start, true
- else
- -- ok, goto next lookup
- end
- end
- end
- return start, false
-end
-
---[[ldx--
-<p>We get hits on a mark, but we're not sure if it has to be applied, so
-we need to explicitly test for basechar, baselig and basemark entries.</p>
---ldx]]--
-
-function handlers.gpos_mark2base(start,kind,lookupname,markanchors,sequence)
- local markchar = start.char
- if marks[markchar] then
- local base = start.prev -- [glyph] [start=mark]
- if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then
- local basechar = base.char
- if marks[basechar] then
- while true do
- base = base.prev
- if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then
- basechar = base.char
- if not marks[basechar] then
- break
- end
- else
- if trace_bugs then
- logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
- end
- return start, false
- end
- end
- end
- local baseanchors = descriptions[basechar]
- if baseanchors then
- baseanchors = baseanchors.anchors
- end
- if baseanchors then
- local baseanchors = baseanchors['basechar']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%s,%s)",
- pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return start, true
- end
- end
- end
- if trace_bugs then
- logwarning("%s, no matching anchors for mark %s and base %s",pref(kind,lookupname),gref(markchar),gref(basechar))
- end
- end
- else -- if trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
- onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
- end
- elseif trace_bugs then
- logwarning("%s: prev node is no char",pref(kind,lookupname))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
- end
- return start, false
-end
-
-function handlers.gpos_mark2ligature(start,kind,lookupname,markanchors,sequence)
- -- check chainpos variant
- local markchar = start.char
- if marks[markchar] then
- local base = start.prev -- [glyph] [optional marks] [start=mark]
- if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then
- local basechar = base.char
- if marks[basechar] then
- while true do
- base = base.prev
- if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then
- basechar = base.char
- if not marks[basechar] then
- break
- end
- else
- if trace_bugs then
- logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
- end
- return start, false
- end
- end
- end
- local index = has_attribute(start,ligacomp)
- local baseanchors = descriptions[basechar]
- if baseanchors then
- baseanchors = baseanchors.anchors
- if baseanchors then
- local baseanchors = baseanchors['baselig']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- ba = ba[index]
- if ba then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) -- index
- if trace_marks then
- logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%s,%s)",
- pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy)
- end
- return start, true
- end
- end
- end
- end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and baselig %s",pref(kind,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- else -- if trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
- onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
- end
- elseif trace_bugs then
- logwarning("%s: prev node is no char",pref(kind,lookupname))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
- end
- return start, false
-end
-
-function handlers.gpos_mark2mark(start,kind,lookupname,markanchors,sequence)
- local markchar = start.char
- if marks[markchar] then
- local base = start.prev -- [glyph] [basemark] [start=mark]
- -- while base and has_attribute(base,ligacomp) and has_attribute(base,ligacomp) ~= has_attribute(start,ligacomp) do
- -- base = base.prev -- KE: prevents mkmk for marks on different components of a ligature
- -- end
- local slc = has_attribute(start,ligacomp)
- if slc then -- a rather messy loop ... needs checking with husayni
- while base do
- local blc = has_attribute(base,ligacomp)
- if blc and blc ~= slc then
- base = base.prev
- else
- break
- end
- end
- end
- if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then -- subtype test can go
- local basechar = base.char
- local baseanchors = descriptions[basechar]
- if baseanchors then
- baseanchors = baseanchors.anchors
- if baseanchors then
- baseanchors = baseanchors['basemark']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%s,%s)",
- pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return start,true
- end
- end
- end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and basemark %s",pref(kind,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- else -- if trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
- onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
- end
- elseif trace_bugs then
- logwarning("%s: prev node is no mark",pref(kind,lookupname))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
- end
- return start,false
-end
-
-function handlers.gpos_cursive(start,kind,lookupname,exitanchors,sequence) -- to be checked
- local alreadydone = cursonce and has_attribute(start,cursbase)
- if not alreadydone then
- local done = false
- local startchar = start.char
- if marks[startchar] then
- if trace_cursive then
- logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
- end
- else
- local nxt = start.next
- while not done and nxt and nxt.id == glyph_code and nxt.subtype<256 and nxt.font == currentfont do
- local nextchar = nxt.char
- if marks[nextchar] then
- -- should not happen (maybe warning)
- nxt = nxt.next
- else
- local entryanchors = descriptions[nextchar]
- if entryanchors then
- entryanchors = entryanchors.anchors
- if entryanchors then
- entryanchors = entryanchors['centry']
- if entryanchors then
- local al = anchorlookups[lookupname]
- for anchor, entry in next, entryanchors do
- if al[anchor] then
- local exit = exitanchors[anchor]
- if exit then
- local dx, dy, bound = setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
- if trace_cursive then
- logprocess("%s: moving %s to %s cursive (%s,%s) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
- end
- done = true
- break
- end
- end
- end
- end
- end
- else -- if trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar))
- onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
- end
- break
- end
- end
- end
- return start, done
- else
- if trace_cursive and trace_details then
- logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
- end
- return start, false
- end
-end
-
-function handlers.gpos_single(start,kind,lookupname,kerns,sequence)
- local startchar = start.char
- local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
- if trace_kerns then
- logprocess("%s: shifting single %s by (%s,%s) and correction (%s,%s)",pref(kind,lookupname),gref(startchar),dx,dy,w,h)
- end
- return start, false
-end
-
-function handlers.gpos_pair(start,kind,lookupname,kerns,sequence)
- -- todo: kerns in disc nodes: pre, post, replace -> loop over disc too
- -- todo: kerns in components of ligatures
- local snext = start.next
- if not snext then
- return start, false
- else
- local prev, done = start, false
- local factor = tfmdata.parameters.factor
- local lookuptype = lookuptypes[lookupname]
- while snext and snext.id == glyph_code and snext.subtype<256 and snext.font == currentfont do
- local nextchar = snext.char
- local krn = kerns[nextchar]
- if not krn and marks[nextchar] then
- prev = snext
- snext = snext.next
- else
- local krn = kerns[nextchar]
- if not krn then
- -- skip
- elseif type(krn) == "table" then
- if lookuptype == "pair" then -- probably not needed
- local a, b = krn[2], krn[3]
- if a and #a > 0 then
- local startchar = start.char
- local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
- if trace_kerns then
- logprocess("%s: shifting first of pair %s and %s by (%s,%s) and correction (%s,%s)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- if b and #b > 0 then
- local startchar = start.char
- local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
- if trace_kerns then
- logprocess("%s: shifting second of pair %s and %s by (%s,%s) and correction (%s,%s)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- else -- wrong ... position has different entries
- report_process("%s: check this out (old kern stuff)",pref(kind,lookupname))
- -- local a, b = krn[2], krn[6]
- -- if a and a ~= 0 then
- -- local k = setkern(snext,factor,rlmode,a)
- -- if trace_kerns then
- -- logprocess("%s: inserting first kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
- -- end
- -- end
- -- if b and b ~= 0 then
- -- logwarning("%s: ignoring second kern xoff %s",pref(kind,lookupname),b*factor)
- -- end
- end
- done = true
- elseif krn ~= 0 then
- local k = setkern(snext,factor,rlmode,krn)
- if trace_kerns then
- logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
- end
- done = true
- end
- break
- end
- end
- return start, done
- end
-end
-
---[[ldx--
-<p>I will implement multiple chain replacements once I run into a font that uses
-them. It's not that complex to handle.</p>
---ldx]]--
-
-local chainmores = { }
-local chainprocs = { }
-
-local function logprocess(...)
- if trace_steps then
- registermessage(...)
- end
- report_subchain(...)
-end
-
-local logwarning = report_subchain
-
-local function logprocess(...)
- if trace_steps then
- registermessage(...)
- end
- report_chain(...)
-end
-
-local logwarning = report_chain
-
--- We could share functions but that would lead to extra function calls with many
--- arguments, redundant tests and confusing messages.
-
-function chainprocs.chainsub(start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname)
- logwarning("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
- return start, false
-end
-
-function chainmores.chainsub(start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname,n)
- logprocess("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
- return start, false
-end
-
--- The reversesub is a special case, which is why we need to store the replacements
--- in a bit of a weird way. There is no lookup and the replacement comes from the lookup
--- itself. It is meant mostly for dealing with Urdu.
-
-function chainprocs.reversesub(start,stop,kind,chainname,currentcontext,lookuphash,replacements)
- local char = start.char
- local replacement = replacements[char]
- if replacement then
- if trace_singles then
- logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement))
- end
- start.char = replacement
- return start, true
- else
- return start, false
- end
-end
-
---[[ldx--
-<p>This chain stuff is somewhat tricky since we can have a sequence of actions to be
-applied: single, alternate, multiple or ligature where ligature can be an invalid
-one in the sense that it will replace multiple by one but not necessarily one that
-looks like the combination (i.e. it is the counterpart of multiple then). For
-example, the following is valid:</p>
-
-<typing>
-<line>xxxabcdexxx [single a->A][multiple b->BCD][ligature cde->E] xxxABCDExxx</line>
-</typing>
-
-<p>Therefore we don't really do the replacement here yet unless we have the
-single lookup case. The efficiency of the replacements can be improved by deleting
-as little as needed but that would also make the code even more messy.</p>
---ldx]]--
-
-local function delete_till_stop(start,stop,ignoremarks) -- keeps start
- local n = 1
- if start == stop then
- -- done
- elseif ignoremarks then
- repeat -- start x x m x x stop => start m
- local next = start.next
- if not marks[next.char] then
- delete_node(start,next)
- end
- n = n + 1
- until next == stop
- else -- start x x x stop => start
- repeat
- local next = start.next
- delete_node(start,next)
- n = n + 1
- until next == stop
- end
- return n
-end
-
---[[ldx--
-<p>Here we replace start by a single variant. First we delete the rest of the
-match.</p>
---ldx]]--
-
-function chainprocs.gsub_single(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
- -- todo: marks ?
- local current = start
- local subtables = currentlookup.subtables
- if #subtables > 1 then
- logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," "))
- end
- while current do
- if current.id == glyph_code then
- local currentchar = current.char
- local lookupname = subtables[1] -- only 1
- local replacement = lookuphash[lookupname]
- if not replacement then
- if trace_bugs then
- logwarning("%s: no single hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
- end
- else
- replacement = replacement[currentchar]
- if not replacement then
- if trace_bugs then
- logwarning("%s: no single for %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar))
- end
- else
- if trace_singles then
- logprocess("%s: replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement))
- end
- current.char = replacement
- end
- end
- return start, true
- elseif current == stop then
- break
- else
- current = current.next
- end
- end
- return start, false
-end
-
-chainmores.gsub_single = chainprocs.gsub_single
-
---[[ldx--
-<p>Here we replace start by a sequence of new glyphs. First we delete the rest of
-the match.</p>
---ldx]]--
-
-function chainprocs.gsub_multiple(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- delete_till_stop(start,stop) -- we could pass ignoremarks as #3 ..
- local startchar = start.char
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local replacements = lookuphash[lookupname]
- if not replacements then
- if trace_bugs then
- logwarning("%s: no multiple hits",cref(kind,chainname,chainlookupname,lookupname))
- end
- else
- replacements = replacements[startchar]
- if not replacements then
- if trace_bugs then
- logwarning("%s: no multiple for %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar))
- end
- else
- if trace_multiples then
- logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements))
- end
- return multiple_glyphs(start,replacements)
- end
- end
- return start, false
-end
-
--- function chainmores.gsub_multiple(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,n)
--- logprocess("%s: gsub_multiple not yet supported",cref(kind,chainname,chainlookupname))
--- return start, false
--- end
-
-chainmores.gsub_multiple = chainprocs.gsub_multiple
-
---[[ldx--
-<p>Here we replace start by a new glyph. First we delete the rest of the match.</p>
---ldx]]--
-
--- char_1 mark_1 -> char_x mark_1 (ignore marks)
--- char_1 mark_1 -> char_x
-
--- to be checked: do we always have just one glyph?
--- we can also have alternates for marks
--- marks come last anyway
--- are there cases where we need to delete the mark
-
-function chainprocs.gsub_alternate(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local current = start
- local subtables = currentlookup.subtables
- local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue
- while current do
- if current.id == glyph_code then -- is this check needed?
- local currentchar = current.char
- local lookupname = subtables[1]
- local alternatives = lookuphash[lookupname]
- if not alternatives then
- if trace_bugs then
- logwarning("%s: no alternative hit",cref(kind,chainname,chainlookupname,lookupname))
- end
- else
- alternatives = alternatives[currentchar]
- if alternatives then
- local choice = get_alternative_glyph(current,alternatives,value)
- if choice then
- if trace_alternatives then
- logprocess("%s: replacing %s by alternative %s (%s)",cref(kind,chainname,chainlookupname,lookupname),gref(char),gref(choice),choice)
- end
- start.char = choice
- else
- if trace_alternatives then
- logwarning("%s: no variant %s for %s",cref(kind,chainname,chainlookupname,lookupname),tostring(value),gref(currentchar))
- end
- end
- elseif trace_bugs then
- logwarning("%s: no alternative for %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar))
- end
- end
- return start, true
- elseif current == stop then
- break
- else
- current = current.next
- end
- end
- return start, false
-end
-
--- function chainmores.gsub_alternate(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,n)
--- logprocess("%s: gsub_alternate not yet supported",cref(kind,chainname,chainlookupname))
--- return start, false
--- end
-
-chainmores.gsub_alternate = chainprocs.gsub_alternate
-
---[[ldx--
-<p>When we replace ligatures we use a helper that handles the marks. I might change
-this function (move code inline and handle the marks by a separate function). We
-assume rather stupid ligatures (no complex disc nodes).</p>
---ldx]]--
-
-function chainprocs.gsub_ligature(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
- local startchar = start.char
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local ligatures = lookuphash[lookupname]
- if not ligatures then
- if trace_bugs then
- logwarning("%s: no ligature hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
- end
- else
- ligatures = ligatures[startchar]
- if not ligatures then
- if trace_bugs then
- logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
- end
- else
- local s, discfound, last, nofreplacements = start.next, false, stop, 0
- while s do
- local id = s.id
- if id == disc_code then
- s = s.next
- discfound = true
- else
- local schar = s.char
- if marks[schar] then -- marks
- s = s.next
- else
- local lg = ligatures[schar]
- if lg then
- ligatures, last, nofreplacements = lg, s, nofreplacements + 1
- if s == stop then
- break
- else
- s = s.next
- end
- else
- break
- end
- end
- end
- end
- local l2 = ligatures.ligature
- if l2 then
- if chainindex then
- stop = last
- end
- if trace_ligatures then
- if start == stop then
- logprocess("%s: replacing character %s by ligature %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2))
- else
- logprocess("%s: replacing character %s upto %s by ligature %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char),gref(l2))
- end
- end
- start = toligature(kind,lookupname,start,stop,l2,currentlookup.flags[1],discfound)
- return start, true, nofreplacements
- elseif trace_bugs then
- if start == stop then
- logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
- else
- logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char))
- end
- end
- end
- end
- return start, false, 0
-end
-
-chainmores.gsub_ligature = chainprocs.gsub_ligature
-
-function chainprocs.gpos_mark2base(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar = start.char
- if marks[markchar] then
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local markanchors = lookuphash[lookupname]
- if markanchors then
- markanchors = markanchors[markchar]
- end
- if markanchors then
- local base = start.prev -- [glyph] [start=mark]
- if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then
- local basechar = base.char
- if marks[basechar] then
- while true do
- base = base.prev
- if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then
- basechar = base.char
- if not marks[basechar] then
- break
- end
- else
- if trace_bugs then
- logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
- end
- return start, false
- end
- end
- end
- local baseanchors = descriptions[basechar].anchors
- if baseanchors then
- local baseanchors = baseanchors['basechar']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%s,%s)",
- cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return start, true
- end
- end
- end
- if trace_bugs then
- logwarning("%s, no matching anchors for mark %s and base %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- elseif trace_bugs then
- logwarning("%s: prev node is no char",cref(kind,chainname,chainlookupname,lookupname))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
- end
- return start, false
-end
-
-function chainprocs.gpos_mark2ligature(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar = start.char
- if marks[markchar] then
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local markanchors = lookuphash[lookupname]
- if markanchors then
- markanchors = markanchors[markchar]
- end
- if markanchors then
- local base = start.prev -- [glyph] [optional marks] [start=mark]
- if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then
- local basechar = base.char
- if marks[basechar] then
- while true do
- base = base.prev
- if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then
- basechar = base.char
- if not marks[basechar] then
- break
- end
- else
- if trace_bugs then
- logwarning("%s: no base for mark %s",cref(kind,chainname,chainlookupname,lookupname),markchar)
- end
- return start, false
- end
- end
- end
- -- todo: like marks, use a ligatures hash
- local index = has_attribute(start,ligacomp)
- local baseanchors = descriptions[basechar].anchors
- if baseanchors then
- local baseanchors = baseanchors['baselig']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- ba = ba[index]
- if ba then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) -- index
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%s,%s)",
- cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),index,dx,dy)
- end
- return start, true
- end
- end
- end
- end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and baselig %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- elseif trace_bugs then
- logwarning("feature %s, lookup %s: prev node is no char",kind,lookupname)
- end
- elseif trace_bugs then
- logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
- end
- return start, false
-end
-
-function chainprocs.gpos_mark2mark(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar = start.char
- if marks[markchar] then
---~ local alreadydone = markonce and has_attribute(start,markmark)
---~ if not alreadydone then
- -- local markanchors = descriptions[markchar].anchors markanchors = markanchors and markanchors.mark
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local markanchors = lookuphash[lookupname]
- if markanchors then
- markanchors = markanchors[markchar]
- end
- if markanchors then
- local base = start.prev -- [glyph] [basemark] [start=mark]
- -- while (base and has_attribute(base,ligacomp) and has_attribute(base,ligacomp) ~= has_attribute(start,ligacomp)) do
- -- base = base.prev -- KE: prevents mkmk for marks on different components of a ligature
- -- end
- local slc = has_attribute(start,ligacomp)
- if slc then -- a rather messy loop ... needs checking with husayni
- while base do
- local blc = has_attribute(base,ligacomp)
- if blc and blc ~= slc then
- base = base.prev
- else
- break
- end
- end
- end
- if base and base.id == glyph_code and base.subtype<256 and base.font == currentfont then -- subtype test can go
- local basechar = base.char
- local baseanchors = descriptions[basechar].anchors
- if baseanchors then
- baseanchors = baseanchors['basemark']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%s,%s)",
- cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return start, true
- end
- end
- end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and basemark %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- elseif trace_bugs then
- logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
- end
---~ elseif trace_marks and trace_details then
---~ logprocess("%s, mark %s is already bound (n=%s), ignoring mark2mark",pref(kind,lookupname),gref(markchar),alreadydone)
---~ end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
- end
- return start, false
-end
-
--- ! ! ! untested ! ! !
-
-function chainprocs.gpos_cursive(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local alreadydone = cursonce and has_attribute(start,cursbase)
- if not alreadydone then
- local startchar = start.char
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local exitanchors = lookuphash[lookupname]
- if exitanchors then
- exitanchors = exitanchors[startchar]
- end
- if exitanchors then
- local done = false
- if marks[startchar] then
- if trace_cursive then
- logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
- end
- else
- local nxt = start.next
- while not done and nxt and nxt.id == glyph_code and nxt.subtype<256 and nxt.font == currentfont do
- local nextchar = nxt.char
- if marks[nextchar] then
- -- should not happen (maybe warning)
- nxt = nxt.next
- else
- local entryanchors = descriptions[nextchar]
- if entryanchors then
- entryanchors = entryanchors.anchors
- if entryanchors then
- entryanchors = entryanchors['centry']
- if entryanchors then
- local al = anchorlookups[lookupname]
- for anchor, entry in next, entryanchors do
- if al[anchor] then
- local exit = exitanchors[anchor]
- if exit then
- local dx, dy, bound = setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
- if trace_cursive then
- logprocess("%s: moving %s to %s cursive (%s,%s) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
- end
- done = true
- break
- end
- end
- end
- end
- end
- else -- if trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar))
- onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
- end
- break
- end
- end
- end
- return start, done
- else
- if trace_cursive and trace_details then
- logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
- end
- return start, false
- end
- end
- return start, false
-end
-
-function chainprocs.gpos_single(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
- -- untested .. needs checking for the new model
- local startchar = start.char
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local kerns = lookuphash[lookupname]
- if kerns then
- kerns = kerns[startchar] -- needed ?
- if kerns then
- local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
- if trace_kerns then
- logprocess("%s: shifting single %s by (%s,%s) and correction (%s,%s)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h)
- end
- end
- end
- return start, false
-end
-
--- when machines become faster i will make a shared function
-
-function chainprocs.gpos_pair(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
--- logwarning("%s: gpos_pair not yet supported",cref(kind,chainname,chainlookupname))
- local snext = start.next
- if snext then
- local startchar = start.char
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local kerns = lookuphash[lookupname]
- if kerns then
- kerns = kerns[startchar]
- if kerns then
- local lookuptype = lookuptypes[lookupname]
- local prev, done = start, false
- local factor = tfmdata.parameters.factor
- while snext and snext.id == glyph_code and snext.subtype<256 and snext.font == currentfont do
- local nextchar = snext.char
- local krn = kerns[nextchar]
- if not krn and marks[nextchar] then
- prev = snext
- snext = snext.next
- else
- if not krn then
- -- skip
- elseif type(krn) == "table" then
- if lookuptype == "pair" then
- local a, b = krn[2], krn[3]
- if a and #a > 0 then
- local startchar = start.char
- local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
- if trace_kerns then
- logprocess("%s: shifting first of pair %s and %s by (%s,%s) and correction (%s,%s)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- if b and #b > 0 then
- local startchar = start.char
- local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
- if trace_kerns then
- logprocess("%s: shifting second of pair %s and %s by (%s,%s) and correction (%s,%s)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- else
- report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname))
- local a, b = krn[2], krn[6]
- if a and a ~= 0 then
- local k = setkern(snext,factor,rlmode,a)
- if trace_kerns then
- logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
- end
- end
- if b and b ~= 0 then
- logwarning("%s: ignoring second kern xoff %s",cref(kind,chainname,chainlookupname),b*factor)
- end
- end
- done = true
- elseif krn ~= 0 then
- local k = setkern(snext,factor,rlmode,krn)
- if trace_kerns then
- logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
- end
- done = true
- end
- break
- end
- end
- return start, done
- end
- end
- end
- return start, false
-end
-
--- what pointer to return, spec says stop
--- to be discussed ... is bidi changer a space?
--- elseif char == zwnj and sequence[n][32] then -- brrr
-
--- somehow l or f is global
--- we don't need to pass the currentcontext, saves a bit
--- make a slow variant then can be activated but with more tracing
-
-local function show_skip(kind,chainname,char,ck,class)
- if ck[9] then
- logwarning("%s: skipping char %s (%s) in rule %s, lookuptype %s (%s=>%s)",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10])
- else
- logwarning("%s: skipping char %s (%s) in rule %s, lookuptype %s",cref(kind,chainname),gref(char),class,ck[1],ck[2])
- end
-end
-
-local function normal_handle_contextchain(start,kind,chainname,contexts,sequence,lookuphash)
- -- local rule, lookuptype, sequence, f, l, lookups = ck[1], ck[2] ,ck[3], ck[4], ck[5], ck[6]
- local flags = sequence.flags
- local done = false
- local skipmark = flags[1]
- local skipligature = flags[2]
- local skipbase = flags[3]
- local someskip = skipmark or skipligature or skipbase -- could be stored in flags for a fast test (hm, flags could be false !)
- local markclass = sequence.markclass -- todo, first we need a proper test
- local skipped = false
- for k=1,#contexts do
- local match = true
- local current = start
- local last = start
- local ck = contexts[k]
- local seq = ck[3]
- local s = #seq
- -- f..l = mid string
- if s == 1 then
- -- never happens
- match = current.id == glyph_code and current.subtype<256 and current.font == currentfont and seq[1][current.char]
- else
- -- maybe we need a better space check (maybe check for glue or category or combination)
- -- we cannot optimize for n=2 because there can be disc nodes
- local f, l = ck[4], ck[5]
- -- current match
- if f == 1 and f == l then -- current only
- -- already a hit
- -- match = true
- else -- before/current/after | before/current | current/after
- -- no need to test first hit (to be optimized)
- if f == l then -- new, else last out of sync (f is > 1)
- -- match = true
- else
- local n = f + 1
- last = last.next
- while n <= l do
- if last then
- local id = last.id
- if id == glyph_code then
- if last.subtype<256 and last.font == currentfont then
- local char = last.char
- local ccd = descriptions[char]
- if ccd then
- local class = ccd.class
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- skipped = true
- if trace_skips then
- show_skip(kind,chainname,char,ck,class)
- end
- last = last.next
- elseif seq[n][char] then
- if n < l then
- last = last.next
- end
- n = n + 1
- else
- match = false
- break
- end
- else
- match = false
- break
- end
- else
- match = false
- break
- end
- elseif id == disc_code then
- last = last.next
- else
- match = false
- break
- end
- else
- match = false
- break
- end
- end
- end
- end
- -- before
- if match and f > 1 then
- local prev = start.prev
- if prev then
- local n = f-1
- while n >= 1 do
- if prev then
- local id = prev.id
- if id == glyph_code then
- if prev.subtype<256 and prev.font == currentfont then -- normal char
- local char = prev.char
- local ccd = descriptions[char]
- if ccd then
- local class = ccd.class
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- skipped = true
- if trace_skips then
- show_skip(kind,chainname,char,ck,class)
- end
- elseif seq[n][char] then
- n = n -1
- else
- match = false
- break
- end
- else
- match = false
- break
- end
- else
- match = false
- break
- end
- elseif id == disc_code then
- -- skip 'm
- elseif seq[n][32] then
- n = n -1
- else
- match = false
- break
- end
- prev = prev.prev
- elseif seq[n][32] then -- somewhat special, as zapfino can have many preceding spaces
- n = n -1
- else
- match = false
- break
- end
- end
- elseif f == 2 then
- match = seq[1][32]
- else
- for n=f-1,1,-1 do -- count downwards through the preceding positions
- if not seq[n][32] then
- match = false
- break
- end
- end
- end
- end
- -- after
- if match and s > l then
- local current = last and last.next
- if current then
- -- removed optimization for s-l == 1, we have to deal with marks anyway
- local n = l + 1
- while n <= s do
- if current then
- local id = current.id
- if id == glyph_code then
- if current.subtype<256 and current.font == currentfont then -- normal char
- local char = current.char
- local ccd = descriptions[char]
- if ccd then
- local class = ccd.class
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- skipped = true
- if trace_skips then
- show_skip(kind,chainname,char,ck,class)
- end
- elseif seq[n][char] then
- n = n + 1
- else
- match = false
- break
- end
- else
- match = false
- break
- end
- else
- match = false
- break
- end
- elseif id == disc_code then
- -- skip 'm
- elseif seq[n][32] then -- brrr
- n = n + 1
- else
- match = false
- break
- end
- current = current.next
- elseif seq[n][32] then
- n = n + 1
- else
- match = false
- break
- end
- end
- elseif s-l == 1 then
- match = seq[s][32]
- else
- for n=l+1,s do
- if not seq[n][32] then
- match = false
- break
- end
- end
- end
- end
- end
- if match then
- -- ck == currentcontext
- if trace_contexts then
- local rule, lookuptype, f, l = ck[1], ck[2], ck[4], ck[5]
- local char = start.char
- if ck[9] then
- logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %s (%s=>%s)",
- cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
- else
- logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %s",
- cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype)
- end
- end
- local chainlookups = ck[6]
- if chainlookups then
- local nofchainlookups = #chainlookups
- -- we can speed this up if needed
- if nofchainlookups == 1 then
- local chainlookupname = chainlookups[1]
- local chainlookup = lookuptable[chainlookupname]
- if chainlookup then
- local cp = chainprocs[chainlookup.type]
- if cp then
- start, done = cp(start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
- else
- logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
- end
- else -- shouldn't happen
- logprocess("%s is not yet supported",cref(kind,chainname,chainlookupname))
- end
- else
- local i = 1
- repeat
- if skipped then
- while true do
- local char = start.char
- local ccd = descriptions[char]
- if ccd then
- local class = ccd.class
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- start = start.next
- else
- break
- end
- else
- break
- end
- end
- end
- local chainlookupname = chainlookups[i]
- local chainlookup = lookuptable[chainlookupname] -- can be false (n matches, <n replacement)
- local cp = chainlookup and chainmores[chainlookup.type]
- if cp then
- local ok, n
- start, ok, n = cp(start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence)
- -- messy since last can be changed !
- if ok then
- done = true
- -- skip next one(s) if ligature
- i = i + (n or 1)
- else
- i = i + 1
- end
- else
- -- is valid
- -- logprocess("%s: multiple subchains for %s are not yet supported",cref(kind,chainname,chainlookupname),chainlookup and chainlookup.type or "?")
- i = i + 1
- end
- start = start.next
- until i > nofchainlookups
- end
- else
- local replacements = ck[7]
- if replacements then
- start, done = chainprocs.reversesub(start,last,kind,chainname,ck,lookuphash,replacements) -- sequence
- else
- done = true -- can be meant to be skipped
- if trace_contexts then
- logprocess("%s: skipping match",cref(kind,chainname))
- end
- end
- end
- end
- end
- return start, done
-end
-
-- Because we want to keep this elsewhere (and because speed is less of an issue) we
--- pass the font id so that the verbose variant can access the relevant helper tables.
-
-local verbose_handle_contextchain = function(font,...)
- logwarning("no verbose handler installed, reverting to 'normal'")
- otf.setcontextchain()
- return normal_handle_contextchain(...)
-end
-
-otf.chainhandlers = {
- normal = normal_handle_contextchain,
- verbose = verbose_handle_contextchain,
-}
-
-function otf.setcontextchain(method)
- if not method or method == "normal" or not otf.chainhandlers[method] then
- if handlers.contextchain then -- no need for a message while making the format
- logwarning("installing normal contextchain handler")
- end
- handlers.contextchain = normal_handle_contextchain
- else
- logwarning("installing contextchain handler '%s'",method)
- local handler = otf.chainhandlers[method]
- handlers.contextchain = function(...)
- return handler(currentfont,...) -- hm, get rid of ...
- end
- end
- handlers.gsub_context = handlers.contextchain
- handlers.gsub_contextchain = handlers.contextchain
- handlers.gsub_reversecontextchain = handlers.contextchain
- handlers.gpos_contextchain = handlers.contextchain
- handlers.gpos_context = handlers.contextchain
-end
-
-otf.setcontextchain()
-
-local missing = { } -- we only report once
-
-local function logprocess(...)
- if trace_steps then
- registermessage(...)
- end
- report_process(...)
-end
-
-local logwarning = report_process
-
-local function report_missing_cache(typ,lookup)
- local f = missing[currentfont] if not f then f = { } missing[currentfont] = f end
- local t = f[typ] if not t then t = { } f[typ] = t end
- if not t[lookup] then
- t[lookup] = true
- logwarning("missing cache for lookup %s of type %s in font %s (%s)",lookup,typ,currentfont,tfmdata.properties.fullname)
- end
-end
-
-local resolved = { } -- we only resolve a font,script,language pair once
-
--- todo: pass all these 'locals' in a table
-
-local lookuphashes = { }
-
-setmetatableindex(lookuphashes, function(t,font)
- local lookuphash = fontdata[font].resources.lookuphash
- if not lookuphash or not next(lookuphash) then
- lookuphash = false
- end
- t[font] = lookuphash
- return lookuphash
-end)
-
--- fonts.hashes.lookups = lookuphashes
-
-local special_attributes = {
- init = 1,
- medi = 2,
- fina = 3,
- isol = 4
-}
-
-local function initialize(sequence,script,language,enabled)
- local features = sequence.features
- if features then
- for kind, scripts in next, features do
- local valid = enabled[kind]
- if valid then
- local languages = scripts[script] or scripts[wildcard]
- if languages and (languages[language] or languages[wildcard]) then
- return { valid, special_attributes[kind] or false, sequence.chain or 0, kind, sequence }
- end
- end
- end
- end
- return false
-end
-
-function otf.dataset(tfmdata,sequences,font) -- generic variant, overloaded in context
- local shared = tfmdata.shared
- local properties = tfmdata.properties
- local language = properties.language or "dflt"
- local script = properties.script or "dflt"
- local enabled = shared.features
- local res = resolved[font]
- if not res then
- res = { }
- resolved[font] = res
- end
- local rs = res[script]
- if not rs then
- rs = { }
- res[script] = rs
- end
- local rl = rs[language]
- if not rl then
- rl = { }
- rs[language] = rl
- setmetatableindex(rl, function(t,k)
- local v = enabled and initialize(sequences[k],script,language,enabled)
- t[k] = v
- return v
- end)
- end
- return rl
-end
-
--- elseif id == glue_code then
--- if p[5] then -- chain
--- local pc = pp[32]
--- if pc then
--- start, ok = start, false -- p[1](start,kind,p[2],pc,p[3],p[4])
--- if ok then
--- done = true
--- end
--- if start then start = start.next end
--- else
--- start = start.next
--- end
--- else
--- start = start.next
--- end
-
-local function featuresprocessor(head,font,attr)
-
- local lookuphash = lookuphashes[font] -- we can also check sequences here
-
- if not lookuphash then
- return head, false
- end
-
- if trace_steps then
- checkstep(head)
- end
-
- tfmdata = fontdata[font]
- descriptions = tfmdata.descriptions
- characters = tfmdata.characters
- resources = tfmdata.resources
-
- marks = resources.marks
- anchorlookups = resources.lookup_to_anchor
- lookuptable = resources.lookups
- lookuptypes = resources.lookuptypes
-
- currentfont = font
- rlmode = 0
-
- local sequences = resources.sequences
- local done = false
- local datasets = otf.dataset(tfmdata,sequences,font,attr)
-
- local dirstack = { } -- could move outside function
-
- -- We could work on sub start-stop ranges instead but I wonder if there is that
- -- much speed gain (experiments showed that it did not make much sense) and we need
- -- to keep track of directions anyway. Also at some point I want to play with
- -- font interactions and then we do need the full sweeps.
-
- for s=1,#sequences do
- local dataset = datasets[s]
- if dataset then
- featurevalue = dataset[1] -- todo: pass to function instead of using a global
- if featurevalue then
- local sequence = sequences[s] -- also dataset[5]
- local rlparmode = 0
- local topstack = 0
- local success = false
- local attribute = dataset[2]
- local chain = dataset[3] -- sequence.chain or 0
- local typ = sequence.type
- local subtables = sequence.subtables
- if chain < 0 then
- -- this is a limited case, no special treatments like 'init' etc
- local handler = handlers[typ]
- -- we need to get rid of this slide! probably no longer needed in latest luatex
- local start = find_node_tail(head) -- slow (we can store tail because there's always a skip at the end): todo
- while start do
- local id = start.id
- if id == glyph_code then
- if start.subtype<256 and start.font == font then
- local a = has_attribute(start,0)
- if a then
- a = a == attr
- else
- a = true
- end
- if a then
- for i=1,#subtables do
- local lookupname = subtables[i]
- local lookupcache = lookuphash[lookupname]
- if lookupcache then
- local lookupmatch = lookupcache[start.char]
- if lookupmatch then
- start, success = handler(start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
- if success then
- break
- end
- end
- else
- report_missing_cache(typ,lookupname)
- end
- end
- if start then start = start.prev end
- else
- start = start.prev
- end
- else
- start = start.prev
- end
- else
- start = start.prev
- end
- end
- else
- local handler = handlers[typ]
- local ns = #subtables
- local start = head -- local ?
- rlmode = 0 -- to be checked ?
- if ns == 1 then -- happens often
- local lookupname = subtables[1]
- local lookupcache = lookuphash[lookupname]
- if not lookupcache then -- also check for empty cache
- report_missing_cache(typ,lookupname)
- else
- while start do
- local id = start.id
- if id == glyph_code then
- if start.subtype<256 and start.font == font then
- local a = has_attribute(start,0)
- if a then
- a = (a == attr) and (not attribute or has_attribute(start,state,attribute))
- else
- a = not attribute or has_attribute(start,state,attribute)
- end
- if a then
- local lookupmatch = lookupcache[start.char]
- if lookupmatch then
- -- sequence can go (the parameter is not needed here)
- local ok
- start, ok = handler(start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
- if ok then
- success = true
- end
- end
- if start then start = start.next end
- else
- start = start.next
- end
- else
- start = start.next
- end
- elseif id == whatsit_code then -- will be function
- local subtype = start.subtype
- if subtype == dir_code then
- local dir = start.dir
- if dir == "+TRT" or dir == "+TLT" then
- topstack = topstack + 1
- dirstack[topstack] = dir
- elseif dir == "-TRT" or dir == "-TLT" then
- topstack = topstack - 1
- end
- local newdir = dirstack[topstack]
- if newdir == "+TRT" then
- rlmode = -1
- elseif newdir == "+TLT" then
- rlmode = 1
- else
- rlmode = rlparmode
- end
- if trace_directions then
- report_process("directions after txtdir %s: txtdir=%s:%s, parmode=%s, txtmode=%s",dir,topstack,newdir or "unset",rlparmode,rlmode)
- end
- elseif subtype == localpar_code then
- local dir = start.dir
- if dir == "TRT" then
- rlparmode = -1
- elseif dir == "TLT" then
- rlparmode = 1
- else
- rlparmode = 0
- end
- rlmode = rlparmode
- if trace_directions then
- report_process("directions after pardir %s: parmode=%s, txtmode=%s",dir,rlparmode,rlmode)
- end
- end
- start = start.next
- else
- start = start.next
- end
- end
- end
- else
- while start do
- local id = start.id
- if id == glyph_code then
- if start.subtype<256 and start.font == font then
- local a = has_attribute(start,0)
- if a then
- a = (a == attr) and (not attribute or has_attribute(start,state,attribute))
- else
- a = not attribute or has_attribute(start,state,attribute)
- end
- if a then
- for i=1,ns do
- local lookupname = subtables[i]
- local lookupcache = lookuphash[lookupname]
- if lookupcache then
- local lookupmatch = lookupcache[start.char]
- if lookupmatch then
- -- we could move all code inline but that makes things even more unreadable
- local ok
- start, ok = handler(start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
- if ok then
- success = true
- break
- end
- end
- else
- report_missing_cache(typ,lookupname)
- end
- end
- if start then start = start.next end
- else
- start = start.next
- end
- else
- start = start.next
- end
- elseif id == whatsit_code then
- local subtype = start.subtype
- if subtype == dir_code then
- local dir = start.dir
- if dir == "+TRT" or dir == "+TLT" then
- topstack = topstack + 1
- dirstack[topstack] = dir
- elseif dir == "-TRT" or dir == "-TLT" then
- topstack = topstack - 1
- end
- local newdir = dirstack[topstack]
- if newdir == "+TRT" then
- rlmode = -1
- elseif newdir == "+TLT" then
- rlmode = 1
- else
- rlmode = rlparmode
- end
- if trace_directions then
- report_process("directions after txtdir %s: txtdir=%s:%s, parmode=%s, txtmode=%s",dir,topstack,newdir or "unset",rlparmode,rlmode)
- end
- elseif subtype == localpar_code then
- local dir = start.dir
- if dir == "TRT" then
- rlparmode = -1
- elseif dir == "TLT" then
- rlparmode = 1
- else
- rlparmode = 0
- end
- rlmode = rlparmode
- if trace_directions then
- report_process("directions after pardir %s: parmode=%s, txtmode=%s",dir,rlparmode,rlmode)
- end
- end
- start = start.next
- else
- start = start.next
- end
- end
- end
- end
- if success then
- done = true
- end
- if trace_steps then -- ?
- registerstep(head)
- end
- end
- end
- end
- return head, done
-end
-
-local function generic(lookupdata,lookupname,unicode,lookuphash)
- local target = lookuphash[lookupname]
- if target then
- target[unicode] = lookupdata
- else
- lookuphash[lookupname] = { [unicode] = lookupdata }
- end
-end
-
-local action = {
-
- substitution = generic,
- multiple = generic,
- alternate = generic,
- position = generic,
-
- ligature = function(lookupdata,lookupname,unicode,lookuphash)
- local target = lookuphash[lookupname]
- if not target then
- target = { }
- lookuphash[lookupname] = target
- end
- for i=1,#lookupdata do
- local li = lookupdata[i]
- local tu = target[li]
- if not tu then
- tu = { }
- target[li] = tu
- end
- target = tu
- end
- target.ligature = unicode
- end,
-
- pair = function(lookupdata,lookupname,unicode,lookuphash)
- local target = lookuphash[lookupname]
- if not target then
- target = { }
- lookuphash[lookupname] = target
- end
- local others = target[unicode]
- local paired = lookupdata[1]
- if others then
- others[paired] = lookupdata
- else
- others = { [paired] = lookupdata }
- target[unicode] = others
- end
- end,
-
-}
-
-local function prepare_lookups(tfmdata)
-
- local rawdata = tfmdata.shared.rawdata
- local resources = rawdata.resources
- local lookuphash = resources.lookuphash
- local anchor_to_lookup = resources.anchor_to_lookup
- local lookup_to_anchor = resources.lookup_to_anchor
- local lookuptypes = resources.lookuptypes
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
-
- -- we cannot free the entries in the descriptions as sometimes we access
- -- them directly (for instance anchors) ... selectively freeing doesn't save
- -- much memory as it's only a reference to a table and the slot in the
- -- description hash is not freed anyway
-
- for unicode, character in next, characters do -- we cannot loop over descriptions !
-
- local description = descriptions[unicode]
-
- if description then
-
- local lookups = description.slookups
- if lookups then
- for lookupname, lookupdata in next, lookups do
- action[lookuptypes[lookupname]](lookupdata,lookupname,unicode,lookuphash)
- end
- end
-
- local lookups = description.mlookups
- if lookups then
- for lookupname, lookuplist in next, lookups do
- local lookuptype = lookuptypes[lookupname]
- for l=1,#lookuplist do
- local lookupdata = lookuplist[l]
- action[lookuptype](lookupdata,lookupname,unicode,lookuphash)
- end
- end
- end
-
- local list = description.kerns
- if list then
- for lookup, krn in next, list do -- ref to glyph, saves lookup
- local target = lookuphash[lookup]
- if target then
- target[unicode] = krn
- else
- lookuphash[lookup] = { [unicode] = krn }
- end
- end
- end
-
- local list = description.anchors
- if list then
- for typ, anchors in next, list do -- types
- if typ == "mark" or typ == "cexit" then -- or entry?
- for name, anchor in next, anchors do
- local lookups = anchor_to_lookup[name]
- if lookups then
- for lookup, _ in next, lookups do
- local target = lookuphash[lookup]
- if target then
- target[unicode] = anchors
- else
- lookuphash[lookup] = { [unicode] = anchors }
- end
- end
- end
- end
- end
- end
- end
-
- end
-
- end
-
-end
-
-local function split(replacement,original)
- local result = { }
- for i=1,#replacement do
- result[original[i]] = replacement[i]
- end
- return result
-end
-
--- not shared as we hook into lookups now
-
---~ local function uncover_1(covers,result) -- multiple covers
---~ local nofresults = #result
---~ for n=1,#covers do
---~ nofresults = nofresults + 1
---~ local u = { }
---~ local c = covers[n]
---~ for i=1,#c do
---~ u[c[i]] = true
---~ end
---~ result[nofresults] = u
---~ end
---~ end
-
---~ local function uncover_2(covers,result) -- single covers (turned into multiple with n=1)
---~ local nofresults = #result
---~ for n=1,#covers do
---~ nofresults = nofresults + 1
---~ result[nofresults] = { [covers[n]] = true }
---~ end
---~ end
-
---~ local function uncover_1(covers,result) -- multiple covers
---~ local nofresults = #result
---~ for n=1,#covers do
---~ nofresults = nofresults + 1
---~ result[nofresults] = covers[n]
---~ end
---~ end
-
---~ local function prepare_contextchains(tfmdata)
---~ local rawdata = tfmdata.shared.rawdata
---~ local resources = rawdata.resources
---~ local lookuphash = resources.lookuphash
---~ local lookups = rawdata.lookups
---~ if lookups then
---~ for lookupname, lookupdata in next, rawdata.lookups do
---~ local lookuptype = lookupdata.type
---~ if not lookuptype then
---~ report_prepare("missing lookuptype for %s",lookupname)
---~ else -- => lookuphash[lookupname][unicode]
---~ local rules = lookupdata.rules
---~ if rules then
---~ local fmt = lookupdata.format
---~ -- if fmt == "coverage" then
---~ if fmt == "coverage" or fmt == "glyphs" then
---~ if lookuptype ~= "chainsub" and lookuptype ~= "chainpos" then
---~ -- todo: dejavu-serif has one (but i need to see what use it has)
---~ report_prepare("unsupported coverage %s for %s",lookuptype,lookupname)
---~ else
---~ local contexts = lookuphash[lookupname]
---~ if not contexts then
---~ contexts = { }
---~ lookuphash[lookupname] = contexts
---~ end
---~ local t, nt = { }, 0
---~ for nofrules=1,#rules do -- does #rules>1 happen often?
---~ local rule = rules[nofrules]
---~ local current = rule.current
---~ local before = rule.before
---~ local after = rule.after
---~ local sequence = { }
---~ if before then
---~ uncover_1(before,sequence)
---~ end
---~ local start = #sequence + 1
---~ uncover_1(current,sequence)
---~ local stop = #sequence
---~ if after then
---~ uncover_1(after,sequence)
---~ end
---~ if sequence[1] then
---~ nt = nt + 1
---~ t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups }
---~ for unic, _ in next, sequence[start] do
---~ local cu = contexts[unic]
---~ if not cu then
---~ contexts[unic] = t
---~ end
---~ end
---~ end
---~ end
---~ end
---~ elseif fmt == "reversecoverage" then -- we could combine both branches (only difference is replacements)
---~ if lookuptype ~= "reversesub" then
---~ report_prepare("unsupported reverse coverage %s for %s",lookuptype,lookupname)
---~ else
---~ local contexts = lookuphash[lookupname]
---~ if not contexts then
---~ contexts = { }
---~ lookuphash[lookupname] = contexts
---~ end
---~ local t, nt = { }, 0
---~ for nofrules=1,#rules do
---~ local rule = rules[nofrules]
---~ local current = rule.current
---~ local before = rule.before
---~ local after = rule.after
---~ local replacements = rule.replacements
---~ local sequence = { }
---~ if before then
---~ uncover_1(before,sequence)
---~ end
---~ local start = #sequence + 1
---~ uncover_1(current,sequence)
---~ local stop = #sequence
---~ if after then
---~ uncover_1(after,sequence)
---~ end
---~ if sequence[1] then
---~ nt = nt + 1
---~ t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups, replacements }
---~ for unic, _ in next, sequence[start] do
---~ local cu = contexts[unic]
---~ if not cu then
---~ contexts[unic] = t
---~ end
---~ end
---~ end
---~ end
---~ end
---~ -- elseif fmt == "glyphs" then --maybe just make then before = { fore } and share with coverage
---~ -- if lookuptype ~= "chainsub" and lookuptype ~= "chainpos" then
---~ -- report_prepare("unsupported coverage %s for %s",lookuptype,lookupname)
---~ -- else
---~ -- local contexts = lookuphash[lookupname]
---~ -- if not contexts then
---~ -- contexts = { }
---~ -- lookuphash[lookupname] = contexts
---~ -- end
---~ -- local t, nt = { }, 0
---~ -- for nofrules=1,#rules do -- we can make glyphs a special case (less tables)
---~ -- local rule = rules[nofrules]
---~ -- local current = rule.names
---~ -- local before = rule.fore
---~ -- local after = rule.back
---~ -- local sequence = { }
---~ -- if before then
---~ -- uncover_1(before,sequence)
---~ -- end
---~ -- local start = #sequence + 1
---~ -- uncover_1(current,sequence)
---~ -- local stop = #sequence
---~ -- if after then
---~ -- uncover_1(after,sequence)
---~ -- end
---~ -- if sequence then
---~ -- nt = nt + 1
---~ -- t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups }
---~ -- for unic, _ in next, sequence[start] do
---~ -- local cu = contexts[unic]
---~ -- if not cu then
---~ -- contexts[unic] = t
---~ -- end
---~ -- end
---~ -- end
---~ -- end
---~ -- end
---~ end
---~ end
---~ end
---~ end
---~ end
---~ end
-
-local valid = {
- coverage = { chainsub = true, chainpos = true, contextsub = true },
- reversecoverage = { reversesub = true },
- glyphs = { chainsub = true, chainpos = true },
-}
-
-local function prepare_contextchains(tfmdata)
- local rawdata = tfmdata.shared.rawdata
- local resources = rawdata.resources
- local lookuphash = resources.lookuphash
- local lookups = rawdata.lookups
- if lookups then
- for lookupname, lookupdata in next, rawdata.lookups do
- local lookuptype = lookupdata.type
- if lookuptype then
- local rules = lookupdata.rules
- if rules then
- local format = lookupdata.format
- local validformat = valid[format]
- if not validformat then
- report_prepare("unsupported format %s",format)
- elseif not validformat[lookuptype] then
- -- todo: dejavu-serif has one (but i need to see what use it has)
- report_prepare("unsupported %s %s for %s",format,lookuptype,lookupname)
- else
- local contexts = lookuphash[lookupname]
- if not contexts then
- contexts = { }
- lookuphash[lookupname] = contexts
- end
- local t, nt = { }, 0
- for nofrules=1,#rules do
- local rule = rules[nofrules]
- local current = rule.current
- local before = rule.before
- local after = rule.after
- local replacements = rule.replacements
- local sequence = { }
- local nofsequences = 0
- -- Eventually we can store start, stop and sequence in the cached file,
- -- but then less sharing takes place, so it's best not to do that without
- -- a lot of profiling; so let's forget about it.
- if before then
- for n=1,#before do
- nofsequences = nofsequences + 1
- sequence[nofsequences] = before[n]
- end
- end
- local start = nofsequences + 1
- for n=1,#current do
- nofsequences = nofsequences + 1
- sequence[nofsequences] = current[n]
- end
- local stop = nofsequences
- if after then
- for n=1,#after do
- nofsequences = nofsequences + 1
- sequence[nofsequences] = after[n]
- end
- end
- if sequence[1] then
- -- Replacements only happen with reverse lookups as they are single only. We
- -- could pack them into current (replacement value instead of true) and then
- -- use sequence[start] instead but it's somewhat ugly.
- nt = nt + 1
- t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups, replacements }
- for unic, _ in next, sequence[start] do
- local cu = contexts[unic]
- if not cu then
- contexts[unic] = t
- end
- end
- end
- end
- end
- else
- -- no rules
- end
- else
- report_prepare("missing lookuptype for %s",lookupname)
- end
- end
- end
-end
-
--- we can consider lookuphash == false (initialized but empty) vs lookuphash == table
-
-local function featuresinitializer(tfmdata,value)
- if true then -- value then
- -- beware we need to use the topmost properties table
- local rawdata = tfmdata.shared.rawdata
- local properties = rawdata.properties
- if not properties.initialized then
- local starttime = trace_preparing and os.clock()
- local resources = rawdata.resources
- resources.lookuphash = resources.lookuphash or { }
- prepare_contextchains(tfmdata)
- prepare_lookups(tfmdata)
- properties.initialized = true
- if trace_preparing then
- report_prepare("preparation time is %0.3f seconds for %s",os.clock()-starttime,tfmdata.properties.fullname or "?")
- end
- end
- end
-end
-
-registerotffeature {
- name = "features",
- description = "features",
- default = true,
- initializers = {
- position = 1,
- node = featuresinitializer,
- },
- processors = {
- node = featuresprocessor,
- }
-}
diff --git a/otfl-font-pfb.lua b/otfl-font-pfb.lua
deleted file mode 100644
index 66abf23..0000000
--- a/otfl-font-pfb.lua
+++ /dev/null
@@ -1,8 +0,0 @@
-local fonts = fonts
-local readers = fonts.readers
-
-fonts.formats.pfb = "pfb"
-fonts.formats.pfa = "pfa"
-
-function readers.pfb(specification) return readers.opentype(specification,"pfb","type1") end
-function readers.pfa(specification) return readers.opentype(specification,"pfa","type1") end
diff --git a/otfl-luat-ovr.lua b/otfl-luat-ovr.lua
deleted file mode 100644
index 63a9e19..0000000
--- a/otfl-luat-ovr.lua
+++ /dev/null
@@ -1,36 +0,0 @@
-if not modules then modules = { } end modules ['luat-ovr'] = {
- version = 1.001,
- comment = "companion to luatex-*.tex",
- author = "Khaled Hosny and Elie Roux",
- copyright = "Luaotfload Development Team",
- license = "GNU GPL v2"
-}
-
-
-local write_nl, format, name = texio.write_nl, string.format, "luaotfload"
-local dummyfunction = function() end
-
-callbacks = {
- register = dummyfunction,
-}
-
-function logs.report(category,fmt,...)
- if fmt then
- write_nl('log', format("%s | %s: %s",name,category,format(fmt,...)))
- elseif category then
- write_nl('log', format("%s | %s",name,category))
- else
- write_nl('log', format("%s |",name))
- end
-end
-
-function logs.info(category,fmt,...)
- if fmt then
- write_nl(format("%s | %s: %s",name,category,format(fmt,...)))
- elseif category then
- write_nl(format("%s | %s",name,category))
- else
- write_nl(format("%s |",name))
- end
- io.flush()
-end
diff --git a/otfl-node-inj.lua b/otfl-node-inj.lua
deleted file mode 100644
index 246aaad..0000000
--- a/otfl-node-inj.lua
+++ /dev/null
@@ -1,498 +0,0 @@
-if not modules then modules = { } end modules ['node-inj'] = {
- version = 1.001,
- comment = "companion to node-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- This is very experimental (this will change when we have luatex > .50 and
--- a few pending thingies are available). Also, Idris needs to make a few more
--- test fonts. Btw, future versions of luatex will have extended glyph properties
--- that can be of help.
-
-local next = next
-
-local trace_injections = false trackers.register("nodes.injections", function(v) trace_injections = v end)
-
-local report_injections = logs.reporter("nodes","injections")
-
-local attributes, nodes, node = attributes, nodes, node
-
-fonts = fonts
-local fontdata = fonts.hashes.identifiers
-
-nodes.injections = nodes.injections or { }
-local injections = nodes.injections
-
-local nodecodes = nodes.nodecodes
-local glyph_code = nodecodes.glyph
-local nodepool = nodes.pool
-local newkern = nodepool.kern
-
-local traverse_id = node.traverse_id
-local unset_attribute = node.unset_attribute
-local has_attribute = node.has_attribute
-local set_attribute = node.set_attribute
-local copy_node = node.copy
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-
-local markbase = attributes.private('markbase')
-local markmark = attributes.private('markmark')
-local markdone = attributes.private('markdone')
-local cursbase = attributes.private('cursbase')
-local curscurs = attributes.private('curscurs')
-local cursdone = attributes.private('cursdone')
-local kernpair = attributes.private('kernpair')
-local ligacomp = attributes.private('ligacomp')
-local fontkern = attributes.private('fontkern')
-
-if context then
-
- local kern = nodes.pool.register(newkern())
-
- set_attribute(kern,fontkern,1) -- we can have several, attributes are shared
-
- newkern = function(k)
- local c = copy_node(kern)
- c.kern = k
- return c
- end
-
-end
-
--- This injector has been tested by Idris Samawi Hamid (several arabic fonts as well as
--- the rather demanding Husayni font), Khaled Hosny (latin and arabic) and Kaj Eigner
--- (arabic, hebrew and thai) and myself (whatever font I come across). I'm pretty sure
--- that this code is not 100% okay but examples are needed to figure things out.
-
-local cursives = { }
-local marks = { }
-local kerns = { }
-
--- Currently we do gpos/kern in a somewhat unofficial way but when we have the extra fields in
--- glyphnodes to manipulate ht/dp/wd explicitly I will provide an alternative; also, we
--- can share tables.
-
--- For the moment we pass the r2l key ... volt/arabtype tests .. idris: this needs
--- checking with husayni (volt and fontforge).
-
-function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext)
- local dx, dy = factor*(exit[1]-entry[1]), factor*(exit[2]-entry[2])
- local ws, wn = tfmstart.width, tfmnext.width
- local bound = #cursives + 1
- set_attribute(start,cursbase,bound)
- set_attribute(nxt,curscurs,bound)
- cursives[bound] = { rlmode, dx, dy, ws, wn }
- return dx, dy, bound
-end
-
-function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr)
- local x, y, w, h = factor*spec[1], factor*spec[2], factor*spec[3], factor*spec[4]
- -- dy = y - h
- if x ~= 0 or w ~= 0 or y ~= 0 or h ~= 0 then
- local bound = has_attribute(current,kernpair)
- if bound then
- local kb = kerns[bound]
- -- inefficient but singles have less, but weird anyway, needs checking
- kb[2], kb[3], kb[4], kb[5] = (kb[2] or 0) + x, (kb[3] or 0) + y, (kb[4] or 0)+ w, (kb[5] or 0) + h
- else
- bound = #kerns + 1
- set_attribute(current,kernpair,bound)
- kerns[bound] = { rlmode, x, y, w, h, r2lflag, tfmchr.width }
- end
- return x, y, w, h, bound
- end
- return x, y, w, h -- no bound
-end
-
-function injections.setkern(current,factor,rlmode,x,tfmchr)
- local dx = factor*x
- if dx ~= 0 then
- local bound = #kerns + 1
- set_attribute(current,kernpair,bound)
- kerns[bound] = { rlmode, dx }
- return dx, bound
- else
- return 0, 0
- end
-end
-
-function injections.setmark(start,base,factor,rlmode,ba,ma,index) -- ba=baseanchor, ma=markanchor
- local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2]) -- the index argument is no longer used but when this
- local bound = has_attribute(base,markbase) -- fails again we should pass it
-local index = 1
- if bound then
- local mb = marks[bound]
- if mb then
- -- if not index then index = #mb + 1 end
-index = #mb + 1
- mb[index] = { dx, dy, rlmode }
- set_attribute(start,markmark,bound)
- set_attribute(start,markdone,index)
- return dx, dy, bound
- else
- report_injections("possible problem, U+%05X is base mark without data (id: %s)",base.char,bound)
- end
- end
--- index = index or 1
- index = index or 1
- bound = #marks + 1
- set_attribute(base,markbase,bound)
- set_attribute(start,markmark,bound)
- set_attribute(start,markdone,index)
- marks[bound] = { [index] = { dx, dy, rlmode } }
- return dx, dy, bound
-end
-
-local function dir(n)
- return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset"
-end
-
-local function trace(head)
- report_injections("begin run")
- for n in traverse_id(glyph_code,head) do
- if n.subtype < 256 then
- local kp = has_attribute(n,kernpair)
- local mb = has_attribute(n,markbase)
- local mm = has_attribute(n,markmark)
- local md = has_attribute(n,markdone)
- local cb = has_attribute(n,cursbase)
- local cc = has_attribute(n,curscurs)
- report_injections("char U+%05X, font=%s",n.char,n.font)
- if kp then
- local k = kerns[kp]
- if k[3] then
- report_injections(" pairkern: dir=%s, x=%s, y=%s, w=%s, h=%s",dir(k[1]),k[2] or "?",k[3] or "?",k[4] or "?",k[5] or "?")
- else
- report_injections(" kern: dir=%s, dx=%s",dir(k[1]),k[2] or "?")
- end
- end
- if mb then
- report_injections(" markbase: bound=%s",mb)
- end
- if mm then
- local m = marks[mm]
- if mb then
- local m = m[mb]
- if m then
- report_injections(" markmark: bound=%s, index=%s, dx=%s, dy=%s",mm,md or "?",m[1] or "?",m[2] or "?")
- else
- report_injections(" markmark: bound=%s, missing index",mm)
- end
- else
- m = m[1]
- report_injections(" markmark: bound=%s, dx=%s, dy=%s",mm,m and m[1] or "?",m and m[2] or "?")
- end
- end
- if cb then
- report_injections(" cursbase: bound=%s",cb)
- end
- if cc then
- local c = cursives[cc]
- report_injections(" curscurs: bound=%s, dir=%s, dx=%s, dy=%s",cc,dir(c[1]),c[2] or "?",c[3] or "?")
- end
- end
- end
- report_injections("end run")
-end
-
--- todo: reuse tables (i.e. no collection), but will be extra fields anyway
--- todo: check for attribute
-
--- We can have a fast test on a font being processed, so we can check faster for marks etc
--- but I'll make a context variant anyway.
-
-function injections.handler(head,where,keep)
- local has_marks, has_cursives, has_kerns = next(marks), next(cursives), next(kerns)
- if has_marks or has_cursives then
- if trace_injections then
- trace(head)
- end
- -- in the future variant we will not copy items but refs to tables
- local done, ky, rl, valid, cx, wx, mk, nofvalid = false, { }, { }, { }, { }, { }, { }, 0
- if has_kerns then -- move outside loop
- local nf, tm = nil, nil
- for n in traverse_id(glyph_code,head) do -- only needed for relevant fonts
- if n.subtype < 256 then
- nofvalid = nofvalid + 1
- valid[nofvalid] = n
- if n.font ~= nf then
- nf = n.font
- tm = fontdata[nf].resources.marks
- end
- if tm then
- mk[n] = tm[n.char]
- end
- local k = has_attribute(n,kernpair)
- if k then
- local kk = kerns[k]
- if kk then
- local x, y, w, h = kk[2] or 0, kk[3] or 0, kk[4] or 0, kk[5] or 0
- local dy = y - h
- if dy ~= 0 then
- ky[n] = dy
- end
- if w ~= 0 or x ~= 0 then
- wx[n] = kk
- end
- rl[n] = kk[1] -- could move in test
- end
- end
- end
- end
- else
- local nf, tm = nil, nil
- for n in traverse_id(glyph_code,head) do
- if n.subtype < 256 then
- nofvalid = nofvalid + 1
- valid[nofvalid] = n
- if n.font ~= nf then
- nf = n.font
- tm = fontdata[nf].resources.marks
- end
- if tm then
- mk[n] = tm[n.char]
- end
- end
- end
- end
- if nofvalid > 0 then
- -- we can assume done == true because we have cursives and marks
- local cx = { }
- if has_kerns and next(ky) then
- for n, k in next, ky do
- n.yoffset = k
- end
- end
- -- todo: reuse t and use maxt
- if has_cursives then
- local p_cursbase, p = nil, nil
- -- since we need valid[n+1] we can also use a "while true do"
- local t, d, maxt = { }, { }, 0
- for i=1,nofvalid do -- valid == glyphs
- local n = valid[i]
- if not mk[n] then
- local n_cursbase = has_attribute(n,cursbase)
- if p_cursbase then
- local n_curscurs = has_attribute(n,curscurs)
- if p_cursbase == n_curscurs then
- local c = cursives[n_curscurs]
- if c then
- local rlmode, dx, dy, ws, wn = c[1], c[2], c[3], c[4], c[5]
- if rlmode >= 0 then
- dx = dx - ws
- else
- dx = dx + wn
- end
- if dx ~= 0 then
- cx[n] = dx
- rl[n] = rlmode
- end
- -- if rlmode and rlmode < 0 then
- dy = -dy
- -- end
- maxt = maxt + 1
- t[maxt] = p
- d[maxt] = dy
- else
- maxt = 0
- end
- end
- elseif maxt > 0 then
- local ny = n.yoffset
- for i=maxt,1,-1 do
- ny = ny + d[i]
- local ti = t[i]
- ti.yoffset = ti.yoffset + ny
- end
- maxt = 0
- end
- if not n_cursbase and maxt > 0 then
- local ny = n.yoffset
- for i=maxt,1,-1 do
- ny = ny + d[i]
- local ti = t[i]
- ti.yoffset = ny
- end
- maxt = 0
- end
- p_cursbase, p = n_cursbase, n
- end
- end
- if maxt > 0 then
- local ny = n.yoffset
- for i=maxt,1,-1 do
- ny = ny + d[i]
- local ti = t[i]
- ti.yoffset = ny
- end
- maxt = 0
- end
- if not keep then
- cursives = { }
- end
- end
- if has_marks then
- for i=1,nofvalid do
- local p = valid[i]
- local p_markbase = has_attribute(p,markbase)
- if p_markbase then
- local mrks = marks[p_markbase]
- local nofmarks = #mrks
- for n in traverse_id(glyph_code,p.next) do
- local n_markmark = has_attribute(n,markmark)
- if p_markbase == n_markmark then
- local index = has_attribute(n,markdone) or 1
- local d = mrks[index]
- if d then
- local rlmode = d[3]
- if rlmode and rlmode >= 0 then
- -- new per 2010-10-06, width adapted per 2010-02-03
- -- we used to negate the width of marks because in tfm
- -- that makes sense but we no longer do that so as a
- -- consequence the sign of p.width was changed (we need
- -- to keep an eye on it as we don't have that many fonts
- -- that enter this branch .. I'm still not sure if this
- -- one is right
- local k = wx[p]
- if k then
- n.xoffset = p.xoffset + p.width + d[1] - k[2]
- else
- -- n.xoffset = p.xoffset + p.width + d[1]
- -- lucida U\char"032F (default+mark)
- n.xoffset = p.xoffset - p.width + d[1] -- 01-05-2011
- end
- else
- local k = wx[p]
- if k then
- n.xoffset = p.xoffset - d[1] - k[2]
- else
- n.xoffset = p.xoffset - d[1]
- end
- end
- if mk[p] then
- n.yoffset = p.yoffset + d[2]
- else
- n.yoffset = n.yoffset + p.yoffset + d[2]
- end
- if nofmarks == 1 then
- break
- else
- nofmarks = nofmarks - 1
- end
- end
- else
- -- KE: there can be <mark> <mkmk> <mark> sequences in ligatures
- end
- end
- end
- end
- if not keep then
- marks = { }
- end
- end
- -- todo : combine
- if next(wx) then
- for n, k in next, wx do
- -- only w can be nil (kernclasses), can be sped up when w == nil
- local x, w = k[2] or 0, k[4]
- if w then
- local rl = k[1] -- r2l = k[6]
- local wx = w - x
- if rl < 0 then -- KE: don't use r2l here
- if wx ~= 0 then
- insert_node_before(head,n,newkern(wx))
- end
- if x ~= 0 then
- insert_node_after (head,n,newkern(x))
- end
- else
- if x ~= 0 then
- insert_node_before(head,n,newkern(x))
- end
- if wx ~= 0 then
- insert_node_after(head,n,newkern(wx))
- end
- end
- elseif x ~= 0 then
- -- this needs checking for rl < 0 but it is unlikely that a r2l script
- -- uses kernclasses between glyphs so we're probably safe (KE has a
- -- problematic font where marks interfere with rl < 0 in the previous
- -- case)
- insert_node_before(head,n,newkern(x))
- end
- end
- end
- if next(cx) then
- for n, k in next, cx do
- if k ~= 0 then
- local rln = rl[n]
- if rln and rln < 0 then
- insert_node_before(head,n,newkern(-k))
- else
- insert_node_before(head,n,newkern(k))
- end
- end
- end
- end
- if not keep then
- kerns = { }
- end
- return head, true
- elseif not keep then
- kerns, cursives, marks = { }, { }, { }
- end
- elseif has_kerns then
- if trace_injections then
- trace(head)
- end
- for n in traverse_id(glyph_code,head) do
- if n.subtype < 256 then
- local k = has_attribute(n,kernpair)
- if k then
- local kk = kerns[k]
- if kk then
- local rl, x, y, w = kk[1], kk[2] or 0, kk[3], kk[4]
- if y and y ~= 0 then
- n.yoffset = y -- todo: h ?
- end
- if w then
- -- copied from above
- -- local r2l = kk[6]
- local wx = w - x
- if rl < 0 then -- KE: don't use r2l here
- if wx ~= 0 then
- insert_node_before(head,n,newkern(wx))
- end
- if x ~= 0 then
- insert_node_after (head,n,newkern(x))
- end
- else
- if x ~= 0 then
- insert_node_before(head,n,newkern(x))
- end
- if wx ~= 0 then
- insert_node_after(head,n,newkern(wx))
- end
- end
- else
- -- simple (e.g. kernclass kerns)
- if x ~= 0 then
- insert_node_before(head,n,newkern(x))
- end
- end
- end
- end
- end
- end
- if not keep then
- kerns = { }
- end
- return head, true
- else
- -- no tracing needed
- end
- return head, false
-end
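The handler removed above walks the glyph nodes, applies the collected mark and cursive offsets to the xoffset/yoffset fields, and materializes pair kerns as kern nodes. A minimal sketch of that last step, assuming the LuaTeX node library; kern_for is a hypothetical lookup standing in for the kerns/attribute machinery deleted here, not part of the original code:

    -- Minimal sketch: insert a kern node before every glyph for which a
    -- kern amount (in scaled points) is known. kern_for is hypothetical.
    local glyph_id = node.id("glyph")
    local kern_id  = node.id("kern")

    local function inject_simple_kerns(head, kern_for)
        for n in node.traverse_id(glyph_id, head) do
            local x = kern_for(n)              -- e.g. resolved via an attribute
            if x and x ~= 0 then
                local k = node.new(kern_id)    -- kern node carrying the offset
                k.kern = x
                head = node.insert_before(head, n, k)
            end
        end
        return head
    end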
diff --git a/tests/alternate_sub.tex b/tests/alternate_sub.tex
index e646ddb..862c665 100644
--- a/tests/alternate_sub.tex
+++ b/tests/alternate_sub.tex
@@ -13,4 +13,3 @@
\1 \char"06DD
\2 \char"06DD
\bye
-\bye
diff --git a/tests/color.tex b/tests/color.tex
index 1be8896..188889c 100644
--- a/tests/color.tex
+++ b/tests/color.tex
@@ -4,7 +4,7 @@
\font\testb=file:lmroman10-regular.otf:color=FFFF0099;+trep at 10pt
\font\testc=file:lmroman10-regular.otf:color=559922;+trep at 12pt
-\testa \input knuth \par
-\testb \input knuth \par
-\testc \input knuth \par
+\testa FF0000BB \par
+\testb FFFF0099 \par
+\testc 559922 \par
\bye
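The revised color test above feeds color=RRGGBB[AA] values directly into the font request. A minimal sketch of splitting such a specification into numeric components, in plain Lua; parse_color is a hypothetical helper for illustration, not the luaotfload interface:

    -- Minimal sketch: turn "RRGGBB" or "RRGGBBAA" into 0..1 components.
    local function parse_color(spec)
        local r, g, b, a = spec:match("^(%x%x)(%x%x)(%x%x)(%x?%x?)$")
        if not r then return nil end
        local function c(h) return tonumber(h, 16) / 255 end
        return c(r), c(g), c(b), a ~= "" and c(a) or nil
    end

    print(parse_color("FF0000BB"))  --> 1  0  0  ~0.73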
diff --git a/tests/fontspec_lookup.ltx b/tests/fontspec_lookup.ltx
new file mode 100644
index 0000000..6645427
--- /dev/null
+++ b/tests/fontspec_lookup.ltx
@@ -0,0 +1,41 @@
+\documentclass[a5paper,12pt]{scrartcl}
+\usepackage{fontspec}
+%% --------------------------------------------------------------------
+%% weirdness ahead
+%% --------------------------------------------------------------------
+\setmainfont
+ [Numbers=Lining,
+ BoldFont={TeX Gyre Pagella Bold},
+ BoldItalicFont={TeX Gyre Termes BoldItalic}]
+ {EB Garamond}
+%% --------------------------------------------------------------------
+
+%% --------------------------------------------------------------------
+%% excerpt from samples/knuth.tex
+%% --------------------------------------------------------------------
+\def\knuth{%
+ Thus, I came to the conclusion that the designer of a new
+ system must not only be the implementer and first
+ large--scale user; the designer should also write the first
+ user manual.
+
+ The separation of any of these four components would have
+ hurt \TeX\ significantly. If I had not participated fully in
+ all these activities, literally hundreds of improvements
+ would never have been made, because I would never have
+ thought of them or perceived why they were important.
+
+}
+
+%% --------------------------------------------------------------------
+%% main
+%% --------------------------------------------------------------------
+\begin{document}
+
+ \section{regular} {\rmfamily\upshape\knuth}
+ \section{bold face} {\rmfamily\bfseries\knuth}
+ \section{italic} {\rmfamily\itshape\knuth}
+ \section{slanted} {\rmfamily\slshape\knuth}
+ \section{bold italic} {\rmfamily\bfseries\itshape\knuth}
+
+\end{document}
diff --git a/tests/fullname.tex b/tests/fullname.tex
index 0209c98..78cf4d0 100644
--- a/tests/fullname.tex
+++ b/tests/fullname.tex
@@ -3,7 +3,13 @@
\font\testa={LM Roman Slanted 10 Regular} at 10pt
\font\testb={LM Roman 9 Italic} at 10pt
\font\testc={TeX Gyre Termes Bold} at 25pt
+% Also testing with an absolute filename; please adjust the path to match your
+% system
+\font\testd=file:/usr/share/texmf/fonts/opentype/public/lm/lmmono10-italic.otf
+
\testa abcd ABCD\par
\testb abcd ABCD\par
\testc abcd ABCD\par
+\testd abcd ABCD\par
+
\bye
diff --git a/tests/lookups.tex b/tests/lookups.tex
new file mode 100644
index 0000000..db26312
--- /dev/null
+++ b/tests/lookups.tex
@@ -0,0 +1,14 @@
+\input luaotfload.sty
+%% lookup font by name (involving database)
+\font\first=name:iwonaregular at 42pt
+%% lookup font by file name (kpse)
+\font\second=file:antpoltltsemiexpd-bolditalic.otf at 42pt
+%% lookup font by name, with style in slash notation
+\font\third={name:Antykwa torunska/I} at 42pt
+
+{\first foo \endgraf}
+{\second bar \endgraf}
+{\third baz \endgraf}
+
+\bye
+
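The new lookups.tex exercises three request forms: a name: lookup through the font database, a file: lookup through kpse, and a name carrying a style modifier in slash notation. A minimal sketch of peeling off such a modifier, in plain Lua; split_style is a hypothetical helper, not the actual request parser:

    -- Minimal sketch: separate "Antykwa torunska/I" into base name and
    -- style letter ("I" = italic, "B" = bold, ...). Hypothetical helper.
    local function split_style(name)
        local base, style = name:match("^(.-)%s*/%s*(%a+)$")
        if base then
            return base, style:upper()
        end
        return name, nil
    end

    print(split_style("Antykwa torunska/I"))  --> Antykwa torunska    I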
diff --git a/tests/marks.tex b/tests/marks.tex
index d33c82a..9dcf460 100644
--- a/tests/marks.tex
+++ b/tests/marks.tex
@@ -1,6 +1,10 @@
\input luaotfload.sty
\font\test={file:GenBasR.ttf:script=latn}
-\test a\char"0308
-\quad A\char"0308
-\quad j\char"0323
+%font\test={name:AntykwaTorunska:script=latn}
+\test ä\quad Ä
+
+\test a\char"0308 %% -> combining diaeresis
+\quad A\char"0308 %% -> combining diaeresis
+\quad j\char"0323 %% -> combining dot below
+
\bye
diff --git a/tests/math.tex b/tests/math.tex
index 55bb2aa..a2615f1 100644
--- a/tests/math.tex
+++ b/tests/math.tex
@@ -35,7 +35,6 @@ $$
$$
\Umathaccent "0 "4 "23DE {a+b}
-+ \Umathbotaccent "0 "4 "23DF {a+b} = C
$$
$$
diff --git a/tests/microtypography.tex b/tests/microtypography.tex
index 7d032e3..99deb5f 100644
--- a/tests/microtypography.tex
+++ b/tests/microtypography.tex
@@ -4,6 +4,33 @@
\font\testa=file:texgyretermes-regular:script=latn at 12pt
\font\testb=file:texgyretermes-regular:script=latn;protrusion=default at 12pt
-\testa \input tufte \par
-\testb \input tufte \par
+
+\testa We thrive in information thick worlds because of our
+marvelous and everyday capacity to select, edit,
+single out, structure, highlight, group, pair, merge,
+harmonize, synthesize, focus, organize, condense,
+reduce, boil down, choose, categorize, catalog, classify,
+list, abstract, scan, look into, idealize, isolate,
+discriminate, distinguish, screen, pigeonhole, pick over,
+sort, integrate, blend, inspect, filter, lump, skip,
+smooth, chunk, average, approximate, cluster, aggregate,
+outline, summarize, itemize, review, dip into,
+flip through, browse, glance into, leaf through, skim,
+refine, enumerate, glean, synopsize, winnow the wheat
+from the chaff and separate the sheep from the goats.\par
+
+\testb We thrive in information thick worlds because of our
+marvelous and everyday capacity to select, edit,
+single out, structure, highlight, group, pair, merge,
+harmonize, synthesize, focus, organize, condense,
+reduce, boil down, choose, categorize, catalog, classify,
+list, abstract, scan, look into, idealize, isolate,
+discriminate, distinguish, screen, pigeonhole, pick over,
+sort, integrate, blend, inspect, filter, lump, skip,
+smooth, chunk, average, approximate, cluster, aggregate,
+outline, summarize, itemize, review, dip into,
+flip through, browse, glance into, leaf through, skim,
+refine, enumerate, glean, synopsize, winnow the wheat
+from the chaff and separate the sheep from the goats.\par
+
\bye
diff --git a/tests/opbd.tex b/tests/opbd.tex
index 50c4dfd..1a838cd 100644
--- a/tests/opbd.tex
+++ b/tests/opbd.tex
@@ -4,6 +4,32 @@
\font\testa=file:texgyrepagella-regular:script=latn at 12pt
\font\testb=file:texgyrepagella-regular:mode=node;script=latn;protrusion=yes;featurefile=opbd.fea;+opbd at 12pt
-\testa \input tufte \par
-\testb \input tufte \par
+
+\testa We thrive in information thick worlds because of our
+marvelous and everyday capacity to select, edit,
+single out, structure, highlight, group, pair, merge,
+harmonize, synthesize, focus, organize, condense,
+reduce, boil down, choose, categorize, catalog, classify,
+list, abstract, scan, look into, idealize, isolate,
+discriminate, distinguish, screen, pigeonhole, pick over,
+sort, integrate, blend, inspect, filter, lump, skip,
+smooth, chunk, average, approximate, cluster, aggregate,
+outline, summarize, itemize, review, dip into,
+flip through, browse, glance into, leaf through, skim,
+refine, enumerate, glean, synopsize, winnow the wheat
+from the chaff and separate the sheep from the goats.\par
+
+\testb We thrive in information thick worlds because of our
+marvelous and everyday capacity to select, edit,
+single out, structure, highlight, group, pair, merge,
+harmonize, synthesize, focus, organize, condense,
+reduce, boil down, choose, categorize, catalog, classify,
+list, abstract, scan, look into, idealize, isolate,
+discriminate, distinguish, screen, pigeonhole, pick over,
+sort, integrate, blend, inspect, filter, lump, skip,
+smooth, chunk, average, approximate, cluster, aggregate,
+outline, summarize, itemize, review, dip into,
+flip through, browse, glance into, leaf through, skim,
+refine, enumerate, glean, synopsize, winnow the wheat
+from the chaff and separate the sheep from the goats.\par
\bye
diff --git a/tests/zero_width_marks_lig.tex b/tests/zero_width_marks_lig.tex
new file mode 100644
index 0000000..2c6dba9
--- /dev/null
+++ b/tests/zero_width_marks_lig.tex
@@ -0,0 +1,16 @@
+\input luaotfload.sty
+
+% https://bugs.freedesktop.org/attachment.cgi?id=72363
+\font\testa=file:TestLig.ttf:script=tibt;+ccmp+abvs+blws+kern at 10pt
+
+\testa གཚོའི་ཁིའུ་ཨཱཿཀ
+
+% good result for the first part:
+% https://bugs.freedesktop.org/attachment.cgi?id=72365
+% for the second part, the two circles should appear clearly before the last
+% letter, not mixed with it
+
+% see http://lists.freedesktop.org/archives/harfbuzz/2013-April/003101.html
+% for some technical details.
+
+\bye